Auto merge of #16434 - Veykril:things, r=Veykril

internal: Restructure and cleanup hir-expand a bit
This commit is contained in:
bors 2024-01-27 10:16:35 +00:00
commit 27c3ed9de8
62 changed files with 750 additions and 724 deletions

View file

@ -2,8 +2,8 @@
#![warn(rust_2018_idioms, unused_lifetimes)] #![warn(rust_2018_idioms, unused_lifetimes)]
mod input;
mod change; mod change;
mod input;
use std::panic; use std::panic;

View file

@ -18,28 +18,6 @@ pub enum CfgAtom {
KeyValue { key: SmolStr, value: SmolStr }, KeyValue { key: SmolStr, value: SmolStr },
} }
impl CfgAtom {
/// Returns `true` when the atom comes from the target specification.
///
/// If this returns `true`, then changing this atom requires changing the compilation target. If
/// it returns `false`, the atom might come from a build script or the build system.
pub fn is_target_defined(&self) -> bool {
match self {
CfgAtom::Flag(flag) => matches!(&**flag, "unix" | "windows"),
CfgAtom::KeyValue { key, value: _ } => matches!(
&**key,
"target_arch"
| "target_os"
| "target_env"
| "target_family"
| "target_endian"
| "target_pointer_width"
| "target_vendor" // NOTE: `target_feature` is left out since it can be configured via `-Ctarget-feature`
),
}
}
}
impl fmt::Display for CfgAtom { impl fmt::Display for CfgAtom {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {

View file

@ -131,11 +131,9 @@ impl CfgDiff {
/// of both. /// of both.
pub fn new(enable: Vec<CfgAtom>, disable: Vec<CfgAtom>) -> Option<CfgDiff> { pub fn new(enable: Vec<CfgAtom>, disable: Vec<CfgAtom>) -> Option<CfgDiff> {
let mut occupied = FxHashSet::default(); let mut occupied = FxHashSet::default();
for item in enable.iter().chain(disable.iter()) { if enable.iter().chain(disable.iter()).any(|item| !occupied.insert(item)) {
if !occupied.insert(item) { // was present
// was present return None;
return None;
}
} }
Some(CfgDiff { enable, disable }) Some(CfgDiff { enable, disable })

View file

@ -32,6 +32,7 @@ use crate::{
VariantId, VariantId,
}; };
/// Desugared attributes of an item post `cfg_attr` expansion.
#[derive(Default, Debug, Clone, PartialEq, Eq)] #[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct Attrs(RawAttrs); pub struct Attrs(RawAttrs);
@ -228,7 +229,6 @@ pub enum DocAtom {
KeyValue { key: SmolStr, value: SmolStr }, KeyValue { key: SmolStr, value: SmolStr },
} }
// Adapted from `CfgExpr` parsing code
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum DocExpr { pub enum DocExpr {
Invalid, Invalid,
@ -448,10 +448,7 @@ impl AttrsWithOwner {
let map = db.fields_attrs_source_map(id.parent); let map = db.fields_attrs_source_map(id.parent);
let file_id = id.parent.file_id(db); let file_id = id.parent.file_id(db);
let root = db.parse_or_expand(file_id); let root = db.parse_or_expand(file_id);
let owner = match &map[id.local_id] { let owner = ast::AnyHasAttrs::new(map[id.local_id].to_node(&root));
Either::Left(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
Either::Right(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
};
InFile::new(file_id, owner) InFile::new(file_id, owner)
} }
AttrDefId::AdtId(adt) => match adt { AttrDefId::AdtId(adt) => match adt {
@ -634,7 +631,7 @@ fn attrs_from_item_tree_assoc<'db, N: ItemTreeModItemNode>(
pub(crate) fn fields_attrs_source_map( pub(crate) fn fields_attrs_source_map(
db: &dyn DefDatabase, db: &dyn DefDatabase,
def: VariantId, def: VariantId,
) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>> { ) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>> {
let mut res = ArenaMap::default(); let mut res = ArenaMap::default();
let child_source = def.child_source(db); let child_source = def.child_source(db);
@ -643,7 +640,7 @@ pub(crate) fn fields_attrs_source_map(
idx, idx,
variant variant
.as_ref() .as_ref()
.either(|l| Either::Left(AstPtr::new(l)), |r| Either::Right(AstPtr::new(r))), .either(|l| AstPtr::new(l).wrap_left(), |r| AstPtr::new(r).wrap_right()),
); );
} }

View file

@ -1,10 +1,10 @@
//! Defines `Body`: a lowered representation of bodies of functions, statics and //! Defines `Body`: a lowered representation of bodies of functions, statics and
//! consts. //! consts.
mod lower; mod lower;
mod pretty;
pub mod scope;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
pub mod scope;
mod pretty;
use std::ops::Index; use std::ops::Index;

View file

@ -194,7 +194,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
fn fields_attrs_source_map( fn fields_attrs_source_map(
&self, &self,
def: VariantId, def: VariantId,
) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>>; ) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
#[salsa::invoke(AttrsWithOwner::attrs_query)] #[salsa::invoke(AttrsWithOwner::attrs_query)]
fn attrs(&self, def: AttrDefId) -> Attrs; fn attrs(&self, def: AttrDefId) -> Attrs;

View file

@ -12,8 +12,8 @@
//! //!
//! See also a neighboring `body` module. //! See also a neighboring `body` module.
pub mod type_ref;
pub mod format_args; pub mod format_args;
pub mod type_ref;
use std::fmt; use std::fmt;

View file

@ -25,13 +25,13 @@ extern crate ra_ap_rustc_abi as rustc_abi;
pub mod db; pub mod db;
pub mod attr; pub mod attr;
pub mod path;
pub mod builtin_type; pub mod builtin_type;
pub mod per_ns;
pub mod item_scope; pub mod item_scope;
pub mod path;
pub mod per_ns;
pub mod lower;
pub mod expander; pub mod expander;
pub mod lower;
pub mod dyn_map; pub mod dyn_map;
@ -46,24 +46,24 @@ pub use self::hir::type_ref;
pub mod body; pub mod body;
pub mod resolver; pub mod resolver;
mod trace;
pub mod nameres; pub mod nameres;
mod trace;
pub mod src;
pub mod child_by_source; pub mod child_by_source;
pub mod src;
pub mod visibility;
pub mod find_path; pub mod find_path;
pub mod import_map; pub mod import_map;
pub mod visibility;
pub use rustc_abi as layout; pub use rustc_abi as layout;
use triomphe::Arc; use triomphe::Arc;
#[cfg(test)]
mod test_db;
#[cfg(test)] #[cfg(test)]
mod macro_expansion_tests; mod macro_expansion_tests;
mod pretty; mod pretty;
#[cfg(test)]
mod test_db;
use std::{ use std::{
hash::{Hash, Hasher}, hash::{Hash, Hasher},
@ -73,7 +73,6 @@ use std::{
use base_db::{impl_intern_key, salsa, CrateId, Edition}; use base_db::{impl_intern_key, salsa, CrateId, Edition};
use hir_expand::{ use hir_expand::{
ast_id_map::{AstIdNode, FileAstId}, ast_id_map::{AstIdNode, FileAstId},
attrs::{Attr, AttrId, AttrInput},
builtin_attr_macro::BuiltinAttrExpander, builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander, builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
@ -1274,60 +1273,6 @@ fn macro_call_as_call_id_with_eager(
Ok(res) Ok(res)
} }
fn derive_macro_as_call_id(
db: &dyn DefDatabase,
item_attr: &AstIdWithPath<ast::Adt>,
derive_attr_index: AttrId,
derive_pos: u32,
call_site: Span,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
let (macro_id, def_id) = resolver(item_attr.path.clone())
.filter(|(_, def_id)| def_id.is_derive())
.ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
let call_id = def_id.as_lazy_macro(
db.upcast(),
krate,
MacroCallKind::Derive {
ast_id: item_attr.ast_id,
derive_index: derive_pos,
derive_attr_index,
},
call_site,
);
Ok((macro_id, def_id, call_id))
}
fn attr_macro_as_call_id(
db: &dyn DefDatabase,
item_attr: &AstIdWithPath<ast::Item>,
macro_attr: &Attr,
krate: CrateId,
def: MacroDefId,
) -> MacroCallId {
let arg = match macro_attr.input.as_deref() {
Some(AttrInput::TokenTree(tt)) => {
let mut tt = tt.as_ref().clone();
tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);
Some(tt)
}
_ => None,
};
def.as_lazy_macro(
db.upcast(),
krate,
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id,
},
macro_attr.span,
)
}
#[derive(Debug)] #[derive(Debug)]
pub struct UnresolvedMacro { pub struct UnresolvedMacro {
pub path: hir_expand::mod_path::ModPath, pub path: hir_expand::mod_path::ModPath,

View file

@ -1,11 +1,11 @@
//! Tests specific to declarative macros, aka macros by example. This covers //! Tests specific to declarative macros, aka macros by example. This covers
//! both stable `macro_rules!` macros as well as unstable `macro` macros. //! both stable `macro_rules!` macros as well as unstable `macro` macros.
mod tt_conversion;
mod matching; mod matching;
mod meta_syntax; mod meta_syntax;
mod metavar_expr; mod metavar_expr;
mod regression; mod regression;
mod tt_conversion;
use expect_test::expect; use expect_test::expect;

View file

@ -9,9 +9,9 @@
//! write unit-tests (in fact, we used to do that), but that makes tests brittle //! write unit-tests (in fact, we used to do that), but that makes tests brittle
//! and harder to understand. //! and harder to understand.
mod mbe;
mod builtin_fn_macro;
mod builtin_derive_macro; mod builtin_derive_macro;
mod builtin_fn_macro;
mod mbe;
mod proc_macros; mod proc_macros;
use std::{iter, ops::Range, sync}; use std::{iter, ops::Range, sync};

View file

@ -48,11 +48,11 @@
//! the result //! the result
pub mod attr_resolution; pub mod attr_resolution;
pub mod proc_macro;
pub mod diagnostics;
mod collector; mod collector;
pub mod diagnostics;
mod mod_resolution; mod mod_resolution;
mod path_resolution; mod path_resolution;
pub mod proc_macro;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;

View file

@ -1,16 +1,21 @@
//! Post-nameres attribute resolution. //! Post-nameres attribute resolution.
use hir_expand::{attrs::Attr, MacroCallId}; use base_db::CrateId;
use hir_expand::{
attrs::{Attr, AttrId, AttrInput},
MacroCallId, MacroCallKind, MacroDefId,
};
use span::Span;
use syntax::{ast, SmolStr}; use syntax::{ast, SmolStr};
use triomphe::Arc;
use crate::{ use crate::{
attr::builtin::{find_builtin_attr_idx, TOOL_MODULES}, attr::builtin::{find_builtin_attr_idx, TOOL_MODULES},
attr_macro_as_call_id,
db::DefDatabase, db::DefDatabase,
item_scope::BuiltinShadowMode, item_scope::BuiltinShadowMode,
nameres::path_resolution::ResolveMode, nameres::path_resolution::ResolveMode,
path::{ModPath, PathKind}, path::{self, ModPath, PathKind},
AstIdWithPath, LocalModuleId, UnresolvedMacro, AstIdWithPath, LocalModuleId, MacroId, UnresolvedMacro,
}; };
use super::{DefMap, MacroSubNs}; use super::{DefMap, MacroSubNs};
@ -93,3 +98,57 @@ impl DefMap {
false false
} }
} }
pub(super) fn attr_macro_as_call_id(
db: &dyn DefDatabase,
item_attr: &AstIdWithPath<ast::Item>,
macro_attr: &Attr,
krate: CrateId,
def: MacroDefId,
) -> MacroCallId {
let arg = match macro_attr.input.as_deref() {
Some(AttrInput::TokenTree(tt)) => {
let mut tt = tt.as_ref().clone();
tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);
Some(tt)
}
_ => None,
};
def.as_lazy_macro(
db.upcast(),
krate,
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id,
},
macro_attr.span,
)
}
pub(super) fn derive_macro_as_call_id(
db: &dyn DefDatabase,
item_attr: &AstIdWithPath<ast::Adt>,
derive_attr_index: AttrId,
derive_pos: u32,
call_site: Span,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
let (macro_id, def_id) = resolver(item_attr.path.clone())
.filter(|(_, def_id)| def_id.is_derive())
.ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
let call_id = def_id.as_lazy_macro(
db.upcast(),
krate,
MacroCallKind::Derive {
ast_id: item_attr.ast_id,
derive_index: derive_pos,
derive_attr_index,
},
call_site,
);
Ok((macro_id, def_id, call_id))
}

View file

@ -30,9 +30,7 @@ use triomphe::Arc;
use crate::{ use crate::{
attr::Attrs, attr::Attrs,
attr_macro_as_call_id,
db::DefDatabase, db::DefDatabase,
derive_macro_as_call_id,
item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports}, item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports},
item_tree::{ item_tree::{
self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId,
@ -40,6 +38,7 @@ use crate::{
}, },
macro_call_as_call_id, macro_call_as_call_id_with_eager, macro_call_as_call_id, macro_call_as_call_id_with_eager,
nameres::{ nameres::{
attr_resolution::{attr_macro_as_call_id, derive_macro_as_call_id},
diagnostics::DefDiagnostic, diagnostics::DefDiagnostic,
mod_resolution::ModDir, mod_resolution::ModDir,
path_resolution::ReachedFixedPoint, path_resolution::ReachedFixedPoint,
@ -1245,7 +1244,9 @@ impl DefCollector<'_> {
MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. } MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. }
if expander.is_derive() if expander.is_derive()
) { ) {
// Resolved to `#[derive]` // Resolved to `#[derive]`, we don't actually expand this attribute like
// normal (as that would just be an identity expansion with extra output)
// Instead we treat derive attributes special and apply them separately.
let item_tree = tree.item_tree(self.db); let item_tree = tree.item_tree(self.db);
let ast_adt_id: FileAstId<ast::Adt> = match *mod_item { let ast_adt_id: FileAstId<ast::Adt> = match *mod_item {
@ -1284,7 +1285,8 @@ impl DefCollector<'_> {
} }
// We treat the #[derive] macro as an attribute call, but we do not resolve it for nameres collection. // We treat the #[derive] macro as an attribute call, but we do not resolve it for nameres collection.
// This is just a trick to be able to resolve the input to derives as proper paths. // This is just a trick to be able to resolve the input to derives
// as proper paths in `Semantics`.
// Check the comment in [`builtin_attr_macro`]. // Check the comment in [`builtin_attr_macro`].
let call_id = attr_macro_as_call_id( let call_id = attr_macro_as_call_id(
self.db, self.db,

View file

@ -155,7 +155,14 @@ impl PartialEq for AstIdMap {
impl Eq for AstIdMap {} impl Eq for AstIdMap {}
impl AstIdMap { impl AstIdMap {
pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap { pub(crate) fn ast_id_map(
db: &dyn ExpandDatabase,
file_id: span::HirFileId,
) -> triomphe::Arc<AstIdMap> {
triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
}
fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none()); assert!(node.parent().is_none());
let mut res = AstIdMap::default(); let mut res = AstIdMap::default();

View file

@ -117,14 +117,10 @@ impl RawAttrs {
None => return smallvec![attr.clone()], None => return smallvec![attr.clone()],
}; };
let index = attr.id; let index = attr.id;
let attrs = let attrs = parts
parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| { .enumerate()
let tree = Subtree { .take(1 << AttrId::CFG_ATTR_BITS)
delimiter: tt::Delimiter::invisible_spanned(attr.first()?.first_span()), .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));
token_trees: attr.to_vec(),
};
Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
});
let cfg_options = &crate_graph[krate].cfg_options; let cfg_options = &crate_graph[krate].cfg_options;
let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
@ -222,12 +218,40 @@ impl Attr {
Some(Attr { id, path, input, span }) Some(Attr { id, path, input, span })
} }
fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> { fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> {
// FIXME: Unecessary roundtrip tt -> ast -> tt let span = tt.first()?.first_span();
let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem); let path_end = tt
let ast = ast::Meta::cast(parse.syntax_node())?; .iter()
.position(|tt| {
!matches!(
tt,
tt::TokenTree::Leaf(
tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. }) | tt::Leaf::Ident(_),
)
)
})
.unwrap_or_else(|| tt.len());
Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id) let (path, input) = tt.split_at(path_end);
let path = Interned::new(ModPath::from_tt(db, path)?);
let input = match input.get(0) {
Some(tt::TokenTree::Subtree(tree)) => {
Some(Interned::new(AttrInput::TokenTree(Box::new(tree.clone()))))
}
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))) => {
let input = match input.get(1) {
Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text, .. }))) => {
//FIXME the trimming here isn't quite right, raw strings are not handled
Some(Interned::new(AttrInput::Literal(text.trim_matches('"').into())))
}
_ => None,
};
input
}
_ => None,
};
Some(Attr { id, path, input, span })
} }
pub fn path(&self) -> &ModPath { pub fn path(&self) -> &ModPath {
@ -277,29 +301,8 @@ impl Attr {
.token_trees .token_trees
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))) .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
.filter_map(move |tts| { .filter_map(move |tts| {
if tts.is_empty() { let span = tts.first()?.first_span();
return None; Some((ModPath::from_tt(db, tts)?, span))
}
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
// here or maybe just parse a mod path from a token tree directly
let subtree = tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(tts.first()?.first_span()),
token_trees: tts.to_vec(),
};
let (parse, span_map) =
mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
let meta = ast::Meta::cast(parse.syntax_node())?;
// Only simple paths are allowed.
if meta.eq_token().is_some() || meta.expr().is_some() || meta.token_tree().is_some()
{
return None;
}
let path = meta.path()?;
let call_site = span_map.span_at(path.syntax().text_range().start());
Some((
ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
call_site,
))
}); });
Some(paths) Some(paths)

View file

@ -48,11 +48,13 @@ impl BuiltinAttrExpander {
register_builtin! { expand: register_builtin! { expand:
(bench, Bench) => dummy_attr_expand, (bench, Bench) => dummy_attr_expand,
(cfg, Cfg) => dummy_attr_expand,
(cfg_attr, CfgAttr) => dummy_attr_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand, (cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand, (cfg_eval, CfgEval) => dummy_attr_expand,
(derive, Derive) => derive_attr_expand, (derive, Derive) => derive_expand,
// derive const is equivalent to derive for our proposes. // derive const is equivalent to derive for our proposes.
(derive_const, DeriveConst) => derive_attr_expand, (derive_const, DeriveConst) => derive_expand,
(global_allocator, GlobalAllocator) => dummy_attr_expand, (global_allocator, GlobalAllocator) => dummy_attr_expand,
(test, Test) => dummy_attr_expand, (test, Test) => dummy_attr_expand,
(test_case, TestCase) => dummy_attr_expand (test_case, TestCase) => dummy_attr_expand
@ -91,7 +93,7 @@ fn dummy_attr_expand(
/// always resolve as a derive without nameres recollecting them. /// always resolve as a derive without nameres recollecting them.
/// So this hacky approach is a lot more friendly for us, though it does require a bit of support in /// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
/// [`hir::Semantics`] to make this work. /// [`hir::Semantics`] to make this work.
fn derive_attr_expand( fn derive_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,

View file

@ -1,16 +1,11 @@
//! Defines database & queries for macro expansion. //! Defines database & queries for macro expansion.
use std::sync::OnceLock; use base_db::{salsa, CrateId, FileId, SourceDatabase};
use base_db::{
salsa::{self, debug::DebugQueryTable},
CrateId, Edition, FileId, SourceDatabase, VersionReq,
};
use either::Either; use either::Either;
use limit::Limit; use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult}; use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use span::{Span, SyntaxContextId}; use span::SyntaxContextId;
use syntax::{ use syntax::{
ast::{self, HasAttrs}, ast::{self, HasAttrs},
AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
@ -19,13 +14,14 @@ use triomphe::Arc;
use crate::{ use crate::{
ast_id_map::AstIdMap, ast_id_map::AstIdMap,
attrs::{collect_attrs, RawAttrs}, attrs::collect_attrs,
builtin_attr_macro::pseudo_derive_attr_expansion, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander, builtin_fn_macro::EagerExpander,
declarative::DeclarativeMacroExpander,
fixup::{self, reverse_fixups, SyntaxFixupUndoInfo}, fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
hygiene::{ hygiene::{
apply_mark, span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt, span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
SyntaxContextData, Transparency, SyntaxContextData,
}, },
proc_macro::ProcMacros, proc_macro::ProcMacros,
span_map::{RealSpanMap, SpanMap, SpanMapRef}, span_map::{RealSpanMap, SpanMap, SpanMapRef},
@ -43,82 +39,6 @@ use crate::{
/// Actual max for `analysis-stats .` at some point: 30672. /// Actual max for `analysis-stats .` at some point: 30672.
static TOKEN_LIMIT: Limit = Limit::new(1_048_576); static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
/// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander {
pub mac: mbe::DeclarativeMacro<span::Span>,
pub transparency: Transparency,
}
// FIXME: Remove this once we drop support for 1.76
static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();
impl DeclarativeMacroExpander {
pub fn expand(
&self,
db: &dyn ExpandDatabase,
tt: tt::Subtree,
call_id: MacroCallId,
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(call_id);
let toolchain = &db.crate_graph()[loc.def.krate].toolchain;
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
pre: base_db::Prerelease::EMPTY,
build: base_db::BuildMetadata::EMPTY,
major: version.major,
minor: version.minor,
patch: version.patch,
},
)
});
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self
.mac
.expand(
&tt,
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
new_meta_vars,
loc.call_site,
)
.map_err(Into::into),
}
}
pub fn expand_unhygienic(
&self,
db: &dyn ExpandDatabase,
tt: tt::Subtree,
krate: CrateId,
call_site: Span,
) -> ExpandResult<tt::Subtree> {
let toolchain = &db.crate_graph()[krate].toolchain;
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
pre: base_db::Prerelease::EMPTY,
build: base_db::BuildMetadata::EMPTY,
major: version.major,
minor: version.minor,
patch: version.patch,
},
)
});
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
}
}
}
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander { pub enum TokenExpander {
/// Old-style `macro_rules` or the new macros 2.0 /// Old-style `macro_rules` or the new macros 2.0
@ -141,6 +61,7 @@ pub trait ExpandDatabase: SourceDatabase {
#[salsa::input] #[salsa::input]
fn proc_macros(&self) -> Arc<ProcMacros>; fn proc_macros(&self) -> Arc<ProcMacros>;
#[salsa::invoke(AstIdMap::ast_id_map)]
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>; fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
/// Main public API -- parses a hir file, not caring whether it's a real /// Main public API -- parses a hir file, not caring whether it's a real
@ -156,8 +77,10 @@ pub trait ExpandDatabase: SourceDatabase {
macro_file: MacroFileId, macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>; ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
#[salsa::transparent] #[salsa::transparent]
#[salsa::invoke(SpanMap::new)]
fn span_map(&self, file_id: HirFileId) -> SpanMap; fn span_map(&self, file_id: HirFileId) -> SpanMap;
#[salsa::invoke(crate::span_map::real_span_map)]
fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>; fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
@ -173,6 +96,7 @@ pub trait ExpandDatabase: SourceDatabase {
#[salsa::transparent] #[salsa::transparent]
fn setup_syntax_context_root(&self) -> (); fn setup_syntax_context_root(&self) -> ();
#[salsa::transparent] #[salsa::transparent]
#[salsa::invoke(crate::hygiene::dump_syntax_contexts)]
fn dump_syntax_contexts(&self) -> String; fn dump_syntax_contexts(&self) -> String;
/// Lowers syntactic macro call to a token tree representation. That's a firewall /// Lowers syntactic macro call to a token tree representation. That's a firewall
@ -184,8 +108,10 @@ pub trait ExpandDatabase: SourceDatabase {
) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>; ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
/// Fetches the expander for this macro. /// Fetches the expander for this macro.
#[salsa::transparent] #[salsa::transparent]
#[salsa::invoke(TokenExpander::macro_expander)]
fn macro_expander(&self, id: MacroDefId) -> TokenExpander; fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
/// Fetches (and compiles) the expander of this decl macro. /// Fetches (and compiles) the expander of this decl macro.
#[salsa::invoke(DeclarativeMacroExpander::expander)]
fn decl_macro_expander( fn decl_macro_expander(
&self, &self,
def_crate: CrateId, def_crate: CrateId,
@ -203,36 +129,6 @@ pub trait ExpandDatabase: SourceDatabase {
) -> ExpandResult<Box<[SyntaxError]>>; ) -> ExpandResult<Box<[SyntaxError]>>;
} }
#[inline]
pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
HirFileIdRepr::MacroFile(m) => {
SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
}
}
}
pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
use syntax::ast::HasModuleItem;
let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(file_id.into());
let tree = db.parse(file_id).tree();
// FIXME: Descend into modules and other item containing items that are not annotated with attributes
// and allocate pairs for those as well. This gives us finer grained span anchors resulting in
// better incrementality
pairs.extend(
tree.items()
.map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
);
Arc::new(RealSpanMap::from_file(
file_id,
pairs.into_boxed_slice(),
tree.syntax().text_range().end(),
))
}
/// This expands the given macro call, but with different arguments. This is /// This expands the given macro call, but with different arguments. This is
/// used for completion, where we want to see what 'would happen' if we insert a /// used for completion, where we want to see what 'would happen' if we insert a
/// token. The `token_to_map` mapped down into the expansion, with the mapped /// token. The `token_to_map` mapped down into the expansion, with the mapped
@ -357,10 +253,6 @@ pub fn expand_speculative(
Some((node.syntax_node(), token)) Some((node.syntax_node(), token))
} }
fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
}
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode { fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() { match file_id.repr() {
HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(), HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
@ -412,7 +304,10 @@ fn parse_macro_expansion_error(
.map(|it| it.0.errors().to_vec().into_boxed_slice()) .map(|it| it.0.errors().to_vec().into_boxed_slice())
} }
fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) { pub(crate) fn parse_with_map(
db: &dyn ExpandDatabase,
file_id: HirFileId,
) -> (Parse<SyntaxNode>, SpanMap) {
match file_id.repr() { match file_id.repr() {
HirFileIdRepr::FileId(file_id) => { HirFileIdRepr::FileId(file_id) => {
(db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id))) (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
@ -581,100 +476,18 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default() .unwrap_or_default()
} }
fn decl_macro_expander( impl TokenExpander {
db: &dyn ExpandDatabase, fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
def_crate: CrateId, match id.kind {
id: AstId<ast::Macro>, MacroDefKind::Declarative(ast_id) => {
) -> Arc<DeclarativeMacroExpander> { TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
let crate_data = &db.crate_graph()[def_crate]; }
let is_2021 = crate_data.edition >= Edition::Edition2021; MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
let (root, map) = parse_with_map(db, id.file_id); MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
let root = root.syntax_node(); MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
let transparency = |node| { MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
// ... would be nice to have the item tree here
let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
match &*attrs
.iter()
.find(|it| {
it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
})?
.token_tree_value()?
.token_trees
{
[tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
"transparent" => Some(Transparency::Transparent),
"semitransparent" => Some(Transparency::SemiTransparent),
"opaque" => Some(Transparency::Opaque),
_ => None,
},
_ => None,
} }
};
let toolchain = crate_data.toolchain.as_ref();
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
pre: base_db::Prerelease::EMPTY,
build: base_db::BuildMetadata::EMPTY,
major: version.major,
minor: version.minor,
patch: version.patch,
},
)
});
let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
ast::Macro::MacroRules(macro_rules) => (
match macro_rules.token_tree() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(
arg.syntax(),
map.as_ref(),
map.span_for_range(macro_rules.macro_rules_token().unwrap().text_range()),
);
mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars)
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
),
ast::Macro::MacroDef(macro_def) => (
match macro_def.body() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(
arg.syntax(),
map.as_ref(),
map.span_for_range(macro_def.macro_token().unwrap().text_range()),
);
mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars)
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_def).unwrap_or(Transparency::Opaque),
),
};
Arc::new(DeclarativeMacroExpander { mac, transparency })
}
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
match id.kind {
MacroDefKind::Declarative(ast_id) => {
TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
}
MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
} }
} }
@ -862,40 +675,3 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
fn setup_syntax_context_root(db: &dyn ExpandDatabase) { fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
db.intern_syntax_context(SyntaxContextData::root()); db.intern_syntax_context(SyntaxContextData::root());
} }
/// Renders a human-readable dump of every interned macro expansion and syntax
/// context in the database, for debugging hygiene.
fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
    let mut s = String::from("Expansions:");
    // Sort by intern key so the dump is deterministic across runs.
    let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
    entries.sort_by_key(|e| e.key);
    for e in entries {
        let id = e.key;
        let expn_data = e.value.as_ref().unwrap();
        s.push_str(&format!(
            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
            id,
            expn_data.kind.file_id(),
            expn_data.call_site,
            SyntaxContextId::ROOT, // FIXME expn_data.def_site,
            expn_data.kind.descr(),
        ));
    }

    s.push_str("\n\nSyntaxContexts:\n");
    let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
    entries.sort_by_key(|e| e.key);
    for e in entries {
        // Adapter that routes `Debug` formatting through `fancy_debug`, which needs
        // access to the database and the context's own id in addition to its data.
        struct SyntaxContextDebug<'a>(
            &'a dyn ExpandDatabase,
            SyntaxContextId,
            &'a SyntaxContextData,
        );
        impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                self.2.fancy_debug(self.1, self.0, f)
            }
        }
        stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
    }
    s
}

View file

@ -0,0 +1,177 @@
//! Compiled declarative macro expanders (`macro_rules!`` and `macro`)
use std::sync::OnceLock;
use base_db::{CrateId, Edition, VersionReq};
use span::{MacroCallId, Span};
use syntax::{ast, AstNode};
use triomphe::Arc;
use crate::{
attrs::RawAttrs,
db::ExpandDatabase,
hygiene::{apply_mark, Transparency},
tt, AstId, ExpandError, ExpandResult,
};
/// Old-style `macro_rules` or the new macros 2.0
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct DeclarativeMacroExpander {
    // The compiled macro definition (rules plus any parse error).
    pub mac: mbe::DeclarativeMacro<span::Span>,
    // Hygiene transparency to apply when expanding; defaults depend on the
    // definition style and can be overridden via `#[rustc_macro_transparency]`.
    pub transparency: Transparency,
}

// Lazily-parsed `>=1.76` requirement gating the new meta-variable expression
// behavior; matched against the crate's toolchain version.
// FIXME: Remove this once we drop support for 1.76
static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();
impl DeclarativeMacroExpander {
    /// Returns whether the given toolchain version enables the new
    /// meta-variable behavior (requires rustc `>= 1.76`, see [`REQUIREMENT`]).
    ///
    /// Pre-release and build metadata are stripped before matching so that
    /// nightly/beta toolchains of a matching base version are accepted too.
    // FIXME: Remove this once we drop support for 1.76
    fn supports_new_meta_vars(toolchain: Option<&base_db::Version>) -> bool {
        toolchain.map_or(false, |version| {
            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
                &base_db::Version {
                    pre: base_db::Prerelease::EMPTY,
                    build: base_db::BuildMetadata::EMPTY,
                    major: version.major,
                    minor: version.minor,
                    patch: version.patch,
                },
            )
        })
    }

    /// Expands the macro call `call_id` with the given argument token tree,
    /// applying hygiene marks (per `self.transparency`) to the spans of the
    /// produced tokens.
    ///
    /// Returns an error result if the macro definition itself failed to parse.
    pub fn expand(
        &self,
        db: &dyn ExpandDatabase,
        tt: tt::Subtree,
        call_id: MacroCallId,
    ) -> ExpandResult<tt::Subtree> {
        let loc = db.lookup_intern_macro_call(call_id);
        let toolchain = &db.crate_graph()[loc.def.krate].toolchain;
        let new_meta_vars = Self::supports_new_meta_vars(toolchain.as_ref());
        match self.mac.err() {
            Some(e) => ExpandResult::new(
                tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
                ExpandError::other(format!("invalid macro definition: {e}")),
            ),
            None => self
                .mac
                .expand(
                    &tt,
                    |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
                    new_meta_vars,
                    loc.call_site,
                )
                .map_err(Into::into),
        }
    }

    /// Like [`Self::expand`], but applies no hygiene marks to the output spans.
    pub fn expand_unhygienic(
        &self,
        db: &dyn ExpandDatabase,
        tt: tt::Subtree,
        krate: CrateId,
        call_site: Span,
    ) -> ExpandResult<tt::Subtree> {
        let toolchain = &db.crate_graph()[krate].toolchain;
        let new_meta_vars = Self::supports_new_meta_vars(toolchain.as_ref());
        match self.mac.err() {
            Some(e) => ExpandResult::new(
                tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
                ExpandError::other(format!("invalid macro definition: {e}")),
            ),
            None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
        }
    }

    /// Compiles the declarative macro at `id` (either `macro_rules!` or a
    /// macros 2.0 `macro`) into a [`DeclarativeMacroExpander`], also reading
    /// its `#[rustc_macro_transparency]` override if present.
    pub(crate) fn expander(
        db: &dyn ExpandDatabase,
        def_crate: CrateId,
        id: AstId<ast::Macro>,
    ) -> Arc<DeclarativeMacroExpander> {
        let crate_data = &db.crate_graph()[def_crate];
        let is_2021 = crate_data.edition >= Edition::Edition2021;
        let (root, map) = crate::db::parse_with_map(db, id.file_id);
        let root = root.syntax_node();

        // Reads the `#[rustc_macro_transparency]` attribute on the definition,
        // if any. ... would be nice to have the item tree here
        let transparency = |node| {
            let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
            match &*attrs
                .iter()
                .find(|it| {
                    it.path.as_ident().and_then(|it| it.as_str())
                        == Some("rustc_macro_transparency")
                })?
                .token_tree_value()?
                .token_trees
            {
                [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
                    "transparent" => Some(Transparency::Transparent),
                    "semitransparent" => Some(Transparency::SemiTransparent),
                    "opaque" => Some(Transparency::Opaque),
                    _ => None,
                },
                _ => None,
            }
        };
        let new_meta_vars = Self::supports_new_meta_vars(crate_data.toolchain.as_ref());

        let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
            // `macro_rules!` definitions default to semi-transparent hygiene.
            ast::Macro::MacroRules(macro_rules) => (
                match macro_rules.token_tree() {
                    Some(arg) => {
                        let tt = mbe::syntax_node_to_token_tree(
                            arg.syntax(),
                            map.as_ref(),
                            map.span_for_range(
                                macro_rules.macro_rules_token().unwrap().text_range(),
                            ),
                        );
                        mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars)
                    }
                    None => mbe::DeclarativeMacro::from_err(
                        mbe::ParseError::Expected("expected a token tree".into()),
                        is_2021,
                    ),
                },
                transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
            ),
            // macros 2.0 (`macro`) definitions default to opaque hygiene.
            ast::Macro::MacroDef(macro_def) => (
                match macro_def.body() {
                    Some(arg) => {
                        let tt = mbe::syntax_node_to_token_tree(
                            arg.syntax(),
                            map.as_ref(),
                            map.span_for_range(macro_def.macro_token().unwrap().text_range()),
                        );
                        mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars)
                    }
                    None => mbe::DeclarativeMacro::from_err(
                        mbe::ParseError::Expected("expected a token tree".into()),
                        is_2021,
                    ),
                },
                transparency(&macro_def).unwrap_or(Transparency::Opaque),
            ),
        };
        Arc::new(DeclarativeMacroExpander { mac, transparency })
    }
}

View file

@ -245,3 +245,43 @@ pub fn marks_rev(
}) })
.map(|ctx| ctx.outer_mark(db)) .map(|ctx| ctx.outer_mark(db))
} }
/// Renders a human-readable dump of every interned macro expansion and syntax
/// context in the database, for debugging hygiene.
pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
    use crate::db::{InternMacroCallLookupQuery, InternSyntaxContextLookupQuery};
    use base_db::salsa::debug::DebugQueryTable;

    let mut s = String::from("Expansions:");
    // Sort by intern key so the dump is deterministic across runs.
    let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
    entries.sort_by_key(|e| e.key);
    for e in entries {
        let id = e.key;
        let expn_data = e.value.as_ref().unwrap();
        s.push_str(&format!(
            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
            id,
            expn_data.kind.file_id(),
            expn_data.call_site,
            SyntaxContextId::ROOT, // FIXME expn_data.def_site,
            expn_data.kind.descr(),
        ));
    }

    s.push_str("\n\nSyntaxContexts:\n");
    let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
    entries.sort_by_key(|e| e.key);
    for e in entries {
        // Adapter that routes `Debug` formatting through `fancy_debug`, which needs
        // access to the database and the context's own id in addition to its data.
        struct SyntaxContextDebug<'a>(
            &'a dyn ExpandDatabase,
            SyntaxContextId,
            &'a SyntaxContextData,
        );
        impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                self.2.fancy_debug(self.1, self.0, f)
            }
        }
        stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
    }
    s
}

View file

@ -11,16 +11,18 @@ pub mod attrs;
pub mod builtin_attr_macro; pub mod builtin_attr_macro;
pub mod builtin_derive_macro; pub mod builtin_derive_macro;
pub mod builtin_fn_macro; pub mod builtin_fn_macro;
pub mod change;
pub mod db; pub mod db;
pub mod declarative;
pub mod eager; pub mod eager;
pub mod files; pub mod files;
pub mod change;
pub mod hygiene; pub mod hygiene;
pub mod mod_path; pub mod mod_path;
pub mod name; pub mod name;
pub mod proc_macro; pub mod proc_macro;
pub mod quote; pub mod quote;
pub mod span_map; pub mod span_map;
mod fixup; mod fixup;
use attrs::collect_attrs; use attrs::collect_attrs;
@ -167,7 +169,8 @@ pub struct MacroCallLoc {
pub krate: CrateId, pub krate: CrateId,
/// Some if this is a macro call for an eager macro. Note that this is `None` /// Some if this is a macro call for an eager macro. Note that this is `None`
/// for the eager input macro file. /// for the eager input macro file.
// FIXME: This seems bad to save in an interned structure // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing
// leakage problems here
eager: Option<Arc<EagerCallInfo>>, eager: Option<Arc<EagerCallInfo>>,
pub kind: MacroCallKind, pub kind: MacroCallKind,
pub call_site: Span, pub call_site: Span,
@ -220,7 +223,7 @@ pub enum MacroCallKind {
}, },
Attr { Attr {
ast_id: AstId<ast::Item>, ast_id: AstId<ast::Item>,
// FIXME: This is being interned, subtrees can very quickly differ just slightly causing // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing
// leakage problems here // leakage problems here
attr_args: Option<Arc<tt::Subtree>>, attr_args: Option<Arc<tt::Subtree>>,
/// Syntactical index of the invoking `#[attribute]`. /// Syntactical index of the invoking `#[attribute]`.

View file

@ -10,6 +10,7 @@ use crate::{
hygiene::{marks_rev, SyntaxContextExt, Transparency}, hygiene::{marks_rev, SyntaxContextExt, Transparency},
name::{known, AsName, Name}, name::{known, AsName, Name},
span_map::SpanMapRef, span_map::SpanMapRef,
tt,
}; };
use base_db::CrateId; use base_db::CrateId;
use smallvec::SmallVec; use smallvec::SmallVec;
@ -39,7 +40,7 @@ pub enum PathKind {
Crate, Crate,
/// Absolute path (::foo) /// Absolute path (::foo)
Abs, Abs,
// FIXME: Remove this // FIXME: Can we remove this somehow?
/// `$crate` from macro expansion /// `$crate` from macro expansion
DollarCrate(CrateId), DollarCrate(CrateId),
} }
@ -50,11 +51,16 @@ impl ModPath {
path: ast::Path, path: ast::Path,
span_map: SpanMapRef<'_>, span_map: SpanMapRef<'_>,
) -> Option<ModPath> { ) -> Option<ModPath> {
convert_path(db, None, path, span_map) convert_path(db, path, span_map)
}
pub fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
convert_path_tt(db, tt)
} }
pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath { pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
let segments = segments.into_iter().collect(); let mut segments: SmallVec<_> = segments.into_iter().collect();
segments.shrink_to_fit();
ModPath { kind, segments } ModPath { kind, segments }
} }
@ -193,22 +199,15 @@ fn display_fmt_path(
fn convert_path( fn convert_path(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
prefix: Option<ModPath>,
path: ast::Path, path: ast::Path,
span_map: SpanMapRef<'_>, span_map: SpanMapRef<'_>,
) -> Option<ModPath> { ) -> Option<ModPath> {
let prefix = match path.qualifier() { let mut segments = path.segments();
Some(qual) => Some(convert_path(db, prefix, qual, span_map)?),
None => prefix,
};
let segment = path.segment()?; let segment = &segments.next()?;
let mut mod_path = match segment.kind()? { let mut mod_path = match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => { ast::PathSegmentKind::Name(name_ref) => {
if name_ref.text() == "$crate" { if name_ref.text() == "$crate" {
if prefix.is_some() {
return None;
}
ModPath::from_kind( ModPath::from_kind(
resolve_crate_root( resolve_crate_root(
db, db,
@ -218,41 +217,36 @@ fn convert_path(
.unwrap_or(PathKind::Crate), .unwrap_or(PathKind::Crate),
) )
} else { } else {
let mut res = prefix.unwrap_or_else(|| { let mut res = ModPath::from_kind(
ModPath::from_kind( segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs), );
)
});
res.segments.push(name_ref.as_name()); res.segments.push(name_ref.as_name());
res res
} }
} }
ast::PathSegmentKind::SelfTypeKw => { ast::PathSegmentKind::SelfTypeKw => {
if prefix.is_some() {
return None;
}
ModPath::from_segments(PathKind::Plain, Some(known::SELF_TYPE)) ModPath::from_segments(PathKind::Plain, Some(known::SELF_TYPE))
} }
ast::PathSegmentKind::CrateKw => { ast::PathSegmentKind::CrateKw => ModPath::from_segments(PathKind::Crate, iter::empty()),
if prefix.is_some() { ast::PathSegmentKind::SelfKw => ModPath::from_segments(PathKind::Super(0), iter::empty()),
return None;
}
ModPath::from_segments(PathKind::Crate, iter::empty())
}
ast::PathSegmentKind::SelfKw => {
if prefix.is_some() {
return None;
}
ModPath::from_segments(PathKind::Super(0), iter::empty())
}
ast::PathSegmentKind::SuperKw => { ast::PathSegmentKind::SuperKw => {
let nested_super_count = match prefix.map(|p| p.kind) { let mut deg = 1;
Some(PathKind::Super(n)) => n, let mut next_segment = None;
Some(_) => return None, while let Some(segment) = segments.next() {
None => 0, match segment.kind()? {
}; ast::PathSegmentKind::SuperKw => deg += 1,
ast::PathSegmentKind::Name(name) => {
next_segment = Some(name.as_name());
break;
}
ast::PathSegmentKind::Type { .. }
| ast::PathSegmentKind::SelfTypeKw
| ast::PathSegmentKind::SelfKw
| ast::PathSegmentKind::CrateKw => return None,
}
}
ModPath::from_segments(PathKind::Super(nested_super_count + 1), iter::empty()) ModPath::from_segments(PathKind::Super(deg), next_segment)
} }
ast::PathSegmentKind::Type { .. } => { ast::PathSegmentKind::Type { .. } => {
// not allowed in imports // not allowed in imports
@ -260,6 +254,14 @@ fn convert_path(
} }
}; };
for segment in segments {
let name = match segment.kind()? {
ast::PathSegmentKind::Name(name) => name.as_name(),
_ => return None,
};
mod_path.segments.push(name);
}
// handle local_inner_macros : // handle local_inner_macros :
// Basically, even in rustc it is quite hacky: // Basically, even in rustc it is quite hacky:
// https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456 // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456
@ -281,6 +283,46 @@ fn convert_path(
Some(mod_path) Some(mod_path)
} }
/// Builds a [`ModPath`] from a macro-expanded token stream (e.g. an attribute
/// path produced by a macro), mirroring what `convert_path` does for AST paths.
///
/// Returns `None` when the leading tokens do not form a valid path prefix.
fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
    // Only leaf tokens matter for path segments; subtrees (generic args etc.)
    // are skipped entirely.
    let mut leafs = tt.iter().filter_map(|tt| match tt {
        tt::TokenTree::Leaf(leaf) => Some(leaf),
        tt::TokenTree::Subtree(_) => None,
    });
    let mut segments = smallvec::smallvec![];
    // The first token(s) determine the path kind.
    let kind = match leafs.next()? {
        // A leading `::` marks an absolute path; a lone `:` is malformed.
        tt::Leaf::Punct(tt::Punct { char: ':', .. }) => match leafs.next()? {
            tt::Leaf::Punct(tt::Punct { char: ':', .. }) => PathKind::Abs,
            _ => return None,
        },
        // `$crate` resolves through hygiene to the defining crate's root; falls
        // back to `crate` if resolution fails.
        tt::Leaf::Ident(tt::Ident { text, span }) if text == "$crate" => {
            resolve_crate_root(db, span.ctx).map(PathKind::DollarCrate).unwrap_or(PathKind::Crate)
        }
        // `self` is modeled as zero levels of `super`.
        tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::Super(0),
        tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => {
            // Count consecutive `super` segments; the first non-`super` ident
            // becomes the first real segment.
            let mut deg = 1;
            while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leafs.next() {
                if text != "super" {
                    segments.push(Name::new_text_dont_use(text.clone()));
                    break;
                }
                deg += 1;
            }
            PathKind::Super(deg)
        }
        tt::Leaf::Ident(tt::Ident { text, .. }) if text == "crate" => PathKind::Crate,
        tt::Leaf::Ident(ident) => {
            segments.push(Name::new_text_dont_use(ident.text.clone()));
            PathKind::Plain
        }
        _ => return None,
    };
    // Remaining idents become path segments; punctuation (`::`) is ignored.
    segments.extend(leafs.filter_map(|leaf| match leaf {
        ::tt::Leaf::Ident(ident) => Some(Name::new_text_dont_use(ident.text.clone())),
        _ => None,
    }));
    Some(ModPath { kind, segments })
}
pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> { pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
// When resolving `$crate` from a `macro_rules!` invoked in a `macro`, // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
// we don't want to pretend that the `macro_rules!` definition is in the `macro` // we don't want to pretend that the `macro_rules!` definition is in the `macro`

View file

@ -1,10 +1,12 @@
//! Span maps for real files and macro expansions. //! Span maps for real files and macro expansions.
use span::Span; use span::{FileId, HirFileId, HirFileIdRepr, Span};
use syntax::TextRange; use syntax::{AstNode, TextRange};
use triomphe::Arc; use triomphe::Arc;
pub use span::RealSpanMap; pub use span::RealSpanMap;
use crate::db::ExpandDatabase;
pub type ExpansionSpanMap = span::SpanMap<Span>; pub type ExpansionSpanMap = span::SpanMap<Span>;
/// Spanmap for a macro file or a real file /// Spanmap for a macro file or a real file
@ -34,7 +36,6 @@ impl mbe::SpanMapper<Span> for SpanMapRef<'_> {
self.span_for_range(range) self.span_for_range(range)
} }
} }
impl SpanMap { impl SpanMap {
pub fn span_for_range(&self, range: TextRange) -> Span { pub fn span_for_range(&self, range: TextRange) -> Span {
match self { match self {
@ -53,6 +54,16 @@ impl SpanMap {
Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map), Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
} }
} }
/// Fetches the span map for `file_id` from the database, choosing the
/// real-file or macro-expansion variant based on the file id's representation.
#[inline]
pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
    match file_id.repr() {
        HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
        HirFileIdRepr::MacroFile(m) => {
            SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
        }
    }
}
} }
impl SpanMapRef<'_> { impl SpanMapRef<'_> {
@ -63,3 +74,23 @@ impl SpanMapRef<'_> {
} }
} }
} }
/// Computes the [`RealSpanMap`] for a real (non-macro) file: a sorted list of
/// span anchors, one for the file root plus one per top-level item.
pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
    use syntax::ast::HasModuleItem;
    let ast_id_map = db.ast_id_map(file_id.into());
    let tree = db.parse(file_id).tree();
    // FIXME: Descend into modules and other item containing items that are not annotated with attributes
    // and allocate pairs for those as well. This gives us finer grained span anchors resulting in
    // better incrementality
    let anchors: Box<[_]> = std::iter::once((syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID))
        .chain(tree.items().map(|item| {
            (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())
        }))
        .collect();
    Arc::new(RealSpanMap::from_file(file_id, anchors, tree.syntax().text_range().end()))
}

View file

@ -1,8 +1,8 @@
//! Type inference-based diagnostics. //! Type inference-based diagnostics.
mod decl_check;
mod expr; mod expr;
mod match_check; mod match_check;
mod unsafe_check; mod unsafe_check;
mod decl_check;
pub use crate::diagnostics::{ pub use crate::diagnostics::{
decl_check::{incorrect_case, CaseType, IncorrectCase}, decl_check::{incorrect_case, CaseType, IncorrectCase},

View file

@ -41,10 +41,10 @@ pub mod mir;
pub mod primitive; pub mod primitive;
pub mod traits; pub mod traits;
#[cfg(test)]
mod tests;
#[cfg(test)] #[cfg(test)]
mod test_db; mod test_db;
#[cfg(test)]
mod tests;
use std::{ use std::{
collections::hash_map::Entry, collections::hash_map::Entry,

View file

@ -21,11 +21,11 @@ use hir_def::{
}; };
use la_arena::{Arena, ArenaMap, Idx, RawIdx}; use la_arena::{Arena, ArenaMap, Idx, RawIdx};
mod borrowck;
mod eval; mod eval;
mod lower; mod lower;
mod borrowck;
mod pretty;
mod monomorphization; mod monomorphization;
mod pretty;
pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason}; pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
pub use eval::{ pub use eval::{

View file

@ -1,14 +1,14 @@
mod never_type;
mod coercion; mod coercion;
mod regression; mod diagnostics;
mod simple;
mod patterns;
mod traits;
mod method_resolution;
mod macros;
mod display_source_code; mod display_source_code;
mod incremental; mod incremental;
mod diagnostics; mod macros;
mod method_resolution;
mod never_type;
mod patterns;
mod regression;
mod simple;
mod traits;
use std::{collections::HashMap, env}; use std::{collections::HashMap, env};

View file

@ -2,7 +2,6 @@
use std::ops::ControlFlow; use std::ops::ControlFlow;
use base_db::FileId;
use hir_def::{ use hir_def::{
attr::AttrsWithOwner, attr::AttrsWithOwner,
item_scope::ItemInNs, item_scope::ItemInNs,
@ -11,12 +10,8 @@ use hir_def::{
resolver::{HasResolver, Resolver, TypeNs}, resolver::{HasResolver, Resolver, TypeNs},
AssocItemId, AttrDefId, ModuleDefId, AssocItemId, AttrDefId, ModuleDefId,
}; };
use hir_expand::{ use hir_expand::{mod_path::PathKind, name::Name};
name::Name,
span_map::{RealSpanMap, SpanMapRef},
};
use hir_ty::{db::HirDatabase, method_resolution}; use hir_ty::{db::HirDatabase, method_resolution};
use syntax::{ast, AstNode};
use crate::{ use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl, Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
@ -129,7 +124,7 @@ fn resolve_doc_path_on_(
AttrDefId::GenericParamId(_) => return None, AttrDefId::GenericParamId(_) => return None,
}; };
let mut modpath = modpath_from_str(db, link)?; let mut modpath = modpath_from_str(link)?;
let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath); let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
if resolved.is_none() { if resolved.is_none() {
@ -305,34 +300,37 @@ fn as_module_def_if_namespace_matches(
(ns.unwrap_or(expected_ns) == expected_ns).then(|| DocLinkDef::ModuleDef(def)) (ns.unwrap_or(expected_ns) == expected_ns).then(|| DocLinkDef::ModuleDef(def))
} }
fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> { fn modpath_from_str(link: &str) -> Option<ModPath> {
// FIXME: this is not how we should get a mod path here. // FIXME: this is not how we should get a mod path here.
let try_get_modpath = |link: &str| { let try_get_modpath = |link: &str| {
let ast_path = ast::SourceFile::parse(&format!("type T = {link};")) let mut parts = link.split("::");
.syntax_node() let mut first_segment = None;
.descendants() let kind = match parts.next()? {
.find_map(ast::Path::cast)?; "" => PathKind::Abs,
if ast_path.syntax().text() != link { "crate" => PathKind::Crate,
return None; "self" => PathKind::Super(0),
} "super" => {
ModPath::from_src( let mut deg = 1;
db.upcast(), while let Some(segment) = parts.next() {
ast_path, if segment == "super" {
SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::BOGUS)), deg += 1;
) } else {
first_segment = Some(segment);
break;
}
}
PathKind::Super(deg)
}
segment => {
first_segment = Some(segment);
PathKind::Plain
}
};
let parts = first_segment.into_iter().chain(parts).map(|segment| match segment.parse() {
Ok(idx) => Name::new_tuple_field(idx),
Err(_) => Name::new_text_dont_use(segment.into()),
});
Some(ModPath::from_segments(kind, parts))
}; };
try_get_modpath(link)
let full = try_get_modpath(link);
if full.is_some() {
return full;
}
// Tuple field names cannot be a part of `ModPath` usually, but rustdoc can
// resolve doc paths like `TupleStruct::0`.
// FIXME: Find a better way to handle these.
let (base, maybe_tuple_field) = link.rsplit_once("::")?;
let tuple_field = Name::new_tuple_field(maybe_tuple_field.parse().ok()?);
let mut modpath = try_get_modpath(base)?;
modpath.push_segment(tuple_field);
Some(modpath)
} }

View file

@ -24,12 +24,12 @@
mod semantics; mod semantics;
mod source_analyzer; mod source_analyzer;
mod from_id;
mod attrs; mod attrs;
mod from_id;
mod has_source; mod has_source;
pub mod diagnostics;
pub mod db; pub mod db;
pub mod diagnostics;
pub mod symbols; pub mod symbols;
mod display; mod display;
@ -70,13 +70,12 @@ use hir_ty::{
primitive::UintTy, primitive::UintTy,
traits::FnTrait, traits::FnTrait,
AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg, AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution, GenericArgData, InferenceDiagnostic, Interner, ParamKind, QuantifiedWhereClause, Scalar,
TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, ValueTyDefId, Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind,
WhereClause, ValueTyDefId, WhereClause,
}; };
use itertools::Itertools; use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind; use nameres::diagnostics::DefDiagnosticKind;
use once_cell::unsync::Lazy;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use stdx::{impl_from, never}; use stdx::{impl_from, never};
use syntax::{ use syntax::{
@ -1592,53 +1591,46 @@ impl DefWithBody {
} }
for diag in source_map.diagnostics() { for diag in source_map.diagnostics() {
match diag { acc.push(match diag {
BodyDiagnostic::InactiveCode { node, cfg, opts } => acc.push( BodyDiagnostic::InactiveCode { node, cfg, opts } => {
InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into(), InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
), }
BodyDiagnostic::MacroError { node, message } => acc.push( BodyDiagnostic::MacroError { node, message } => MacroError {
MacroError { node: (*node).map(|it| it.into()),
node: (*node).map(|it| it.into()), precise_location: None,
precise_location: None, message: message.to_string(),
message: message.to_string(), }
} .into(),
.into(), BodyDiagnostic::UnresolvedProcMacro { node, krate } => UnresolvedProcMacro {
), node: (*node).map(|it| it.into()),
BodyDiagnostic::UnresolvedProcMacro { node, krate } => acc.push( precise_location: None,
UnresolvedProcMacro { macro_name: None,
node: (*node).map(|it| it.into()), kind: MacroKind::ProcMacro,
precise_location: None, krate: *krate,
macro_name: None, }
kind: MacroKind::ProcMacro, .into(),
krate: *krate, BodyDiagnostic::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
} macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
.into(), precise_location: None,
), path: path.clone(),
BodyDiagnostic::UnresolvedMacroCall { node, path } => acc.push( is_bang: true,
UnresolvedMacroCall { }
macro_call: (*node).map(|ast_ptr| ast_ptr.into()), .into(),
precise_location: None,
path: path.clone(),
is_bang: true,
}
.into(),
),
BodyDiagnostic::UnreachableLabel { node, name } => { BodyDiagnostic::UnreachableLabel { node, name } => {
acc.push(UnreachableLabel { node: *node, name: name.clone() }.into()) UnreachableLabel { node: *node, name: name.clone() }.into()
} }
BodyDiagnostic::UndeclaredLabel { node, name } => { BodyDiagnostic::UndeclaredLabel { node, name } => {
acc.push(UndeclaredLabel { node: *node, name: name.clone() }.into()) UndeclaredLabel { node: *node, name: name.clone() }.into()
} }
} });
} }
let infer = db.infer(self.into()); let infer = db.infer(self.into());
let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic"); let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic"); let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic");
for d in &infer.diagnostics { for d in &infer.diagnostics {
match d { acc.push(match d {
&hir_ty::InferenceDiagnostic::NoSuchField { field: expr, private } => { &InferenceDiagnostic::NoSuchField { field: expr, private } => {
let expr_or_pat = match expr { let expr_or_pat = match expr {
ExprOrPatId::ExprId(expr) => { ExprOrPatId::ExprId(expr) => {
source_map.field_syntax(expr).map(AstPtr::wrap_left) source_map.field_syntax(expr).map(AstPtr::wrap_left)
@ -1647,57 +1639,48 @@ impl DefWithBody {
source_map.pat_field_syntax(pat).map(AstPtr::wrap_right) source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
} }
}; };
acc.push(NoSuchField { field: expr_or_pat, private }.into()) NoSuchField { field: expr_or_pat, private }.into()
} }
&hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => { &InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
acc.push( MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }.into()
MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }
.into(),
)
} }
&hir_ty::InferenceDiagnostic::PrivateField { expr, field } => { &InferenceDiagnostic::PrivateField { expr, field } => {
let expr = expr_syntax(expr); let expr = expr_syntax(expr);
let field = field.into(); let field = field.into();
acc.push(PrivateField { expr, field }.into()) PrivateField { expr, field }.into()
} }
&hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => { &InferenceDiagnostic::PrivateAssocItem { id, item } => {
let expr_or_pat = match id { let expr_or_pat = match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left), ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right), ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
}; };
let item = item.into(); let item = item.into();
acc.push(PrivateAssocItem { expr_or_pat, item }.into()) PrivateAssocItem { expr_or_pat, item }.into()
} }
hir_ty::InferenceDiagnostic::ExpectedFunction { call_expr, found } => { InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
let call_expr = expr_syntax(*call_expr); let call_expr = expr_syntax(*call_expr);
ExpectedFunction {
acc.push( call: call_expr,
ExpectedFunction { found: Type::new(db, DefWithBodyId::from(self), found.clone()),
call: call_expr, }
found: Type::new(db, DefWithBodyId::from(self), found.clone()), .into()
}
.into(),
)
} }
hir_ty::InferenceDiagnostic::UnresolvedField { InferenceDiagnostic::UnresolvedField {
expr, expr,
receiver, receiver,
name, name,
method_with_same_name_exists, method_with_same_name_exists,
} => { } => {
let expr = expr_syntax(*expr); let expr = expr_syntax(*expr);
UnresolvedField {
acc.push( expr,
UnresolvedField { name: name.clone(),
expr, receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
name: name.clone(), method_with_same_name_exists: *method_with_same_name_exists,
receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()), }
method_with_same_name_exists: *method_with_same_name_exists, .into()
}
.into(),
)
} }
hir_ty::InferenceDiagnostic::UnresolvedMethodCall { InferenceDiagnostic::UnresolvedMethodCall {
expr, expr,
receiver, receiver,
name, name,
@ -1705,50 +1688,38 @@ impl DefWithBody {
assoc_func_with_same_name, assoc_func_with_same_name,
} => { } => {
let expr = expr_syntax(*expr); let expr = expr_syntax(*expr);
UnresolvedMethodCall {
acc.push( expr,
UnresolvedMethodCall { name: name.clone(),
expr, receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
name: name.clone(), field_with_same_name: field_with_same_name
receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()), .clone()
field_with_same_name: field_with_same_name .map(|ty| Type::new(db, DefWithBodyId::from(self), ty)),
.clone() assoc_func_with_same_name: *assoc_func_with_same_name,
.map(|ty| Type::new(db, DefWithBodyId::from(self), ty)), }
assoc_func_with_same_name: *assoc_func_with_same_name, .into()
}
.into(),
)
} }
&hir_ty::InferenceDiagnostic::UnresolvedAssocItem { id } => { &InferenceDiagnostic::UnresolvedAssocItem { id } => {
let expr_or_pat = match id { let expr_or_pat = match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left), ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right), ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
}; };
acc.push(UnresolvedAssocItem { expr_or_pat }.into()) UnresolvedAssocItem { expr_or_pat }.into()
} }
&hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => {
expr,
is_break,
bad_value_break,
} => {
let expr = expr_syntax(expr); let expr = expr_syntax(expr);
acc.push(BreakOutsideOfLoop { expr, is_break, bad_value_break }.into()) BreakOutsideOfLoop { expr, is_break, bad_value_break }.into()
} }
hir_ty::InferenceDiagnostic::TypedHole { expr, expected } => { InferenceDiagnostic::TypedHole { expr, expected } => {
let expr = expr_syntax(*expr); let expr = expr_syntax(*expr);
acc.push(
TypedHole { TypedHole {
expr, expr,
expected: Type::new(db, DefWithBodyId::from(self), expected.clone()), expected: Type::new(db, DefWithBodyId::from(self), expected.clone()),
} }
.into(), .into()
)
} }
&hir_ty::InferenceDiagnostic::MismatchedTupleStructPatArgCount { &InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => {
pat,
expected,
found,
} => {
let expr_or_pat = match pat { let expr_or_pat = match pat {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left), ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => { ExprOrPatId::PatId(pat) => {
@ -1762,11 +1733,9 @@ impl DefWithBody {
InFile { file_id, value: ptr } InFile { file_id, value: ptr }
} }
}; };
acc.push( MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into()
MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into(),
)
} }
} });
} }
for (pat_or_expr, mismatch) in infer.type_mismatches() { for (pat_or_expr, mismatch) in infer.type_mismatches() {
let expr_or_pat = match pat_or_expr { let expr_or_pat = match pat_or_expr {
@ -1805,8 +1774,6 @@ impl DefWithBody {
} }
} }
let hir_body = db.body(self.into());
if let Ok(borrowck_results) = db.borrowck(self.into()) { if let Ok(borrowck_results) = db.borrowck(self.into()) {
for borrowck_result in borrowck_results.iter() { for borrowck_result in borrowck_results.iter() {
let mir_body = &borrowck_result.mir_body; let mir_body = &borrowck_result.mir_body;
@ -1828,7 +1795,7 @@ impl DefWithBody {
) )
} }
let mol = &borrowck_result.mutability_of_locals; let mol = &borrowck_result.mutability_of_locals;
for (binding_id, binding_data) in hir_body.bindings.iter() { for (binding_id, binding_data) in body.bindings.iter() {
if binding_data.problems.is_some() { if binding_data.problems.is_some() {
// We should report specific diagnostics for these problems, not `need-mut` and `unused-mut`. // We should report specific diagnostics for these problems, not `need-mut` and `unused-mut`.
continue; continue;

View file

@ -111,6 +111,8 @@ mod handlers {
mod add_label_to_loop; mod add_label_to_loop;
mod add_lifetime_to_type; mod add_lifetime_to_type;
mod add_missing_impl_members; mod add_missing_impl_members;
mod add_missing_match_arms;
mod add_return_type;
mod add_turbo_fish; mod add_turbo_fish;
mod apply_demorgan; mod apply_demorgan;
mod auto_import; mod auto_import;
@ -124,15 +126,15 @@ mod handlers {
mod convert_iter_for_each_to_for; mod convert_iter_for_each_to_for;
mod convert_let_else_to_match; mod convert_let_else_to_match;
mod convert_match_to_let_else; mod convert_match_to_let_else;
mod convert_named_struct_to_tuple_struct;
mod convert_nested_function_to_closure; mod convert_nested_function_to_closure;
mod convert_to_guarded_return;
mod convert_tuple_return_type_to_struct; mod convert_tuple_return_type_to_struct;
mod convert_tuple_struct_to_named_struct; mod convert_tuple_struct_to_named_struct;
mod convert_named_struct_to_tuple_struct;
mod convert_to_guarded_return;
mod convert_two_arm_bool_match_to_matches_macro; mod convert_two_arm_bool_match_to_matches_macro;
mod convert_while_to_loop; mod convert_while_to_loop;
mod desugar_doc_comment;
mod destructure_tuple_binding; mod destructure_tuple_binding;
mod desugar_doc_comment;
mod expand_glob_import; mod expand_glob_import;
mod extract_expressions_from_format_string; mod extract_expressions_from_format_string;
mod extract_function; mod extract_function;
@ -140,7 +142,6 @@ mod handlers {
mod extract_struct_from_enum_variant; mod extract_struct_from_enum_variant;
mod extract_type_alias; mod extract_type_alias;
mod extract_variable; mod extract_variable;
mod add_missing_match_arms;
mod fix_visibility; mod fix_visibility;
mod flip_binexpr; mod flip_binexpr;
mod flip_comma; mod flip_comma;
@ -148,6 +149,7 @@ mod handlers {
mod generate_constant; mod generate_constant;
mod generate_default_from_enum_variant; mod generate_default_from_enum_variant;
mod generate_default_from_new; mod generate_default_from_new;
mod generate_delegate_methods;
mod generate_delegate_trait; mod generate_delegate_trait;
mod generate_deref; mod generate_deref;
mod generate_derive; mod generate_derive;
@ -162,62 +164,60 @@ mod handlers {
mod generate_is_empty_from_len; mod generate_is_empty_from_len;
mod generate_mut_trait_impl; mod generate_mut_trait_impl;
mod generate_new; mod generate_new;
mod generate_delegate_methods;
mod generate_trait_from_impl; mod generate_trait_from_impl;
mod add_return_type;
mod inline_call; mod inline_call;
mod inline_const_as_literal; mod inline_const_as_literal;
mod inline_local_variable; mod inline_local_variable;
mod inline_macro; mod inline_macro;
mod inline_type_alias; mod inline_type_alias;
mod into_to_qualified_from;
mod introduce_named_generic;
mod introduce_named_lifetime; mod introduce_named_lifetime;
mod invert_if; mod invert_if;
mod merge_imports; mod merge_imports;
mod merge_match_arms; mod merge_match_arms;
mod merge_nested_if;
mod move_bounds; mod move_bounds;
mod move_const_to_impl; mod move_const_to_impl;
mod move_from_mod_rs;
mod move_guard; mod move_guard;
mod move_module_to_file; mod move_module_to_file;
mod move_to_mod_rs; mod move_to_mod_rs;
mod move_from_mod_rs;
mod number_representation; mod number_representation;
mod promote_local_to_const; mod promote_local_to_const;
mod pull_assignment_up; mod pull_assignment_up;
mod qualify_path;
mod qualify_method_call; mod qualify_method_call;
mod qualify_path;
mod raw_string; mod raw_string;
mod remove_dbg; mod remove_dbg;
mod remove_mut; mod remove_mut;
mod remove_parentheses;
mod remove_unused_imports; mod remove_unused_imports;
mod remove_unused_param; mod remove_unused_param;
mod remove_parentheses;
mod reorder_fields; mod reorder_fields;
mod reorder_impl_items; mod reorder_impl_items;
mod replace_try_expr_with_match; mod replace_arith_op;
mod replace_derive_with_manual_impl; mod replace_derive_with_manual_impl;
mod replace_if_let_with_match; mod replace_if_let_with_match;
mod replace_is_method_with_if_let_method; mod replace_is_method_with_if_let_method;
mod replace_method_eager_lazy;
mod replace_arith_op;
mod introduce_named_generic;
mod replace_let_with_if_let; mod replace_let_with_if_let;
mod replace_method_eager_lazy;
mod replace_named_generic_with_impl; mod replace_named_generic_with_impl;
mod replace_qualified_name_with_use; mod replace_qualified_name_with_use;
mod replace_string_with_char; mod replace_string_with_char;
mod replace_try_expr_with_match;
mod replace_turbofish_with_explicit_type; mod replace_turbofish_with_explicit_type;
mod split_import;
mod unmerge_match_arm;
mod unwrap_tuple;
mod sort_items; mod sort_items;
mod split_import;
mod toggle_ignore; mod toggle_ignore;
mod unmerge_match_arm;
mod unmerge_use; mod unmerge_use;
mod unnecessary_async; mod unnecessary_async;
mod unqualify_method_call;
mod unwrap_block; mod unwrap_block;
mod unwrap_result_return_type; mod unwrap_result_return_type;
mod unqualify_method_call; mod unwrap_tuple;
mod wrap_return_type_in_result; mod wrap_return_type_in_result;
mod into_to_qualified_from;
mod merge_nested_if;
pub(crate) fn all() -> &'static [Handler] { pub(crate) fn all() -> &'static [Handler] {
&[ &[

View file

@ -23,8 +23,8 @@ use syntax::{
use crate::assist_context::{AssistContext, SourceChangeBuilder}; use crate::assist_context::{AssistContext, SourceChangeBuilder};
pub(crate) mod suggest_name;
mod gen_trait_fn_body; mod gen_trait_fn_body;
pub(crate) mod suggest_name;
pub(crate) fn unwrap_trivial_block(block_expr: ast::BlockExpr) -> ast::Expr { pub(crate) fn unwrap_trivial_block(block_expr: ast::BlockExpr) -> ast::Expr {
extract_trivial_expression(&block_expr) extract_trivial_expression(&block_expr)

View file

@ -2,8 +2,10 @@
pub(crate) mod attribute; pub(crate) mod attribute;
pub(crate) mod dot; pub(crate) mod dot;
pub(crate) mod env_vars;
pub(crate) mod expr; pub(crate) mod expr;
pub(crate) mod extern_abi; pub(crate) mod extern_abi;
pub(crate) mod extern_crate;
pub(crate) mod field; pub(crate) mod field;
pub(crate) mod flyimport; pub(crate) mod flyimport;
pub(crate) mod fn_param; pub(crate) mod fn_param;
@ -19,8 +21,6 @@ pub(crate) mod snippet;
pub(crate) mod r#type; pub(crate) mod r#type;
pub(crate) mod use_; pub(crate) mod use_;
pub(crate) mod vis; pub(crate) mod vis;
pub(crate) mod env_vars;
pub(crate) mod extern_crate;
use std::iter; use std::iter;

View file

@ -25,8 +25,8 @@ use crate::{
mod cfg; mod cfg;
mod derive; mod derive;
mod lint; mod lint;
mod repr;
mod macro_use; mod macro_use;
mod repr;
pub(crate) use self::derive::complete_derive_path; pub(crate) use self::derive::complete_derive_path;

View file

@ -8,9 +8,9 @@ mod context;
mod item; mod item;
mod render; mod render;
mod snippet;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
mod snippet;
use ide_db::{ use ide_db::{
base_db::FilePosition, base_db::FilePosition,

View file

@ -1,14 +1,14 @@
//! `render` module provides utilities for rendering completion suggestions //! `render` module provides utilities for rendering completion suggestions
//! into code pieces that will be presented to user. //! into code pieces that will be presented to user.
pub(crate) mod macro_;
pub(crate) mod function;
pub(crate) mod const_; pub(crate) mod const_;
pub(crate) mod function;
pub(crate) mod literal;
pub(crate) mod macro_;
pub(crate) mod pattern; pub(crate) mod pattern;
pub(crate) mod type_alias; pub(crate) mod type_alias;
pub(crate) mod variant;
pub(crate) mod union_literal; pub(crate) mod union_literal;
pub(crate) mod literal; pub(crate) mod variant;
use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type}; use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
use ide_db::{ use ide_db::{

View file

@ -12,8 +12,8 @@ mod attribute;
mod expression; mod expression;
mod flyimport; mod flyimport;
mod fn_param; mod fn_param;
mod item_list;
mod item; mod item;
mod item_list;
mod pattern; mod pattern;
mod predicate; mod predicate;
mod proc_macros; mod proc_macros;

View file

@ -9,6 +9,7 @@ mod apply_change;
pub mod active_parameter; pub mod active_parameter;
pub mod assists; pub mod assists;
pub mod defs; pub mod defs;
pub mod documentation;
pub mod famous_defs; pub mod famous_defs;
pub mod helpers; pub mod helpers;
pub mod items_locator; pub mod items_locator;
@ -22,7 +23,6 @@ pub mod symbol_index;
pub mod traits; pub mod traits;
pub mod ty_filter; pub mod ty_filter;
pub mod use_trivial_constructor; pub mod use_trivial_constructor;
pub mod documentation;
pub mod imports { pub mod imports {
pub mod import_assets; pub mod import_assets;
@ -35,10 +35,10 @@ pub mod generated {
} }
pub mod syntax_helpers { pub mod syntax_helpers {
pub mod node_ext;
pub mod insert_whitespace_into_node;
pub mod format_string; pub mod format_string;
pub mod format_string_exprs; pub mod format_string_exprs;
pub mod insert_whitespace_into_node;
pub mod node_ext;
pub use parser::LexedStr; pub use parser::LexedStr;
} }
@ -414,6 +414,6 @@ impl SnippetCap {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
mod sourcegen_lints;
mod line_index; mod line_index;
mod sourcegen_lints;
} }

View file

@ -44,30 +44,30 @@ mod handlers {
pub(crate) mod private_assoc_item; pub(crate) mod private_assoc_item;
pub(crate) mod private_field; pub(crate) mod private_field;
pub(crate) mod replace_filter_map_next_with_find_map; pub(crate) mod replace_filter_map_next_with_find_map;
pub(crate) mod trait_impl_orphan;
pub(crate) mod trait_impl_incorrect_safety; pub(crate) mod trait_impl_incorrect_safety;
pub(crate) mod trait_impl_missing_assoc_item; pub(crate) mod trait_impl_missing_assoc_item;
pub(crate) mod trait_impl_orphan;
pub(crate) mod trait_impl_redundant_assoc_item; pub(crate) mod trait_impl_redundant_assoc_item;
pub(crate) mod typed_hole;
pub(crate) mod type_mismatch; pub(crate) mod type_mismatch;
pub(crate) mod typed_hole;
pub(crate) mod undeclared_label;
pub(crate) mod unimplemented_builtin_macro; pub(crate) mod unimplemented_builtin_macro;
pub(crate) mod unreachable_label;
pub(crate) mod unresolved_assoc_item; pub(crate) mod unresolved_assoc_item;
pub(crate) mod unresolved_extern_crate; pub(crate) mod unresolved_extern_crate;
pub(crate) mod unresolved_field; pub(crate) mod unresolved_field;
pub(crate) mod unresolved_method;
pub(crate) mod unresolved_import; pub(crate) mod unresolved_import;
pub(crate) mod unresolved_macro_call; pub(crate) mod unresolved_macro_call;
pub(crate) mod unresolved_method;
pub(crate) mod unresolved_module; pub(crate) mod unresolved_module;
pub(crate) mod unresolved_proc_macro; pub(crate) mod unresolved_proc_macro;
pub(crate) mod undeclared_label;
pub(crate) mod unreachable_label;
pub(crate) mod unused_variables; pub(crate) mod unused_variables;
// The handlers below are unusual, the implement the diagnostics as well. // The handlers below are unusual, the implement the diagnostics as well.
pub(crate) mod field_shorthand; pub(crate) mod field_shorthand;
pub(crate) mod useless_braces;
pub(crate) mod unlinked_file;
pub(crate) mod json_is_not_rust; pub(crate) mod json_is_not_rust;
pub(crate) mod unlinked_file;
pub(crate) mod useless_braces;
} }
#[cfg(test)] #[cfg(test)]

View file

@ -69,11 +69,11 @@
// // foo($a, $b) ==>> ($a).foo($b) // // foo($a, $b) ==>> ($a).foo($b)
// ``` // ```
mod fragments;
mod from_comment; mod from_comment;
mod matching; mod matching;
mod nester; mod nester;
mod parsing; mod parsing;
mod fragments;
mod replacing; mod replacing;
mod resolving; mod resolving;
mod search; mod search;

View file

@ -25,13 +25,13 @@ mod bind_pat;
mod binding_mode; mod binding_mode;
mod chaining; mod chaining;
mod closing_brace; mod closing_brace;
mod closure_ret;
mod closure_captures; mod closure_captures;
mod closure_ret;
mod discriminant; mod discriminant;
mod fn_lifetime_fn; mod fn_lifetime_fn;
mod implicit_drop;
mod implicit_static; mod implicit_static;
mod param_name; mod param_name;
mod implicit_drop;
mod range_exclusive; mod range_exclusive;
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]

View file

@ -21,24 +21,25 @@ macro_rules! eprintln {
mod fixture; mod fixture;
mod markup; mod markup;
mod prime_caches;
mod navigation_target; mod navigation_target;
mod prime_caches;
mod annotations; mod annotations;
mod call_hierarchy; mod call_hierarchy;
mod signature_help;
mod doc_links; mod doc_links;
mod highlight_related;
mod expand_macro; mod expand_macro;
mod extend_selection; mod extend_selection;
mod fetch_crates;
mod file_structure; mod file_structure;
mod folding_ranges; mod folding_ranges;
mod goto_declaration; mod goto_declaration;
mod goto_definition; mod goto_definition;
mod goto_implementation; mod goto_implementation;
mod goto_type_definition; mod goto_type_definition;
mod highlight_related;
mod hover; mod hover;
mod inlay_hints; mod inlay_hints;
mod interpret_function;
mod join_lines; mod join_lines;
mod markdown_remove; mod markdown_remove;
mod matching_brace; mod matching_brace;
@ -48,6 +49,8 @@ mod parent_module;
mod references; mod references;
mod rename; mod rename;
mod runnables; mod runnables;
mod shuffle_crate_graph;
mod signature_help;
mod ssr; mod ssr;
mod static_index; mod static_index;
mod status; mod status;
@ -56,12 +59,9 @@ mod syntax_tree;
mod typing; mod typing;
mod view_crate_graph; mod view_crate_graph;
mod view_hir; mod view_hir;
mod view_mir;
mod interpret_function;
mod view_item_tree; mod view_item_tree;
mod shuffle_crate_graph;
mod fetch_crates;
mod view_memory_layout; mod view_memory_layout;
mod view_mir;
use std::ffi::OsStr; use std::ffi::OsStr;

View file

@ -3,11 +3,11 @@ pub(crate) mod tags;
mod highlights; mod highlights;
mod injector; mod injector;
mod highlight;
mod format;
mod macro_;
mod inject;
mod escape; mod escape;
mod format;
mod highlight;
mod inject;
mod macro_;
mod html; mod html;
#[cfg(test)] #[cfg(test)]

View file

@ -8,11 +8,11 @@
#![warn(rust_2018_idioms, unused_lifetimes)] #![warn(rust_2018_idioms, unused_lifetimes)]
mod parser;
mod expander; mod expander;
mod parser;
mod syntax_bridge; mod syntax_bridge;
mod tt_iter;
mod to_parser_input; mod to_parser_input;
mod tt_iter;
#[cfg(test)] #[cfg(test)]
mod benchmark; mod benchmark;

View file

@ -234,7 +234,7 @@ where
let mut stack = NonEmptyVec::new(entry); let mut stack = NonEmptyVec::new(entry);
while let Some((token, abs_range)) = conv.bump() { while let Some((token, abs_range)) = conv.bump() {
let tt::Subtree { delimiter, token_trees: result } = stack.last_mut(); let tt::Subtree { delimiter, token_trees } = stack.last_mut();
let tt = match token.as_leaf() { let tt = match token.as_leaf() {
Some(leaf) => tt::TokenTree::Leaf(leaf.clone()), Some(leaf) => tt::TokenTree::Leaf(leaf.clone()),
@ -243,7 +243,7 @@ where
COMMENT => { COMMENT => {
let span = conv.span_for(abs_range); let span = conv.span_for(abs_range);
if let Some(tokens) = conv.convert_doc_comment(&token, span) { if let Some(tokens) = conv.convert_doc_comment(&token, span) {
result.extend(tokens); token_trees.extend(tokens);
} }
continue; continue;
} }
@ -317,7 +317,7 @@ where
span: conv span: conv
.span_for(TextRange::at(abs_range.start(), TextSize::of('\''))), .span_for(TextRange::at(abs_range.start(), TextSize::of('\''))),
}); });
result.push(apostrophe.into()); token_trees.push(apostrophe.into());
let ident = tt::Leaf::from(tt::Ident { let ident = tt::Leaf::from(tt::Ident {
text: SmolStr::new(&token.to_text(conv)[1..]), text: SmolStr::new(&token.to_text(conv)[1..]),
@ -326,7 +326,7 @@ where
abs_range.end(), abs_range.end(),
)), )),
}); });
result.push(ident.into()); token_trees.push(ident.into());
continue; continue;
} }
_ => continue, _ => continue,
@ -337,7 +337,7 @@ where
}, },
}; };
result.push(tt); token_trees.push(tt);
} }
// If we get here, we've consumed all input tokens. // If we get here, we've consumed all input tokens.

View file

@ -30,12 +30,12 @@
mod attributes; mod attributes;
mod expressions; mod expressions;
mod generic_args;
mod generic_params;
mod items; mod items;
mod params; mod params;
mod paths; mod paths;
mod patterns; mod patterns;
mod generic_args;
mod generic_params;
mod types; mod types;
use crate::{ use crate::{

View file

@ -1,5 +1,5 @@
mod consts;
mod adt; mod adt;
mod consts;
mod traits; mod traits;
mod use_item; mod use_item;

View file

@ -26,15 +26,15 @@ extern crate ra_ap_rustc_lexer as rustc_lexer;
#[cfg(feature = "in-rust-tree")] #[cfg(feature = "in-rust-tree")]
extern crate rustc_lexer; extern crate rustc_lexer;
mod lexed_str;
mod token_set;
mod syntax_kind;
mod event; mod event;
mod parser;
mod grammar; mod grammar;
mod input; mod input;
mod lexed_str;
mod output; mod output;
mod parser;
mod shortcuts; mod shortcuts;
mod syntax_kind;
mod token_set;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;

View file

@ -1,6 +1,6 @@
mod prefix_entries;
mod sourcegen_inline_tests; mod sourcegen_inline_tests;
mod top_entries; mod top_entries;
mod prefix_entries;
use std::{ use std::{
fmt::Write, fmt::Write,

View file

@ -21,8 +21,8 @@ extern crate proc_macro;
extern crate rustc_driver as _; extern crate rustc_driver as _;
mod dylib; mod dylib;
mod server;
mod proc_macros; mod proc_macros;
mod server;
use std::{ use std::{
collections::{hash_map::Entry, HashMap}, collections::{hash_map::Entry, HashMap},

View file

@ -13,9 +13,9 @@ use proc_macro::bridge;
mod token_stream; mod token_stream;
pub use token_stream::TokenStream; pub use token_stream::TokenStream;
pub mod token_id;
pub mod rust_analyzer_span; pub mod rust_analyzer_span;
mod symbol; mod symbol;
pub mod token_id;
pub use symbol::*; pub use symbol::*;
use tt::Spacing; use tt::Spacing;

View file

@ -206,7 +206,7 @@ impl server::TokenStream for TokenIdServer {
stream: if subtree.token_trees.is_empty() { stream: if subtree.token_trees.is_empty() {
None None
} else { } else {
Some(subtree.token_trees.into_iter().collect()) Some(TokenStream { token_trees: subtree.token_trees })
}, },
span: bridge::DelimSpan::from_single(subtree.delimiter.open), span: bridge::DelimSpan::from_single(subtree.delimiter.open),
}), }),

View file

@ -2,11 +2,11 @@
#![warn(rust_2018_idioms, unused_lifetimes)] #![warn(rust_2018_idioms, unused_lifetimes)]
mod stop_watch;
mod memory_usage;
#[cfg(feature = "cpu_profiler")] #[cfg(feature = "cpu_profiler")]
mod google_cpu_profiler; mod google_cpu_profiler;
mod hprof; mod hprof;
mod memory_usage;
mod stop_watch;
mod tree; mod tree;
use std::cell::RefCell; use std::cell::RefCell;

View file

@ -17,15 +17,15 @@
#![warn(rust_2018_idioms, unused_lifetimes)] #![warn(rust_2018_idioms, unused_lifetimes)]
mod manifest_path; mod build_scripts;
mod cargo_workspace; mod cargo_workspace;
mod cfg_flag; mod cfg_flag;
mod manifest_path;
mod project_json; mod project_json;
mod sysroot;
mod workspace;
mod rustc_cfg; mod rustc_cfg;
mod build_scripts; mod sysroot;
pub mod target_data_layout; pub mod target_data_layout;
mod workspace;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;

View file

@ -1,16 +1,16 @@
//! Various batch processing tasks, intended primarily for debugging. //! Various batch processing tasks, intended primarily for debugging.
pub mod flags;
mod parse;
mod symbols;
mod highlight;
mod analysis_stats; mod analysis_stats;
mod diagnostics; mod diagnostics;
mod ssr; pub mod flags;
mod highlight;
mod lsif; mod lsif;
mod scip; mod parse;
mod run_tests; mod run_tests;
mod rustc_tests; mod rustc_tests;
mod scip;
mod ssr;
mod symbols;
mod progress_report; mod progress_report;

View file

@ -2,11 +2,11 @@
use core::fmt; use core::fmt;
pub(crate) mod utils;
pub(crate) mod semantic_tokens;
pub mod ext; pub mod ext;
pub(crate) mod from_proto; pub(crate) mod from_proto;
pub(crate) mod semantic_tokens;
pub(crate) mod to_proto; pub(crate) mod to_proto;
pub(crate) mod utils;
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct LspError { pub(crate) struct LspError {

View file

@ -1,7 +1,4 @@
//! File and span related types. //! File and span related types.
// FIXME: This should be moved into its own crate to get rid of the dependency inversion, base-db
// has business depending on tt, tt should depend on a span crate only (which unforunately will have
// to depend on salsa)
use std::fmt::{self, Write}; use std::fmt::{self, Write};
use salsa::InternId; use salsa::InternId;

View file

@ -6,13 +6,13 @@ use std::io as sio;
use std::process::Command; use std::process::Command;
use std::{cmp::Ordering, ops, time::Instant}; use std::{cmp::Ordering, ops, time::Instant};
pub mod anymap;
mod macros; mod macros;
pub mod process;
pub mod panic_context;
pub mod non_empty_vec; pub mod non_empty_vec;
pub mod panic_context;
pub mod process;
pub mod rand; pub mod rand;
pub mod thread; pub mod thread;
pub mod anymap;
pub use always_assert::{always, never}; pub use always_assert::{always, never};
pub use itertools; pub use itertools;

View file

@ -1,15 +1,15 @@
//! Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s //! Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s
mod generated;
mod traits;
mod token_ext;
mod node_ext;
mod expr_ext;
mod operators;
pub mod edit; pub mod edit;
pub mod edit_in_place; pub mod edit_in_place;
mod expr_ext;
mod generated;
pub mod make; pub mod make;
mod node_ext;
mod operators;
pub mod prec; pub mod prec;
mod token_ext;
mod traits;
use std::marker::PhantomData; use std::marker::PhantomData;

View file

@ -32,22 +32,22 @@ macro_rules! eprintln {
($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
} }
mod syntax_node;
mod syntax_error;
mod parsing; mod parsing;
mod validation;
mod ptr; mod ptr;
mod token_text; mod syntax_error;
mod syntax_node;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
mod token_text;
mod validation;
pub mod algo; pub mod algo;
pub mod ast; pub mod ast;
#[doc(hidden)] #[doc(hidden)]
pub mod fuzz; pub mod fuzz;
pub mod utils;
pub mod ted;
pub mod hacks; pub mod hacks;
pub mod ted;
pub mod utils;
use std::marker::PhantomData; use std::marker::PhantomData;

View file

@ -6,7 +6,9 @@
"brackets": [ "brackets": [
["{", "}"], ["{", "}"],
["[", "]"], ["[", "]"],
["(", ")"] ["(", ")"],
["#[", "]"],
["#![", "]"]
], ],
"colorizedBracketPairs": [ "colorizedBracketPairs": [
["{", "}"], ["{", "}"],
@ -17,6 +19,8 @@
{ "open": "{", "close": "}" }, { "open": "{", "close": "}" },
{ "open": "[", "close": "]" }, { "open": "[", "close": "]" },
{ "open": "(", "close": ")" }, { "open": "(", "close": ")" },
{ "open": "#[", "close": "]" },
{ "open": "#![", "close": "]" },
{ "open": "\"", "close": "\"", "notIn": ["string"] }, { "open": "\"", "close": "\"", "notIn": ["string"] },
{ "open": "/*", "close": " */" }, { "open": "/*", "close": " */" },
{ "open": "`", "close": "`", "notIn": ["string"] } { "open": "`", "close": "`", "notIn": ["string"] }

View file

@ -6,11 +6,11 @@
#![warn(rust_2018_idioms, unused_lifetimes)] #![warn(rust_2018_idioms, unused_lifetimes)]
mod msg;
mod stdio;
mod error; mod error;
mod socket; mod msg;
mod req_queue; mod req_queue;
mod socket;
mod stdio;
use std::{ use std::{
io, io,

View file

@ -1,2 +1,2 @@
reorder_modules = false reorder_modules = true
use_small_heuristics = "Max" use_small_heuristics = "Max"

View file

@ -12,11 +12,11 @@
mod flags; mod flags;
mod install;
mod release;
mod dist; mod dist;
mod publish; mod install;
mod metrics; mod metrics;
mod publish;
mod release;
use anyhow::bail; use anyhow::bail;
use std::{ use std::{