From 880baa9e568c5aff70a49f02e517fce075e40b3f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 25 Jan 2024 10:07:29 +0100 Subject: [PATCH] Shuffle hir-expand things around --- crates/hir-expand/src/ast_id_map.rs | 9 +- crates/hir-expand/src/db.rs | 275 +++------------------------ crates/hir-expand/src/declarative.rs | 176 +++++++++++++++++ crates/hir-expand/src/hygiene.rs | 40 ++++ crates/hir-expand/src/lib.rs | 9 +- crates/hir-expand/src/mod_path.rs | 5 +- crates/hir-expand/src/span_map.rs | 37 +++- rustfmt.toml | 2 +- 8 files changed, 295 insertions(+), 258 deletions(-) create mode 100644 crates/hir-expand/src/declarative.rs diff --git a/crates/hir-expand/src/ast_id_map.rs b/crates/hir-expand/src/ast_id_map.rs index 7bdd6db932..530f10a068 100644 --- a/crates/hir-expand/src/ast_id_map.rs +++ b/crates/hir-expand/src/ast_id_map.rs @@ -155,7 +155,14 @@ impl PartialEq for AstIdMap { impl Eq for AstIdMap {} impl AstIdMap { - pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap { + pub(crate) fn ast_id_map( + db: &dyn ExpandDatabase, + file_id: span::HirFileId, + ) -> triomphe::Arc { + triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id))) + } + + fn from_source(node: &SyntaxNode) -> AstIdMap { assert!(node.parent().is_none()); let mut res = AstIdMap::default(); diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 2f8c0951b1..08542c6430 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -1,16 +1,14 @@ //! Defines database & queries for macro expansion. -use std::sync::OnceLock; - use base_db::{ salsa::{self, debug::DebugQueryTable}, - CrateId, Edition, FileId, SourceDatabase, VersionReq, + CrateId, FileId, SourceDatabase, }; use either::Either; use limit::Limit; use mbe::{syntax_node_to_token_tree, ValueResult}; use rustc_hash::FxHashSet; -use span::{Span, SyntaxContextId}; +use span::SyntaxContextId; use syntax::{ ast::{self, HasAttrs}, AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, @@ -19,13 +17,14 @@ use triomphe::Arc; use crate::{ ast_id_map::AstIdMap, - attrs::{collect_attrs, RawAttrs}, + attrs::collect_attrs, builtin_attr_macro::pseudo_derive_attr_expansion, builtin_fn_macro::EagerExpander, + declarative::DeclarativeMacroExpander, fixup::{self, reverse_fixups, SyntaxFixupUndoInfo}, hygiene::{ - apply_mark, span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt, - SyntaxContextData, Transparency, + span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt, + SyntaxContextData, }, proc_macro::ProcMacros, span_map::{RealSpanMap, SpanMap, SpanMapRef}, @@ -43,82 +42,6 @@ use crate::{ /// Actual max for `analysis-stats .` at some point: 30672. 
static TOKEN_LIMIT: Limit = Limit::new(1_048_576); -#[derive(Debug, Clone, Eq, PartialEq)] -/// Old-style `macro_rules` or the new macros 2.0 -pub struct DeclarativeMacroExpander { - pub mac: mbe::DeclarativeMacro, - pub transparency: Transparency, -} - -// FIXME: Remove this once we drop support for 1.76 -static REQUIREMENT: OnceLock = OnceLock::new(); - -impl DeclarativeMacroExpander { - pub fn expand( - &self, - db: &dyn ExpandDatabase, - tt: tt::Subtree, - call_id: MacroCallId, - ) -> ExpandResult { - let loc = db.lookup_intern_macro_call(call_id); - let toolchain = &db.crate_graph()[loc.def.krate].toolchain; - let new_meta_vars = toolchain.as_ref().map_or(false, |version| { - REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( - &base_db::Version { - pre: base_db::Prerelease::EMPTY, - build: base_db::BuildMetadata::EMPTY, - major: version.major, - minor: version.minor, - patch: version.patch, - }, - ) - }); - match self.mac.err() { - Some(e) => ExpandResult::new( - tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }), - ExpandError::other(format!("invalid macro definition: {e}")), - ), - None => self - .mac - .expand( - &tt, - |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency), - new_meta_vars, - loc.call_site, - ) - .map_err(Into::into), - } - } - - pub fn expand_unhygienic( - &self, - db: &dyn ExpandDatabase, - tt: tt::Subtree, - krate: CrateId, - call_site: Span, - ) -> ExpandResult { - let toolchain = &db.crate_graph()[krate].toolchain; - let new_meta_vars = toolchain.as_ref().map_or(false, |version| { - REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( - &base_db::Version { - pre: base_db::Prerelease::EMPTY, - build: base_db::BuildMetadata::EMPTY, - major: version.major, - minor: version.minor, - patch: version.patch, - }, - ) - }); - match self.mac.err() { - Some(e) => ExpandResult::new( - tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::other(format!("invalid macro definition: {e}")), - ), - None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into), - } - } -} - #[derive(Debug, Clone, Eq, PartialEq)] pub enum TokenExpander { /// Old-style `macro_rules` or the new macros 2.0 @@ -141,6 +64,7 @@ pub trait ExpandDatabase: SourceDatabase { #[salsa::input] fn proc_macros(&self) -> Arc; + #[salsa::invoke(AstIdMap::ast_id_map)] fn ast_id_map(&self, file_id: HirFileId) -> Arc; /// Main public API -- parses a hir file, not caring whether it's a real @@ -156,8 +80,10 @@ pub trait ExpandDatabase: SourceDatabase { macro_file: MacroFileId, ) -> ExpandResult<(Parse, Arc)>; #[salsa::transparent] + #[salsa::invoke(SpanMap::new)] fn span_map(&self, file_id: HirFileId) -> SpanMap; + #[salsa::invoke(crate::span_map::real_span_map)] fn real_span_map(&self, file_id: FileId) -> Arc; /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the @@ -173,6 +99,7 @@ pub trait ExpandDatabase: SourceDatabase { #[salsa::transparent] fn setup_syntax_context_root(&self) -> (); #[salsa::transparent] + #[salsa::invoke(crate::hygiene::dump_syntax_contexts)] fn dump_syntax_contexts(&self) -> String; /// Lowers syntactic macro call to a token tree representation. That's a firewall @@ -184,8 +111,10 @@ pub trait ExpandDatabase: SourceDatabase { ) -> ValueResult, SyntaxFixupUndoInfo)>, Arc>>; /// Fetches the expander for this macro. 
#[salsa::transparent] + #[salsa::invoke(TokenExpander::macro_expander)] fn macro_expander(&self, id: MacroDefId) -> TokenExpander; /// Fetches (and compiles) the expander of this decl macro. + #[salsa::invoke(DeclarativeMacroExpander::expander)] fn decl_macro_expander( &self, def_crate: CrateId, @@ -203,36 +132,6 @@ pub trait ExpandDatabase: SourceDatabase { ) -> ExpandResult>; } -#[inline] -pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap { - match file_id.repr() { - HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)), - HirFileIdRepr::MacroFile(m) => { - SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1) - } - } -} - -pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc { - use syntax::ast::HasModuleItem; - let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)]; - let ast_id_map = db.ast_id_map(file_id.into()); - let tree = db.parse(file_id).tree(); - // FIXME: Descend into modules and other item containing items that are not annotated with attributes - // and allocate pairs for those as well. This gives us finer grained span anchors resulting in - // better incrementality - pairs.extend( - tree.items() - .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())), - ); - - Arc::new(RealSpanMap::from_file( - file_id, - pairs.into_boxed_slice(), - tree.syntax().text_range().end(), - )) -} - /// This expands the given macro call, but with different arguments. This is /// used for completion, where we want to see what 'would happen' if we insert a /// token. The `token_to_map` mapped down into the expansion, with the mapped @@ -357,10 +256,6 @@ pub fn expand_speculative( Some((node.syntax_node(), token)) } -fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc { - Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id))) -} - fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode { match file_id.repr() { HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(), @@ -412,7 +307,10 @@ fn parse_macro_expansion_error( .map(|it| it.0.errors().to_vec().into_boxed_slice()) } -fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse, SpanMap) { +pub(crate) fn parse_with_map( + db: &dyn ExpandDatabase, + file_id: HirFileId, +) -> (Parse, SpanMap) { match file_id.repr() { HirFileIdRepr::FileId(file_id) => { (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id))) @@ -581,100 +479,18 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet, -) -> Arc { - let crate_data = &db.crate_graph()[def_crate]; - let is_2021 = crate_data.edition >= Edition::Edition2021; - let (root, map) = parse_with_map(db, id.file_id); - let root = root.syntax_node(); - - let transparency = |node| { - // ... would be nice to have the item tree here - let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate); - match &*attrs - .iter() - .find(|it| { - it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency") - })? - .token_tree_value()? - .token_trees - { - [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] 
=> match &*i.text { - "transparent" => Some(Transparency::Transparent), - "semitransparent" => Some(Transparency::SemiTransparent), - "opaque" => Some(Transparency::Opaque), - _ => None, - }, - _ => None, +impl TokenExpander { + fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { + match id.kind { + MacroDefKind::Declarative(ast_id) => { + TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id)) + } + MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander), + MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander), + MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander), + MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander), + MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander), } - }; - let toolchain = crate_data.toolchain.as_ref(); - let new_meta_vars = toolchain.as_ref().map_or(false, |version| { - REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( - &base_db::Version { - pre: base_db::Prerelease::EMPTY, - build: base_db::BuildMetadata::EMPTY, - major: version.major, - minor: version.minor, - patch: version.patch, - }, - ) - }); - - let (mac, transparency) = match id.to_ptr(db).to_node(&root) { - ast::Macro::MacroRules(macro_rules) => ( - match macro_rules.token_tree() { - Some(arg) => { - let tt = mbe::syntax_node_to_token_tree( - arg.syntax(), - map.as_ref(), - map.span_for_range(macro_rules.macro_rules_token().unwrap().text_range()), - ); - - mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars) - } - None => mbe::DeclarativeMacro::from_err( - mbe::ParseError::Expected("expected a token tree".into()), - is_2021, - ), - }, - transparency(¯o_rules).unwrap_or(Transparency::SemiTransparent), - ), - ast::Macro::MacroDef(macro_def) => ( - match macro_def.body() { - Some(arg) => { - let tt = mbe::syntax_node_to_token_tree( - arg.syntax(), - map.as_ref(), - map.span_for_range(macro_def.macro_token().unwrap().text_range()), - ); - - mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars) - } - None => mbe::DeclarativeMacro::from_err( - mbe::ParseError::Expected("expected a token tree".into()), - is_2021, - ), - }, - transparency(¯o_def).unwrap_or(Transparency::Opaque), - ), - }; - Arc::new(DeclarativeMacroExpander { mac, transparency }) -} - -fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { - match id.kind { - MacroDefKind::Declarative(ast_id) => { - TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id)) - } - MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander), - MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander), - MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander), - MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander), - MacroDefKind::ProcMacro(expander, ..) 
=> TokenExpander::ProcMacro(expander), } } @@ -862,40 +678,3 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> { fn setup_syntax_context_root(db: &dyn ExpandDatabase) { db.intern_syntax_context(SyntaxContextData::root()); } - -fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String { - let mut s = String::from("Expansions:"); - let mut entries = InternMacroCallLookupQuery.in_db(db).entries::>(); - entries.sort_by_key(|e| e.key); - for e in entries { - let id = e.key; - let expn_data = e.value.as_ref().unwrap(); - s.push_str(&format!( - "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}", - id, - expn_data.kind.file_id(), - expn_data.call_site, - SyntaxContextId::ROOT, // FIXME expn_data.def_site, - expn_data.kind.descr(), - )); - } - - s.push_str("\n\nSyntaxContexts:\n"); - let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::>(); - entries.sort_by_key(|e| e.key); - for e in entries { - struct SyntaxContextDebug<'a>( - &'a dyn ExpandDatabase, - SyntaxContextId, - &'a SyntaxContextData, - ); - - impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.2.fancy_debug(self.1, self.0, f) - } - } - stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap())); - } - s -} diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs new file mode 100644 index 0000000000..ff1a9625e1 --- /dev/null +++ b/crates/hir-expand/src/declarative.rs @@ -0,0 +1,176 @@ +use std::sync::OnceLock; + +use base_db::{CrateId, Edition, VersionReq}; +use span::{MacroCallId, Span}; +use syntax::{ast, AstNode}; +use triomphe::Arc; + +use crate::{ + attrs::RawAttrs, + db::ExpandDatabase, + hygiene::{apply_mark, Transparency}, + tt, AstId, ExpandError, ExpandResult, +}; + +/// Old-style `macro_rules` or the new macros 2.0 +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct DeclarativeMacroExpander { + pub mac: mbe::DeclarativeMacro, + pub transparency: Transparency, +} + +// FIXME: Remove this once we drop support for 1.76 +static REQUIREMENT: OnceLock = OnceLock::new(); + +impl DeclarativeMacroExpander { + pub fn expand( + &self, + db: &dyn ExpandDatabase, + tt: tt::Subtree, + call_id: MacroCallId, + ) -> ExpandResult { + let loc = db.lookup_intern_macro_call(call_id); + let toolchain = &db.crate_graph()[loc.def.krate].toolchain; + let new_meta_vars = toolchain.as_ref().map_or(false, |version| { + REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( + &base_db::Version { + pre: base_db::Prerelease::EMPTY, + build: base_db::BuildMetadata::EMPTY, + major: version.major, + minor: version.minor, + patch: version.patch, + }, + ) + }); + match self.mac.err() { + Some(e) => ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }), + ExpandError::other(format!("invalid macro definition: {e}")), + ), + None => self + .mac + .expand( + &tt, + |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency), + new_meta_vars, + loc.call_site, + ) + .map_err(Into::into), + } + } + + pub fn expand_unhygienic( + &self, + db: &dyn ExpandDatabase, + tt: tt::Subtree, + krate: CrateId, + call_site: Span, + ) -> ExpandResult { + let toolchain = &db.crate_graph()[krate].toolchain; + let new_meta_vars = toolchain.as_ref().map_or(false, |version| { + REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( + &base_db::Version { + pre: base_db::Prerelease::EMPTY, + build: 
base_db::BuildMetadata::EMPTY, + major: version.major, + minor: version.minor, + patch: version.patch, + }, + ) + }); + match self.mac.err() { + Some(e) => ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), + ExpandError::other(format!("invalid macro definition: {e}")), + ), + None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into), + } + } + + pub(crate) fn expander( + db: &dyn ExpandDatabase, + def_crate: CrateId, + id: AstId, + ) -> Arc { + let crate_data = &db.crate_graph()[def_crate]; + let is_2021 = crate_data.edition >= Edition::Edition2021; + let (root, map) = crate::db::parse_with_map(db, id.file_id); + let root = root.syntax_node(); + + let transparency = |node| { + // ... would be nice to have the item tree here + let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate); + match &*attrs + .iter() + .find(|it| { + it.path.as_ident().and_then(|it| it.as_str()) + == Some("rustc_macro_transparency") + })? + .token_tree_value()? + .token_trees + { + [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text { + "transparent" => Some(Transparency::Transparent), + "semitransparent" => Some(Transparency::SemiTransparent), + "opaque" => Some(Transparency::Opaque), + _ => None, + }, + _ => None, + } + }; + let toolchain = crate_data.toolchain.as_ref(); + let new_meta_vars = toolchain.as_ref().map_or(false, |version| { + REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( + &base_db::Version { + pre: base_db::Prerelease::EMPTY, + build: base_db::BuildMetadata::EMPTY, + major: version.major, + minor: version.minor, + patch: version.patch, + }, + ) + }); + + let (mac, transparency) = match id.to_ptr(db).to_node(&root) { + ast::Macro::MacroRules(macro_rules) => ( + match macro_rules.token_tree() { + Some(arg) => { + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + map.as_ref(), + map.span_for_range( + macro_rules.macro_rules_token().unwrap().text_range(), + ), + ); + + mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars) + } + None => mbe::DeclarativeMacro::from_err( + mbe::ParseError::Expected("expected a token tree".into()), + is_2021, + ), + }, + transparency(¯o_rules).unwrap_or(Transparency::SemiTransparent), + ), + ast::Macro::MacroDef(macro_def) => ( + match macro_def.body() { + Some(arg) => { + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + map.as_ref(), + map.span_for_range(macro_def.macro_token().unwrap().text_range()), + ); + + mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars) + } + None => mbe::DeclarativeMacro::from_err( + mbe::ParseError::Expected("expected a token tree".into()), + is_2021, + ), + }, + transparency(¯o_def).unwrap_or(Transparency::Opaque), + ), + }; + Arc::new(DeclarativeMacroExpander { mac, transparency }) + } +} diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index 57921543c4..8ddaa3f303 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -245,3 +245,43 @@ pub fn marks_rev( }) .map(|ctx| ctx.outer_mark(db)) } + +pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String { + use crate::db::{InternMacroCallLookupQuery, InternSyntaxContextLookupQuery}; + use base_db::salsa::debug::DebugQueryTable; + + let mut s = String::from("Expansions:"); + let mut entries = InternMacroCallLookupQuery.in_db(db).entries::>(); + entries.sort_by_key(|e| e.key); + for e in entries { + let id = e.key; + let expn_data = 
e.value.as_ref().unwrap(); + s.push_str(&format!( + "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}", + id, + expn_data.kind.file_id(), + expn_data.call_site, + SyntaxContextId::ROOT, // FIXME expn_data.def_site, + expn_data.kind.descr(), + )); + } + + s.push_str("\n\nSyntaxContexts:\n"); + let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::>(); + entries.sort_by_key(|e| e.key); + for e in entries { + struct SyntaxContextDebug<'a>( + &'a dyn ExpandDatabase, + SyntaxContextId, + &'a SyntaxContextData, + ); + + impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.2.fancy_debug(self.1, self.0, f) + } + } + stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap())); + } + s +} diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index bd216ccca8..05f12527a4 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -11,16 +11,18 @@ pub mod attrs; pub mod builtin_attr_macro; pub mod builtin_derive_macro; pub mod builtin_fn_macro; +pub mod change; pub mod db; +pub mod declarative; pub mod eager; pub mod files; -pub mod change; pub mod hygiene; pub mod mod_path; pub mod name; pub mod proc_macro; pub mod quote; pub mod span_map; + mod fixup; use attrs::collect_attrs; @@ -167,7 +169,8 @@ pub struct MacroCallLoc { pub krate: CrateId, /// Some if this is a macro call for an eager macro. Note that this is `None` /// for the eager input macro file. - // FIXME: This seems bad to save in an interned structure + // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing + // leakage problems here eager: Option>, pub kind: MacroCallKind, pub call_site: Span, @@ -220,7 +223,7 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, - // FIXME: This is being interned, subtrees can very quickly differ just slightly causing + // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing // leakage problems here attr_args: Option>, /// Syntactical index of the invoking `#[attribute]`. diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index 9b72e03410..01ccebea20 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -40,7 +40,7 @@ pub enum PathKind { Crate, /// Absolute path (::foo) Abs, - // FIXME: Remove this + // FIXME: Can we remove this somehow? /// `$crate` from macro expansion DollarCrate(CrateId), } @@ -59,7 +59,8 @@ impl ModPath { } pub fn from_segments(kind: PathKind, segments: impl IntoIterator) -> ModPath { - let segments = segments.into_iter().collect(); + let mut segments: SmallVec<_> = segments.into_iter().collect(); + segments.shrink_to_fit(); ModPath { kind, segments } } diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs index 4ec6e657f9..8e624f5585 100644 --- a/crates/hir-expand/src/span_map.rs +++ b/crates/hir-expand/src/span_map.rs @@ -1,10 +1,12 @@ //! Span maps for real files and macro expansions. 
-use span::Span; -use syntax::TextRange; +use span::{FileId, HirFileId, HirFileIdRepr, Span}; +use syntax::{AstNode, TextRange}; use triomphe::Arc; pub use span::RealSpanMap; +use crate::db::ExpandDatabase; + pub type ExpansionSpanMap = span::SpanMap; /// Spanmap for a macro file or a real file @@ -34,7 +36,6 @@ impl mbe::SpanMapper for SpanMapRef<'_> { self.span_for_range(range) } } - impl SpanMap { pub fn span_for_range(&self, range: TextRange) -> Span { match self { @@ -53,6 +54,16 @@ impl SpanMap { Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map), } } + + #[inline] + pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap { + match file_id.repr() { + HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)), + HirFileIdRepr::MacroFile(m) => { + SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1) + } + } + } } impl SpanMapRef<'_> { @@ -63,3 +74,23 @@ impl SpanMapRef<'_> { } } } + +pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc { + use syntax::ast::HasModuleItem; + let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)]; + let ast_id_map = db.ast_id_map(file_id.into()); + let tree = db.parse(file_id).tree(); + // FIXME: Descend into modules and other item containing items that are not annotated with attributes + // and allocate pairs for those as well. This gives us finer grained span anchors resulting in + // better incrementality + pairs.extend( + tree.items() + .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())), + ); + + Arc::new(RealSpanMap::from_file( + file_id, + pairs.into_boxed_slice(), + tree.syntax().text_range().end(), + )) +} diff --git a/rustfmt.toml b/rustfmt.toml index 71007de81b..20bf59547b 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -1,2 +1,2 @@ -reorder_modules = false +reorder_modules = true use_small_heuristics = "Max"
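
The recurring change in db.rs above is that query implementations move out of free functions and become associated functions on the types they produce (AstIdMap::ast_id_map, SpanMap::new, TokenExpander::macro_expander, DeclarativeMacroExpander::expander), with #[salsa::invoke(...)] pointing each trait method at its new home. For readers unfamiliar with that wiring, here is a minimal, self-contained sketch of the pattern against the query-group-style salsa API (crates.io salsa up to 0.16, which rust-analyzer's fork follows); the names SourceDb, LineIndex and file_text are illustrative only and are not part of this patch.

use std::sync::Arc;

// Hypothetical value type computed by a query; the implementation lives on
// the type itself rather than as a free function next to the trait.
#[derive(Debug, PartialEq, Eq)]
struct LineIndex {
    line_starts: Vec<usize>,
}

impl LineIndex {
    // Query implementation as an associated function taking the database as
    // its first argument, so `#[salsa::invoke]` can point at it.
    fn line_index(db: &dyn SourceDb, file_id: u32) -> Arc<LineIndex> {
        let text = db.file_text(file_id);
        let mut line_starts = vec![0];
        line_starts.extend(text.match_indices('\n').map(|(i, _)| i + 1));
        Arc::new(LineIndex { line_starts })
    }
}

#[salsa::query_group(SourceDbStorage)]
trait SourceDb: salsa::Database {
    #[salsa::input]
    fn file_text(&self, file_id: u32) -> Arc<String>;

    // Without `invoke`, salsa would look for a free `fn line_index(..)` in
    // this module; `invoke` redirects it to the associated function instead.
    #[salsa::invoke(LineIndex::line_index)]
    fn line_index(&self, file_id: u32) -> Arc<LineIndex>;
}

#[salsa::database(SourceDbStorage)]
#[derive(Default)]
struct Database {
    storage: salsa::Storage<Self>,
}

impl salsa::Database for Database {}

fn main() {
    let mut db = Database::default();
    db.set_file_text(0, Arc::new("fn main() {}\n".to_owned()));
    assert_eq!(db.line_index(0).line_starts, vec![0, 13]);
}

Keeping only the #[salsa::invoke]-annotated signatures in the ExpandDatabase trait leaves db.rs as a thin catalogue of queries, while the logic sits next to the data it builds, which is what the moves into declarative.rs, hygiene.rs and span_map.rs in this patch achieve.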