From 890eb17b4e1659b22f8113ff99506a2b0eefe5ff Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 29 Sep 2023 12:37:57 +0200 Subject: [PATCH] Replace ID based TokenMap with proper relative text-ranges / spans --- Cargo.lock | 2 + crates/base-db/src/fixture.rs | 33 +- crates/base-db/src/input.rs | 9 +- crates/base-db/src/lib.rs | 1 + crates/base-db/src/span.rs | 166 ++++ crates/cfg/src/tests.rs | 39 +- crates/hir-def/src/attr.rs | 89 ++- crates/hir-def/src/attr/tests.rs | 11 +- crates/hir-def/src/data.rs | 2 +- crates/hir-def/src/expander.rs | 17 +- crates/hir-def/src/generics.rs | 18 +- crates/hir-def/src/item_tree.rs | 7 +- crates/hir-def/src/item_tree/lower.rs | 210 ++++-- crates/hir-def/src/lib.rs | 18 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 65 +- .../hir-def/src/macro_expansion_tests/mod.rs | 97 +-- crates/hir-def/src/nameres/collector.rs | 9 +- crates/hir-def/src/nameres/mod_resolution.rs | 2 +- .../hir-def/src/nameres/tests/incremental.rs | 28 +- crates/hir-expand/src/ast_id_map.rs | 18 +- crates/hir-expand/src/attrs.rs | 45 +- crates/hir-expand/src/builtin_attr_macro.rs | 6 +- crates/hir-expand/src/builtin_derive_macro.rs | 147 ++-- crates/hir-expand/src/builtin_fn_macro.rs | 39 +- crates/hir-expand/src/db.rs | 505 ++++++------- crates/hir-expand/src/eager.rs | 171 +++-- crates/hir-expand/src/hygiene.rs | 130 ++-- crates/hir-expand/src/lib.rs | 399 ++++------ crates/hir-expand/src/quote.rs | 39 +- crates/hir-ty/src/diagnostics/decl_check.rs | 2 +- crates/hir-ty/src/mir/eval.rs | 2 +- crates/hir/src/db.rs | 4 +- crates/hir/src/lib.rs | 2 +- crates/hir/src/semantics.rs | 21 +- crates/hir/src/semantics/source_to_def.rs | 2 +- crates/hir/src/source_analyzer.rs | 2 +- .../src/handlers/extract_module.rs | 2 +- .../src/handlers/fix_visibility.rs | 4 +- .../src/handlers/generate_constant.rs | 2 +- .../src/handlers/generate_enum_variant.rs | 2 +- .../src/handlers/generate_function.rs | 5 +- .../src/handlers/remove_unused_imports.rs | 2 +- .../replace_derive_with_manual_impl.rs | 2 +- crates/ide-completion/src/completions.rs | 2 +- crates/ide-completion/src/completions/mod_.rs | 2 +- crates/ide-db/src/apply_change.rs | 2 +- crates/ide-db/src/lib.rs | 2 +- crates/ide-db/src/rename.rs | 2 +- crates/ide-db/src/search.rs | 3 +- .../ide-db/src/test_data/test_doc_alias.txt | 28 +- .../test_symbol_index_collection.txt | 118 +-- .../src/handlers/missing_fields.rs | 2 +- .../src/handlers/missing_unsafe.rs | 1 + .../src/handlers/no_such_field.rs | 2 +- .../replace_filter_map_next_with_find_map.rs | 2 +- .../src/handlers/type_mismatch.rs | 2 +- .../src/handlers/unresolved_module.rs | 2 +- crates/ide/src/call_hierarchy.rs | 4 +- crates/ide/src/expand_macro.rs | 2 +- crates/ide/src/goto_definition.rs | 8 +- crates/ide/src/rename.rs | 2 +- crates/ide/src/runnables.rs | 2 +- crates/ide/src/static_index.rs | 2 +- .../ide/src/syntax_highlighting/highlight.rs | 2 +- crates/load-cargo/src/lib.rs | 38 +- crates/mbe/src/benchmark.rs | 67 +- crates/mbe/src/lib.rs | 126 +--- crates/mbe/src/syntax_bridge.rs | 707 ++++++------------ crates/mbe/src/syntax_bridge/tests.rs | 24 +- crates/mbe/src/token_map.rs | 198 +++-- crates/proc-macro-api/Cargo.toml | 1 + crates/proc-macro-api/src/msg/flat.rs | 14 + crates/rust-analyzer/src/cargo_target_spec.rs | 11 +- .../rust-analyzer/src/cli/analysis_stats.rs | 2 +- crates/rust-analyzer/src/cli/diagnostics.rs | 2 +- crates/syntax/src/lib.rs | 46 +- crates/syntax/src/parsing/reparsing.rs | 12 +- crates/syntax/src/tests.rs | 2 +- crates/tt/Cargo.toml | 1 + 
crates/tt/src/lib.rs | 43 +- 80 files changed, 1816 insertions(+), 2046 deletions(-) create mode 100644 crates/base-db/src/span.rs diff --git a/Cargo.lock b/Cargo.lock index 5a8d971c3d..90ee0810fa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1263,6 +1263,7 @@ dependencies = [ "serde_json", "snap", "stdx", + "text-size", "tracing", "triomphe", "tt", @@ -2010,6 +2011,7 @@ version = "0.0.0" dependencies = [ "smol_str", "stdx", + "text-size", ] [[package]] diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index 3da555a47a..7236b56f6d 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -8,11 +8,12 @@ use test_utils::{ ESCAPED_CURSOR_MARKER, }; use triomphe::Arc; -use tt::token_id::{Leaf, Subtree, TokenTree}; +use tt::{Leaf, Subtree, TokenTree}; use vfs::{file_set::FileSet, VfsPath}; use crate::{ input::{CrateName, CrateOrigin, LangCrateOrigin}, + span::SpanData, Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env, FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId, @@ -539,10 +540,10 @@ struct IdentityProcMacroExpander; impl ProcMacroExpander for IdentityProcMacroExpander { fn expand( &self, - subtree: &Subtree, - _: Option<&Subtree>, + subtree: &Subtree<SpanData>, + _: Option<&Subtree<SpanData>>, _: &Env, - ) -> Result<Subtree, ProcMacroExpansionError> { + ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> { Ok(subtree.clone()) } } @@ -553,10 +554,10 @@ struct AttributeInputReplaceProcMacroExpander; impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander { fn expand( &self, - _: &Subtree, - attrs: Option<&Subtree>, + _: &Subtree<SpanData>, + attrs: Option<&Subtree<SpanData>>, _: &Env, - ) -> Result<Subtree, ProcMacroExpansionError> { + ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> { attrs .cloned() .ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into())) @@ -568,11 +569,11 @@ struct MirrorProcMacroExpander; impl ProcMacroExpander for MirrorProcMacroExpander { fn expand( &self, - input: &Subtree, - _: Option<&Subtree>, + input: &Subtree<SpanData>, + _: Option<&Subtree<SpanData>>, _: &Env, - ) -> Result<Subtree, ProcMacroExpansionError> { - fn traverse(input: &Subtree) -> Subtree { + ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> { + fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> { let mut token_trees = vec![]; for tt in input.token_trees.iter().rev() { let tt = match tt { @@ -595,13 +596,13 @@ struct ShortenProcMacroExpander; impl ProcMacroExpander for ShortenProcMacroExpander { fn expand( &self, - input: &Subtree, - _: Option<&Subtree>, + input: &Subtree<SpanData>, + _: Option<&Subtree<SpanData>>, _: &Env, - ) -> Result<Subtree, ProcMacroExpansionError> { + ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> { return Ok(traverse(input)); - fn traverse(input: &Subtree) -> Subtree { + fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> { let token_trees = input .token_trees .iter() @@ -613,7 +614,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander { Subtree { delimiter: input.delimiter, token_trees } } - fn modify_leaf(leaf: &Leaf) -> Leaf { + fn modify_leaf(leaf: &Leaf<SpanData>) -> Leaf<SpanData> { let mut leaf = leaf.clone(); match &mut leaf { Leaf::Literal(it) => { diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index e4f78321e2..2fa5c25c91 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -13,9 +13,10 @@ use la_arena::{Arena, Idx}; use rustc_hash::{FxHashMap, FxHashSet}; use syntax::SmolStr; use triomphe::Arc; -use tt::token_id::Subtree; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; +use crate::span::SpanData; // Map from crate id to the name of the crate and path of
the proc-macro. If the value is `None`, // then the crate for the proc-macro hasn't been built yet as the build data is missing. pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>; @@ -255,10 +256,10 @@ pub enum ProcMacroKind { pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { fn expand( &self, - subtree: &Subtree, - attrs: Option<&Subtree>, + subtree: &tt::Subtree<SpanData>, + attrs: Option<&tt::Subtree<SpanData>>, env: &Env, - ) -> Result<Subtree, ProcMacroExpansionError>; + ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>; } #[derive(Debug)] diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 40cfab88af..6dc1629c3b 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -5,6 +5,7 @@ mod input; mod change; pub mod fixture; +pub mod span; use std::panic; diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs new file mode 100644 index 0000000000..1072d937a3 --- /dev/null +++ b/crates/base-db/src/span.rs @@ -0,0 +1,166 @@ +use std::fmt; + +use salsa::InternId; +use vfs::FileId; + +pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>; + +// The first index is always the root node's AstId +pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = + la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0)); + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub struct SyntaxContext; + +pub type SpanData = tt::SpanData<SpanAnchor>; + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct SpanAnchor { + pub file_id: HirFileId, + pub ast_id: ErasedFileAstId, +} + +impl fmt::Debug for SpanAnchor { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish() + } +} + +impl tt::Span for SpanAnchor { + const DUMMY: Self = SpanAnchor { file_id: HirFileId(0), ast_id: ROOT_ERASED_FILE_AST_ID }; +} + +/// Input to the analyzer is a set of files, where each file is identified by +/// `FileId` and contains source code. However, another source of source code in +/// Rust is macros: each macro can be thought of as producing a "temporary +/// file". To assign an id to such a file, we use the id of the macro call that +/// produced the file. So, a `HirFileId` is either a `FileId` (source code +/// written by user), or a `MacroCallId` (source code produced by macro). +/// +/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file +/// containing the call plus the offset of the macro call in the file. Note that +/// this is a recursive definition! However, the size of `HirFileId` is +/// finite (because everything bottoms out at the real `FileId`) and small +/// (`MacroCallId` uses the location interning. You can check details here: +/// <https://en.wikipedia.org/wiki/String_interning>). +/// +/// The two variants are encoded in a single u32 and are differentiated by the MSB. +/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a +/// `MacroCallId`.
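For illustration, a minimal self-contained sketch of the tag-bit scheme this comment describes (names here are hypothetical; the actual implementation is the `HirFileId` code that follows):

const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

// Encode: a FileId keeps its MSB clear, a MacroCallId gets the MSB set.
fn encode_file(file_id: u32) -> u32 {
    assert!(file_id < MACRO_FILE_TAG_MASK);
    file_id
}

fn encode_macro_call(macro_call_id: u32) -> u32 {
    assert!(macro_call_id < MACRO_FILE_TAG_MASK);
    macro_call_id | MACRO_FILE_TAG_MASK
}

// Decode: Ok(file_id) if the MSB is clear, Err(macro_call_id) otherwise.
fn decode(raw: u32) -> Result<u32, u32> {
    if raw & MACRO_FILE_TAG_MASK == 0 {
        Ok(raw)
    } else {
        Err(raw ^ MACRO_FILE_TAG_MASK)
    }
}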
+#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct HirFileId(u32); + +impl From<HirFileId> for u32 { + fn from(value: HirFileId) -> Self { + value.0 + } +} + +impl From<u32> for HirFileId { + fn from(value: u32) -> Self { + HirFileId(value) + } +} + +impl From<MacroCallId> for HirFileId { + fn from(value: MacroCallId) -> Self { + value.as_file() + } +} + +impl fmt::Debug for HirFileId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.repr().fmt(f) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroFile { + pub macro_call_id: MacroCallId, +} + +/// `MacroCallId` identifies a particular macro invocation, like +/// `println!("Hello, {}", world)`. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroCallId(salsa::InternId); +crate::impl_intern_key!(MacroCallId); + +impl MacroCallId { + pub fn as_file(self) -> HirFileId { + MacroFile { macro_call_id: self }.into() + } + + pub fn as_macro_file(self) -> MacroFile { + MacroFile { macro_call_id: self } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub enum HirFileIdRepr { + FileId(FileId), + MacroFile(MacroFile), +} + +impl fmt::Debug for HirFileIdRepr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.0).finish(), + Self::MacroFile(arg0) => { + f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish() + } + } + } +} + +impl From<FileId> for HirFileId { + fn from(FileId(id): FileId) -> Self { + assert!(id < Self::MAX_FILE_ID); + HirFileId(id) + } +} + +impl From<MacroFile> for HirFileId { + fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self { + let id = id.as_u32(); + assert!(id < Self::MAX_FILE_ID); + HirFileId(id | Self::MACRO_FILE_TAG_MASK) + } +} + +impl HirFileId { + const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK; + const MACRO_FILE_TAG_MASK: u32 = 1 << 31; + + #[inline] + pub fn is_macro(self) -> bool { + self.0 & Self::MACRO_FILE_TAG_MASK != 0 + } + + #[inline] + pub fn macro_file(self) -> Option<MacroFile> { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => None, + _ => Some(MacroFile { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), + } + } + + #[inline] + pub fn file_id(self) -> Option<FileId> { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => Some(FileId(self.0)), + _ => None, + } + } + + #[inline] + pub fn repr(self) -> HirFileIdRepr { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => HirFileIdRepr::FileId(FileId(self.0)), + _ => HirFileIdRepr::MacroFile(MacroFile { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), + } + } +} diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs index bdc3f854e0..242929c006 100644 --- a/crates/cfg/src/tests.rs +++ b/crates/cfg/src/tests.rs @@ -2,36 +2,37 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; use mbe::syntax_node_to_token_tree; use syntax::{ast, AstNode}; +use tt::Span; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +struct DummyFile; +impl Span for DummyFile { + const DUMMY: Self = DummyFile; +} + fn assert_parse_result(input: &str, expected: CfgExpr) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt =
source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default()); let cfg = CfgExpr::parse(&tt); assert_eq!(cfg, expected); } fn check_dnf(input: &str, expect: Expect) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default()); let cfg = CfgExpr::parse(&tt); let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); expect.assert_eq(&actual); } fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default()); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); @@ -40,11 +41,9 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { #[track_caller] fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default()); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::>(); diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index fa3025e030..35c9d63979 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -7,7 +7,10 @@ mod tests; use std::{hash::Hash, ops, slice::Iter as SliceIter}; -use base_db::CrateId; +use base_db::{ + span::{ErasedFileAstId, SpanAnchor}, + CrateId, +}; use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ @@ -28,8 +31,8 @@ use crate::{ lang_item::LangItem, nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource}, - AdtId, AssocItemLoc, AttrDefId, EnumId, GenericParamId, ItemLoc, LocalEnumVariantId, - LocalFieldId, Lookup, MacroId, VariantId, + AdtId, AssocItemLoc, AttrDefId, EnumId, GenericDefId, GenericParamId, ItemLoc, + LocalEnumVariantId, LocalFieldId, Lookup, MacroId, VariantId, }; #[derive(Default, Debug, Clone, PartialEq, Eq)] @@ -415,26 +418,48 @@ impl AttrsWithOwner { AttrDefId::StaticId(it) => attrs_from_item_tree_assoc(db, it), AttrDefId::FunctionId(it) => attrs_from_item_tree_assoc(db, it), AttrDefId::TypeAliasId(it) => attrs_from_item_tree_assoc(db, it), - AttrDefId::GenericParamId(it) => match it { - GenericParamId::ConstParamId(it) => { - let src = it.parent().child_source(db); - 
RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - ) + AttrDefId::GenericParamId(it) => { + let ast_id = |p| match p { + GenericDefId::AdtId(AdtId::StructId(it)) => { + erased_ast_id_from_item_tree(db, it) + } + GenericDefId::AdtId(AdtId::EnumId(it)) => erased_ast_id_from_item_tree(db, it), + GenericDefId::AdtId(AdtId::UnionId(it)) => erased_ast_id_from_item_tree(db, it), + GenericDefId::TraitId(it) => erased_ast_id_from_item_tree(db, it), + GenericDefId::TraitAliasId(it) => erased_ast_id_from_item_tree(db, it), + GenericDefId::ImplId(it) => erased_ast_id_from_item_tree(db, it), + GenericDefId::EnumVariantId(it) => erased_ast_id_from_item_tree(db, it.parent), + GenericDefId::TypeAliasId(it) => erased_ast_id_from_item_tree_assoc(db, it), + GenericDefId::FunctionId(it) => erased_ast_id_from_item_tree_assoc(db, it), + GenericDefId::ConstId(it) => erased_ast_id_from_item_tree_assoc(db, it), + }; + match it { + GenericParamId::ConstParamId(it) => { + let src = it.parent().child_source(db); + RawAttrs::from_attrs_owner( + db.upcast(), + SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent()) }, + src.with_value(&src.value[it.local_id()]), + ) + } + GenericParamId::TypeParamId(it) => { + let src = it.parent().child_source(db); + RawAttrs::from_attrs_owner( + db.upcast(), + SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent()) }, + src.with_value(&src.value[it.local_id()]), + ) + } + GenericParamId::LifetimeParamId(it) => { + let src = it.parent.child_source(db); + RawAttrs::from_attrs_owner( + db.upcast(), + SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent) }, + src.with_value(&src.value[it.local_id]), + ) + } } - GenericParamId::TypeParamId(it) => { - let src = it.parent().child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - ) - } - GenericParamId::LifetimeParamId(it) => { - let src = it.parent.child_source(db); - RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id])) - } - }, + } AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it), AttrDefId::ExternCrateId(it) => attrs_from_item_tree_loc(db, it), AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it), @@ -638,6 +663,26 @@ fn any_has_attrs( id.lookup(db).source(db).map(ast::AnyHasAttrs::new) } +fn erased_ast_id_from_item_tree( + db: &dyn DefDatabase, + lookup: impl Lookup>, +) -> ErasedFileAstId { + let id = lookup.lookup(db).id; + let tree = id.item_tree(db); + let mod_item = N::id_to_mod_item(id.value); + mod_item.ast_id(&tree).erase() +} + +fn erased_ast_id_from_item_tree_assoc( + db: &dyn DefDatabase, + lookup: impl Lookup>, +) -> ErasedFileAstId { + let id = lookup.lookup(db).id; + let tree = id.item_tree(db); + let mod_item = N::id_to_mod_item(id.value); + mod_item.ast_id(&tree).erase() +} + fn attrs_from_item_tree(db: &dyn DefDatabase, id: ItemTreeId) -> RawAttrs { let tree = id.item_tree(db); let mod_item = N::id_to_mod_item(id.value); diff --git a/crates/hir-def/src/attr/tests.rs b/crates/hir-def/src/attr/tests.rs index e4c8d446af..ad101e9bdf 100644 --- a/crates/hir-def/src/attr/tests.rs +++ b/crates/hir-def/src/attr/tests.rs @@ -1,17 +1,18 @@ //! This module contains tests for doc-expression parsing. //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`. 
+use base_db::span::SpanAnchor; use mbe::syntax_node_to_token_tree; use syntax::{ast, AstNode}; +use tt::Span; use crate::attr::{DocAtom, DocExpr}; fn assert_parse_result(input: &str, expected: DocExpr) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = + syntax_node_to_token_tree(tt.syntax(), SpanAnchor::DUMMY, 0.into(), &Default::default()); let cfg = DocExpr::parse(&tt); assert_eq!(cfg, expected); } diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 72ccc17486..4083d49791 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -663,7 +663,7 @@ impl<'a> AssocItemCollector<'a> { self.module_id.local_id, MacroCallKind::Attr { ast_id, - attr_args: Arc::new((tt::Subtree::empty(), Default::default())), + attr_args: Arc::new(tt::Subtree::empty()), invoc_attr_index: attr.id, }, attr.path().clone(), diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index 6db8398bc9..b7cffb5762 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -1,6 +1,9 @@ //! Macro expansion utilities. -use base_db::CrateId; +use base_db::{ + span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + CrateId, +}; use cfg::CfgOptions; use drop_bomb::DropBomb; use hir_expand::{ @@ -118,7 +121,17 @@ impl Expander { } pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs { - Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene)) + Attrs::filter( + db, + self.krate, + RawAttrs::new( + db.upcast(), + // Usin `ROOT_ERASED_FILE_AST_ID` here is fine as this is only used for cfg checking + SpanAnchor { file_id: self.current_file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + owner, + &self.hygiene, + ), + ) } pub(crate) fn cfg_options(&self) -> &CfgOptions { diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index fac90e6630..6d656bf948 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -21,7 +21,7 @@ use crate::{ db::DefDatabase, dyn_map::{keys, DynMap}, expander::Expander, - item_tree::{AttrOwner, ItemTree}, + item_tree::ItemTree, lower::LowerCtx, nameres::{DefMap, MacroSubNs}, src::{HasChildSource, HasSource}, @@ -250,7 +250,10 @@ impl GenericParams { &mut self, lower_ctx: &LowerCtx<'_>, node: &dyn HasGenericParams, - add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam), + add_param_attrs: impl FnMut( + Either, Idx>, + ast::GenericParam, + ), ) { if let Some(params) = node.generic_param_list() { self.fill_params(lower_ctx, params, add_param_attrs) @@ -275,7 +278,10 @@ impl GenericParams { &mut self, lower_ctx: &LowerCtx<'_>, params: ast::GenericParamList, - mut add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam), + mut add_param_attrs: impl FnMut( + Either, Idx>, + ast::GenericParam, + ), ) { for type_or_const_param in params.type_or_const_params() { match type_or_const_param { @@ -297,7 +303,7 @@ impl GenericParams { type_param.type_bound_list(), Either::Left(type_ref), ); - add_param_attrs(idx.into(), ast::GenericParam::TypeParam(type_param)); + add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param)); } ast::TypeOrConstParam::Const(const_param) => { let name = 
const_param.name().map_or_else(Name::missing, |it| it.as_name()); @@ -310,7 +316,7 @@ impl GenericParams { default: ConstRef::from_const_param(lower_ctx, &const_param), }; let idx = self.type_or_consts.alloc(param.into()); - add_param_attrs(idx.into(), ast::GenericParam::ConstParam(const_param)); + add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param)); } } } @@ -325,7 +331,7 @@ impl GenericParams { lifetime_param.type_bound_list(), Either::Right(lifetime_ref), ); - add_param_attrs(idx.into(), ast::GenericParam::LifetimeParam(lifetime_param)); + add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param)); } } diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 70b96b2573..3bea91ee61 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -43,7 +43,10 @@ use std::{ }; use ast::{AstNode, HasName, StructKind}; -use base_db::CrateId; +use base_db::{ + span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + CrateId, +}; use either::Either; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, @@ -119,7 +122,7 @@ impl ItemTree { let mut item_tree = match_ast! { match syntax { ast::SourceFile(file) => { - top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene())); + top_attrs = Some(RawAttrs::new(db.upcast(), SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, &file, ctx.hygiene())); ctx.lower_module_items(&file) }, ast::MacroItems(items) => { diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index c898eb5f92..807b2a7bf7 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -2,12 +2,14 @@ use std::collections::hash_map::Entry; +use base_db::span::ErasedFileAstId; use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId}; use syntax::ast::{self, HasModuleItem, HasTypeBounds}; use crate::{ generics::{GenericParams, TypeParamData, TypeParamProvenance}, type_ref::{LifetimeRef, TraitBoundModifier, TraitRef}, + LocalLifetimeParamId, LocalTypeOrConstParamId, }; use super::*; @@ -21,6 +23,7 @@ pub(super) struct Ctx<'a> { tree: ItemTree, source_ast_id_map: Arc, body_ctx: crate::lower::LowerCtx<'a>, + file: HirFileId, } impl<'a> Ctx<'a> { @@ -30,6 +33,7 @@ impl<'a> Ctx<'a> { tree: ItemTree::default(), source_ast_id_map: db.ast_id_map(file), body_ctx: crate::lower::LowerCtx::with_file_id(db, file), + file, } } @@ -77,9 +81,18 @@ impl<'a> Ctx<'a> { } pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree { - self.tree - .attrs - .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.hygiene())); + self.tree.attrs.insert( + AttrOwner::TopLevel, + RawAttrs::new( + self.db.upcast(), + SpanAnchor { + file_id: self.file, + ast_id: self.source_ast_id_map.ast_id(block).erase(), + }, + block, + self.hygiene(), + ), + ); self.tree.top_level = block .statements() .filter_map(|stmt| match stmt { @@ -109,8 +122,7 @@ impl<'a> Ctx<'a> { } fn lower_mod_item(&mut self, item: &ast::Item) -> Option { - let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene()); - let item: ModItem = match item { + let mod_item: ModItem = match item { ast::Item::Struct(ast) => self.lower_struct(ast)?.into(), ast::Item::Union(ast) => self.lower_union(ast)?.into(), ast::Item::Enum(ast) => self.lower_enum(ast)?.into(), @@ -129,10 +141,15 @@ impl<'a> Ctx<'a> { ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(), ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(), }; + let 
attrs = RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id: mod_item.ast_id(&self.tree).erase() }, + item, + self.hygiene(), + ); + self.add_attrs(mod_item.into(), attrs); - self.add_attrs(item.into(), attrs); - - Some(item) + Some(mod_item) } fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) { @@ -146,21 +163,37 @@ impl<'a> Ctx<'a> { } } - fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option { - match item { + fn lower_assoc_item(&mut self, item_node: &ast::AssocItem) -> Option { + let item: AssocItem = match item_node { ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into), ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into), ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()), ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into), - } + }?; + let attrs = RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id: item.ast_id(&self.tree).erase() }, + item_node, + self.hygiene(), + ); + self.add_attrs( + match item { + AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)), + AssocItem::TypeAlias(it) => AttrOwner::ModItem(ModItem::TypeAlias(it)), + AssocItem::Const(it) => AttrOwner::ModItem(ModItem::Const(it)), + AssocItem::MacroCall(it) => AttrOwner::ModItem(ModItem::MacroCall(it)), + }, + attrs, + ); + Some(item) } fn lower_struct(&mut self, strukt: &ast::Struct) -> Option> { let visibility = self.lower_visibility(strukt); let name = strukt.name()?.as_name(); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt); - let fields = self.lower_fields(&strukt.kind()); let ast_id = self.source_ast_id_map.ast_id(strukt); + let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt, ast_id.erase()); + let fields = self.lower_fields(&strukt.kind()); let res = Struct { name, visibility, generic_params, fields, ast_id }; Some(id(self.data().structs.alloc(res))) } @@ -183,8 +216,20 @@ impl<'a> Ctx<'a> { let start = self.next_field_idx(); for field in fields.fields() { if let Some(data) = self.lower_record_field(&field) { + let ast_id = match data.ast_id { + FieldAstId::Record(it) => it.erase(), + FieldAstId::Tuple(it) => it.erase(), + }; let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id }, + &field, + self.hygiene(), + ), + ); } } let end = self.next_field_idx(); @@ -204,8 +249,20 @@ impl<'a> Ctx<'a> { let start = self.next_field_idx(); for (i, field) in fields.fields().enumerate() { let data = self.lower_tuple_field(i, &field); + let ast_id = match data.ast_id { + FieldAstId::Record(it) => it.erase(), + FieldAstId::Tuple(it) => it.erase(), + }; let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id }, + &field, + self.hygiene(), + ), + ); } let end = self.next_field_idx(); IdxRange::new(start..end) @@ -222,12 +279,12 @@ impl<'a> Ctx<'a> { fn lower_union(&mut self, union: &ast::Union) -> Option> { let visibility = self.lower_visibility(union); let name = union.name()?.as_name(); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, union); + let ast_id = self.source_ast_id_map.ast_id(union); + let generic_params = 
self.lower_generic_params(HasImplicitSelf::No, union, ast_id.erase()); let fields = match union.record_field_list() { Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)), None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())), }; - let ast_id = self.source_ast_id_map.ast_id(union); let res = Union { name, visibility, generic_params, fields, ast_id }; Some(id(self.data().unions.alloc(res))) } @@ -235,12 +292,12 @@ impl<'a> Ctx<'a> { fn lower_enum(&mut self, enum_: &ast::Enum) -> Option> { let visibility = self.lower_visibility(enum_); let name = enum_.name()?.as_name(); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_); + let ast_id = self.source_ast_id_map.ast_id(enum_); + let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_, ast_id.erase()); let variants = match &enum_.variant_list() { Some(variant_list) => self.lower_variants(variant_list), None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()), }; - let ast_id = self.source_ast_id_map.ast_id(enum_); let res = Enum { name, visibility, generic_params, variants, ast_id }; Some(id(self.data().enums.alloc(res))) } @@ -249,10 +306,16 @@ impl<'a> Ctx<'a> { let start = self.next_variant_idx(); for variant in variants.variants() { if let Some(data) = self.lower_variant(&variant) { + let ast_id = data.ast_id.erase(); let idx = self.data().variants.alloc(data); self.add_attrs( idx.into(), - RawAttrs::new(self.db.upcast(), &variant, self.hygiene()), + RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id }, + &variant, + self.hygiene(), + ), ); } } @@ -303,28 +366,39 @@ impl<'a> Ctx<'a> { }); self.add_attrs( idx.into(), - RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()), + RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id: ast_id.erase() }, + &self_param, + self.hygiene(), + ), ); has_self_param = true; } for param in param_list.params() { + let ast_id = self.source_ast_id_map.ast_id(¶m); let idx = match param.dotdotdot_token() { - Some(_) => { - let ast_id = self.source_ast_id_map.ast_id(¶m); - self.data() - .params - .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }) - } + Some(_) => self + .data() + .params + .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }), None => { let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty()); let ty = Interned::new(type_ref); - let ast_id = self.source_ast_id_map.ast_id(¶m); self.data() .params .alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) }) } }; - self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), ¶m, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id: ast_id.erase() }, + ¶m, + self.hygiene(), + ), + ); } } let end_param = self.next_param_idx(); @@ -381,7 +455,8 @@ impl<'a> Ctx<'a> { ast_id, flags, }; - res.explicit_generic_params = self.lower_generic_params(HasImplicitSelf::No, func); + res.explicit_generic_params = + self.lower_generic_params(HasImplicitSelf::No, func, ast_id.erase()); Some(id(self.data().functions.alloc(res))) } @@ -394,8 +469,9 @@ impl<'a> Ctx<'a> { let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it)); let visibility = self.lower_visibility(type_alias); let bounds = self.lower_type_bounds(type_alias); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias); let ast_id = self.source_ast_id_map.ast_id(type_alias); + 
let generic_params = + self.lower_generic_params(HasImplicitSelf::No, type_alias, ast_id.erase()); let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id }; Some(id(self.data().type_aliases.alloc(res))) } @@ -443,23 +519,20 @@ impl<'a> Ctx<'a> { fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option> { let name = trait_def.name()?.as_name(); let visibility = self.lower_visibility(trait_def); - let generic_params = - self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def); + let ast_id = self.source_ast_id_map.ast_id(trait_def); + let generic_params = self.lower_generic_params( + HasImplicitSelf::Yes(trait_def.type_bound_list()), + trait_def, + ast_id.erase(), + ); let is_auto = trait_def.auto_token().is_some(); let is_unsafe = trait_def.unsafe_token().is_some(); - let ast_id = self.source_ast_id_map.ast_id(trait_def); let items = trait_def .assoc_item_list() .into_iter() .flat_map(|list| list.assoc_items()) - .filter_map(|item| { - let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); - self.lower_assoc_item(&item).map(|item| { - self.add_attrs(ModItem::from(item).into(), attrs); - item - }) - }) + .filter_map(|item_node| self.lower_assoc_item(&item_node)) .collect(); let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id }; @@ -472,20 +545,23 @@ impl<'a> Ctx<'a> { ) -> Option> { let name = trait_alias_def.name()?.as_name(); let visibility = self.lower_visibility(trait_alias_def); + let ast_id = self.source_ast_id_map.ast_id(trait_alias_def); let generic_params = self.lower_generic_params( HasImplicitSelf::Yes(trait_alias_def.type_bound_list()), trait_alias_def, + ast_id.erase(), ); - let ast_id = self.source_ast_id_map.ast_id(trait_alias_def); let alias = TraitAlias { name, visibility, generic_params, ast_id }; Some(id(self.data().trait_aliases.alloc(alias))) } fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option> { + let ast_id = self.source_ast_id_map.ast_id(impl_def); // Note that trait impls don't get implicit `Self` unlike traits, because here they are a // type alias rather than a type parameter, so this is handled by the resolver. - let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def); + let generic_params = + self.lower_generic_params(HasImplicitSelf::No, impl_def, ast_id.erase()); // FIXME: If trait lowering fails, due to a non PathType for example, we treat this impl // as if it was an non-trait impl. Ideally we want to create a unique missing ref that only // equals itself. @@ -499,14 +575,8 @@ impl<'a> Ctx<'a> { .assoc_item_list() .into_iter() .flat_map(|it| it.assoc_items()) - .filter_map(|item| { - let assoc = self.lower_assoc_item(&item)?; - let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); - self.add_attrs(ModItem::from(assoc).into(), attrs); - Some(assoc) - }) + .filter_map(|item| self.lower_assoc_item(&item)) .collect(); - let ast_id = self.source_ast_id_map.ast_id(impl_def); let res = Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id }; Some(id(self.data().impls.alloc(res))) @@ -572,15 +642,23 @@ impl<'a> Ctx<'a> { // (in other words, the knowledge that they're in an extern block must not be used). // This is because an extern block can contain macros whose ItemTree's top-level items // should be considered to be in an extern block too. 
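As an illustration of the case this comment is guarding against, consider a hypothetical input (assumed example, not from this patch's test data) where a macro invoked inside an extern block expands to items that get their own ItemTree, yet must still be treated as extern-block members:

macro_rules! m {
    () => { fn foreign_fn(); };
}
extern "C" {
    m!(); // expands to `fn foreign_fn();`, which must still count as a foreign item
}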
- let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); - let id: ModItem = match item { - ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(), - ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(), - ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(), - ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(), + let mod_item: ModItem = match &item { + ast::ExternItem::Fn(ast) => self.lower_function(ast)?.into(), + ast::ExternItem::Static(ast) => self.lower_static(ast)?.into(), + ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(), + ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(), }; - self.add_attrs(id.into(), attrs); - Some(id) + let attrs = RawAttrs::new( + self.db.upcast(), + SpanAnchor { + file_id: self.file, + ast_id: mod_item.ast_id(&self.tree).erase(), + }, + &item, + self.hygiene(), + ); + self.add_attrs(mod_item.into(), attrs); + Some(mod_item) }) .collect() }); @@ -593,6 +671,7 @@ impl<'a> Ctx<'a> { &mut self, has_implicit_self: HasImplicitSelf, node: &dyn ast::HasGenericParams, + owner_ast_id: ErasedFileAstId, ) -> Interned { let mut generics = GenericParams::default(); @@ -612,12 +691,21 @@ impl<'a> Ctx<'a> { generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param)); } - let add_param_attrs = |item, param| { - let attrs = RawAttrs::new(self.db.upcast(), ¶m, self.body_ctx.hygiene()); + let add_param_attrs = |item: Either, + param| { + let attrs = RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id: owner_ast_id }, + ¶m, + self.body_ctx.hygiene(), + ); // This is identical to the body of `Ctx::add_attrs()` but we can't call that here // because it requires `&mut self` and the call to `generics.fill()` below also // references `self`. - match self.tree.attrs.entry(item) { + match self.tree.attrs.entry(match item { + Either::Right(id) => id.into(), + Either::Left(id) => id.into(), + }) { Entry::Occupied(mut entry) => { *entry.get_mut() = entry.get().merge(attrs); } diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index fd8f64d670..5f09d7c481 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -84,7 +84,7 @@ use nameres::DefMap; use stdx::impl_from; use syntax::ast; -use ::tt::token_id as tt; +pub use hir_expand::tt; use crate::{ builtin_type::BuiltinType, @@ -1341,15 +1341,13 @@ fn attr_macro_as_call_id( def: MacroDefId, ) -> MacroCallId { let arg = match macro_attr.input.as_deref() { - Some(AttrInput::TokenTree(tt)) => ( - { - let mut tt = tt.0.clone(); - tt.delimiter = tt::Delimiter::UNSPECIFIED; - tt - }, - tt.1.clone(), - ), - _ => (tt::Subtree::empty(), Default::default()), + Some(AttrInput::TokenTree(tt)) => { + let mut tt = tt.as_ref().clone(); + tt.delimiter = tt::Delimiter::UNSPECIFIED; + tt + } + + _ => tt::Subtree::empty(), }; def.as_lazy_macro( diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index d090621324..b5d70052a8 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -15,7 +15,6 @@ use crate::macro_expansion_tests::check; fn token_mapping_smoke_test() { check( r#" -// +tokenids macro_rules! f { ( struct $ident:ident ) => { struct $ident { @@ -27,23 +26,19 @@ macro_rules! f { // +tokenids f!(struct MyTraitMap2); "#, - expect![[r##" -// call ids will be shifted by Shift(30) -// +tokenids -macro_rules! 
f {#0 - (#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9 - struct#10 $#11ident#12 {#13 - map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28 - }#13 - }#9;#29 -}#0 + expect![[r#" +macro_rules! f { + ( struct $ident:ident ) => { + struct $ident { + map: ::std::collections::HashSet<()>, + } + }; +} -// // +tokenids -// f!(struct#1 MyTraitMap2#2); -struct#10 MyTraitMap2#32 {#13 - map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28 -}#13 -"##]], +struct#SpanAnchor(FileId(0), 1)@58..64 MyTraitMap2#SpanAnchor(FileId(0), 2)@23..34 {#SpanAnchor(FileId(0), 1)@72..73 + map#SpanAnchor(FileId(0), 1)@86..89:#SpanAnchor(FileId(0), 1)@89..90 ::std#SpanAnchor(FileId(0), 1)@93..96::collections#SpanAnchor(FileId(0), 1)@98..109::HashSet#SpanAnchor(FileId(0), 1)@111..118<#SpanAnchor(FileId(0), 1)@118..119(#SpanAnchor(FileId(0), 1)@119..120)#SpanAnchor(FileId(0), 1)@120..121>#SpanAnchor(FileId(0), 1)@121..122,#SpanAnchor(FileId(0), 1)@122..123 +}#SpanAnchor(FileId(0), 1)@132..133 +"#]], ); } @@ -71,31 +66,22 @@ f! { "#, - expect![[r##" -// call ids will be shifted by Shift(18) + expect![[r#" // +tokenids -macro_rules! f {#0 - (#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11 - $#12(#13$#14tt#15)#13*#16 - }#11;#17 -}#0 +macro_rules! f { + ($($tt:tt)*) => { + $($tt)* + }; +} -// // +tokenids -// f! { -// fn#1 main#2() { -// 1#5;#6 -// 1.0#7;#8 -// let#9 x#10 =#11 1#12;#13 -// } -// } -fn#19 main#20(#21)#21 {#22 - 1#23;#24 - 1.0#25;#26 - let#27 x#28 =#29 1#30;#31 -}#22 +fn#SpanAnchor(FileId(0), 2)@22..24 main#SpanAnchor(FileId(0), 2)@25..29(#SpanAnchor(FileId(0), 2)@29..30)#SpanAnchor(FileId(0), 2)@30..31 {#SpanAnchor(FileId(0), 2)@32..33 + 1#SpanAnchor(FileId(0), 2)@42..43;#SpanAnchor(FileId(0), 2)@43..44 + 1.0#SpanAnchor(FileId(0), 2)@53..56;#SpanAnchor(FileId(0), 2)@56..57 + let#SpanAnchor(FileId(0), 2)@66..69 x#SpanAnchor(FileId(0), 2)@70..71 =#SpanAnchor(FileId(0), 2)@72..73 1#SpanAnchor(FileId(0), 2)@74..75;#SpanAnchor(FileId(0), 2)@75..76 +}#SpanAnchor(FileId(0), 2)@81..82 -"##]], +"#]], ); } @@ -150,8 +136,7 @@ macro_rules! 
identity { } fn main(foo: ()) { - // format_args/*+tokenids*/!("{} {} {}"#1,#2 format_args#3!#4("{}"#6,#7 0#8),#9 foo#10,#11 identity#12!#13(10#15),#16 "bar"#17) -builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_args#3!#4(#5"{}"#6,#7 0#8)#5,#9 foo#10,#11 identity#12!#13(#1410#15)#14,#16 "bar"#17)#0 + builtin#SpanAnchor(FileId(0), 0)@0..0 ##SpanAnchor(FileId(0), 0)@0..0format_args#SpanAnchor(FileId(0), 0)@0..0 (#SpanAnchor(FileId(0), 6)@25..26"{} {} {}"#SpanAnchor(FileId(0), 6)@26..36,#SpanAnchor(FileId(0), 6)@36..37 format_args#SpanAnchor(FileId(0), 6)@38..49!#SpanAnchor(FileId(0), 6)@49..50(#SpanAnchor(FileId(0), 6)@50..51"{}"#SpanAnchor(FileId(0), 6)@51..55,#SpanAnchor(FileId(0), 6)@55..56 0#SpanAnchor(FileId(0), 6)@57..58)#SpanAnchor(FileId(0), 6)@58..59,#SpanAnchor(FileId(0), 6)@59..60 foo#SpanAnchor(FileId(0), 6)@61..64,#SpanAnchor(FileId(0), 6)@64..65 identity#SpanAnchor(FileId(0), 6)@66..74!#SpanAnchor(FileId(0), 6)@74..75(#SpanAnchor(FileId(0), 6)@75..7610#SpanAnchor(FileId(0), 6)@76..78)#SpanAnchor(FileId(0), 6)@78..79,#SpanAnchor(FileId(0), 6)@79..80 "bar"#SpanAnchor(FileId(0), 6)@81..86)#SpanAnchor(FileId(0), 6)@86..87 } "##]], diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index 8adced4e08..1a672b4605 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -16,21 +16,16 @@ mod proc_macros; use std::{iter, ops::Range, sync}; -use ::mbe::TokenMap; use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase}; use expect_test::Expect; -use hir_expand::{ - db::{DeclarativeMacroExpander, ExpandDatabase}, - AstId, InFile, MacroFile, -}; +use hir_expand::{db::ExpandDatabase, HirFileIdExt, InFile, MacroFile, SpanMap}; use stdx::format_to; use syntax::{ ast::{self, edit::IndentLevel}, - AstNode, SyntaxElement, - SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT}, - SyntaxNode, TextRange, T, + AstNode, + SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT}, + SyntaxNode, T, }; -use tt::token_id::{Subtree, TokenId}; use crate::{ db::DefDatabase, @@ -39,6 +34,7 @@ use crate::{ resolver::HasResolver, src::HasSource, test_db::TestDB, + tt::Subtree, AdtId, AsMacroCall, Lookup, ModuleDefId, }; @@ -88,43 +84,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let mut text_edits = Vec::new(); let mut expansions = Vec::new(); - for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) { - let mut show_token_ids = false; - for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) { - show_token_ids |= comment.to_string().contains("+tokenids"); - } - if !show_token_ids { - continue; - } - - let call_offset = macro_.syntax().text_range().start().into(); - let file_ast_id = db.ast_id_map(source.file_id).ast_id(¯o_); - let ast_id = AstId::new(source.file_id, file_ast_id.upcast()); - - let DeclarativeMacroExpander { mac, def_site_token_map } = - &*db.decl_macro_expander(krate, ast_id); - assert_eq!(mac.err(), None); - let tt = match ¯o_ { - ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(), - ast::Macro::MacroDef(_) => unimplemented!(""), - }; - - let tt_start = tt.syntax().text_range().start(); - tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each( - |token| { - let range = token.text_range().checked_sub(tt_start).unwrap(); - if let Some(id) = def_site_token_map.token_by_range(range) { - let offset = (range.end() + 
tt_start).into(); - text_edits.push((offset..offset, format!("#{}", id.0))); - } - }, - ); - text_edits.push(( - call_offset..call_offset, - format!("// call ids will be shifted by {:?}\n", mac.shift()), - )); - } - for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) { let macro_call = InFile::new(source.file_id, ¯o_call); let res = macro_call @@ -138,10 +97,10 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let macro_file = MacroFile { macro_call_id }; let mut expansion_result = db.parse_macro_expansion(macro_file); expansion_result.err = expansion_result.err.or(res.err); - expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id))); + expansions.push((macro_call.value.clone(), expansion_result)); } - for (call, exp, arg) in expansions.into_iter().rev() { + for (call, exp) in expansions.into_iter().rev() { let mut tree = false; let mut expect_errors = false; let mut show_token_ids = false; @@ -185,27 +144,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream } let range = call.syntax().text_range(); let range: Range = range.into(); - - if show_token_ids { - if let Some((tree, map, _)) = arg.value.as_deref() { - let tt_range = call.token_tree().unwrap().syntax().text_range(); - let mut ranges = Vec::new(); - extract_id_ranges(&mut ranges, map, tree); - for (range, id) in ranges { - let idx = (tt_range.start() + range.end()).into(); - text_edits.push((idx..idx, format!("#{}", id.0))); - } - } - text_edits.push((range.start..range.start, "// ".into())); - call.to_string().match_indices('\n').for_each(|(offset, _)| { - let offset = offset + 1 + range.start; - text_edits.push((offset..offset, "// ".into())); - }); - text_edits.push((range.end..range.end, "\n".into())); - text_edits.push((range.end..range.end, expn_text)); - } else { - text_edits.push((range, expn_text)); - } + text_edits.push((range, expn_text)); } text_edits.sort_by_key(|(range, _)| range.start); @@ -246,20 +185,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream expect.assert_eq(&expanded_text); } -fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) { - tree.token_trees.iter().for_each(|tree| match tree { - tt::TokenTree::Leaf(leaf) => { - let id = match leaf { - tt::Leaf::Literal(it) => it.span, - tt::Leaf::Punct(it) => it.span, - tt::Leaf::Ident(it) => it.span, - }; - ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id))); - } - tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree), - }); -} - fn reindent(indent: IndentLevel, pp: String) -> String { if !pp.contains('\n') { return pp; @@ -276,7 +201,7 @@ fn reindent(indent: IndentLevel, pp: String) -> String { res } -fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String { +fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&SpanMap>) -> String { let mut res = String::new(); let mut prev_kind = EOF; let mut indent_level = 0; @@ -323,8 +248,8 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> Str prev_kind = curr_kind; format_to!(res, "{}", token); if let Some(map) = map { - if let Some(id) = map.token_by_range(token.text_range()) { - format_to!(res, "#{}", id.0); + if let Some(span) = map.span_for_range(token.text_range()) { + format_to!(res, "#{:?}@{:?}", span.anchor, span.range); } } } diff --git a/crates/hir-def/src/nameres/collector.rs 
b/crates/hir-def/src/nameres/collector.rs index 2d4586146d..659e7ed503 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -5,6 +5,7 @@ use std::{cmp::Ordering, iter, mem}; +use ::tt::Span; use base_db::{CrateId, Dependency, Edition, FileId}; use cfg::{CfgExpr, CfgOptions}; use either::Either; @@ -85,8 +86,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI .enumerate() .map(|(idx, it)| { // FIXME: a hacky way to create a Name from string. - let name = - tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() }; + let name = tt::Ident { text: it.name.clone(), span: tt::SpanData::DUMMY }; (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32))) }) .collect()) @@ -471,7 +471,7 @@ impl DefCollector<'_> { directive.module_id, MacroCallKind::Attr { ast_id: ast_id.ast_id, - attr_args: Arc::new((tt::Subtree::empty(), Default::default())), + attr_args: Arc::new(tt::Subtree::empty()), invoc_attr_index: attr.id, }, attr.path().clone(), @@ -2083,8 +2083,7 @@ impl ModCollector<'_, '_> { let name = match attrs.by_key("rustc_builtin_macro").string_value() { Some(it) => { // FIXME: a hacky way to create a Name from string. - name = - tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name(); + name = tt::Ident { text: it.clone(), span: tt::SpanData::DUMMY }.as_name(); &name } None => { diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index 2dcc2c30fe..22802433aa 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -1,7 +1,7 @@ //! This module resolves `mod foo;` declaration to file. use arrayvec::ArrayVec; use base_db::{AnchoredPath, FileId}; -use hir_expand::name::Name; +use hir_expand::{name::Name, HirFileIdExt}; use limit::Limit; use syntax::SmolStr; diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs index 4a86f88e57..977745c476 100644 --- a/crates/hir-def/src/nameres/tests/incremental.rs +++ b/crates/hir-def/src/nameres/tests/incremental.rs @@ -66,7 +66,7 @@ fn typing_inside_a_function_should_not_invalidate_def_map() { #[test] fn typing_inside_a_macro_should_not_invalidate_def_map() { - let (mut db, pos) = TestDB::with_position( + check_def_map_is_not_recomputed( r" //- /lib.rs macro_rules! 
m { @@ -84,27 +84,15 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() { //- /foo/bar.rs $0 m!(X); + + pub struct S {} + ", + r" + m!(Y); + + pub struct S {} ", ); - let krate = db.test_crate(); - { - let events = db.log_executed(|| { - let crate_def_map = db.crate_def_map(krate); - let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); - assert_eq!(module_data.scope.resolutions().count(), 1); - }); - assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}") - } - db.set_file_text(pos.file_id, Arc::from("m!(Y);")); - - { - let events = db.log_executed(|| { - let crate_def_map = db.crate_def_map(krate); - let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); - assert_eq!(module_data.scope.resolutions().count(), 1); - }); - assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}") - } } #[test] diff --git a/crates/hir-expand/src/ast_id_map.rs b/crates/hir-expand/src/ast_id_map.rs index 4072650549..eb43ae37e0 100644 --- a/crates/hir-expand/src/ast_id_map.rs +++ b/crates/hir-expand/src/ast_id_map.rs @@ -12,11 +12,13 @@ use std::{ marker::PhantomData, }; -use la_arena::{Arena, Idx}; +use la_arena::{Arena, Idx, RawIdx}; use profile::Count; use rustc_hash::FxHasher; use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; +pub use base_db::span::ErasedFileAstId; + /// `AstId` points to an AST node in a specific file. pub struct FileAstId { raw: ErasedFileAstId, @@ -62,8 +64,6 @@ impl FileAstId { } } -pub type ErasedFileAstId = Idx; - pub trait AstIdNode: AstNode {} macro_rules! register_ast_id_node { (impl AstIdNode for $($ident:ident),+ ) => { @@ -129,6 +129,11 @@ impl AstIdMap { pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap { assert!(node.parent().is_none()); let mut res = AstIdMap::default(); + + // make sure to allocate the root node + if !should_alloc_id(node.kind()) { + res.alloc(node); + } // By walking the tree in breadth-first order we make sure that parents // get lower ids then children. That is, adding a new child does not // change parent's id. This means that, say, adding a new function to a @@ -155,6 +160,11 @@ impl AstIdMap { res } + /// The [`AstId`] of the root node + pub fn root(&self) -> SyntaxNodePtr { + self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone() + } + pub fn ast_id(&self, item: &N) -> FileAstId { let raw = self.erased_ast_id(item.syntax()); FileAstId { raw, covariant: PhantomData } @@ -164,7 +174,7 @@ impl AstIdMap { AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap() } - pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr { + pub fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr { self.arena[id].clone() } diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 0ec2422b30..9652dd345a 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -1,7 +1,8 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. 
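The attribute changes below thread a `SpanAnchor` into lowering, so attribute token trees carry anchored spans rather than opaque `TokenId`s. As a rough sketch of the span model involved (helper name and offset source assumed, not code from this patch): a span stores a range relative to its anchor node, and adding the anchor node's offset within the file, looked up via the `AstIdMap`, yields the absolute range.

use text_size::{TextRange, TextSize};

// Sketch: resolve an anchor-relative span range back to an absolute file range.
fn anchored_range_to_absolute(relative: TextRange, anchor_offset: TextSize) -> TextRange {
    relative + anchor_offset
}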
use std::{fmt, ops}; -use base_db::CrateId; +use ::tt::Span; +use base_db::{span::SpanAnchor, CrateId}; use cfg::CfgExpr; use either::Either; use intern::Interned; @@ -39,11 +40,16 @@ impl ops::Deref for RawAttrs { impl RawAttrs { pub const EMPTY: Self = Self { entries: None }; - pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self { + pub fn new( + db: &dyn ExpandDatabase, + span_anchor: SpanAnchor, + owner: &dyn ast::HasAttrs, + hygiene: &Hygiene, + ) -> Self { let entries = collect_attrs(owner) .filter_map(|(id, attr)| match attr { Either::Left(attr) => { - attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id)) + attr.meta().and_then(|meta| Attr::from_src(db, span_anchor, meta, hygiene, id)) } Either::Right(comment) => comment.doc_comment().map(|doc| Attr { id, @@ -58,9 +64,13 @@ impl RawAttrs { Self { entries: if entries.is_empty() { None } else { Some(entries) } } } - pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self { + pub fn from_attrs_owner( + db: &dyn ExpandDatabase, + span_anchor: SpanAnchor, + owner: InFile<&dyn ast::HasAttrs>, + ) -> Self { let hygiene = Hygiene::new(db, owner.file_id); - Self::new(db, owner.value, &hygiene) + Self::new(db, span_anchor, owner.value, &hygiene) } pub fn merge(&self, other: Self) -> Self { @@ -190,16 +200,17 @@ pub struct Attr { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum AttrInput { /// `#[attr = "string"]` + // FIXME: This is losing span Literal(SmolStr), /// `#[attr(subtree)]` - TokenTree(Box<(tt::Subtree, mbe::TokenMap)>), + TokenTree(Box), } impl fmt::Display for AttrInput { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()), - AttrInput::TokenTree(tt) => tt.0.fmt(f), + AttrInput::TokenTree(tt) => tt.fmt(f), } } } @@ -207,6 +218,7 @@ impl fmt::Display for AttrInput { impl Attr { fn from_src( db: &dyn ExpandDatabase, + span_anchor: SpanAnchor, ast: ast::Meta, hygiene: &Hygiene, id: AttrId, @@ -219,8 +231,13 @@ impl Attr { }; Some(Interned::new(AttrInput::Literal(value))) } else if let Some(tt) = ast.token_tree() { - let (tree, map) = syntax_node_to_token_tree(tt.syntax()); - Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map))))) + // FIXME: We could also allocate ids for attributes and use the attribute itself as an anchor + let offset = + db.ast_id_map(span_anchor.file_id).get_raw(span_anchor.ast_id).text_range().start(); + // FIXME: Spanmap + let tree = + syntax_node_to_token_tree(tt.syntax(), span_anchor, offset, &Default::default()); + Some(Interned::new(AttrInput::TokenTree(Box::new(tree)))) } else { None }; @@ -233,10 +250,12 @@ impl Attr { hygiene: &Hygiene, id: AttrId, ) -> Option { - let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem); + // FIXME: Unecessary roundtrip tt -> ast -> tt + let (parse, _map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem); let ast = ast::Meta::cast(parse.syntax_node())?; - Self::from_src(db, ast, hygiene, id) + // FIXME: we discard spans here! + Self::from_src(db, SpanAnchor::DUMMY, ast, hygiene, id) } pub fn path(&self) -> &ModPath { @@ -256,7 +275,7 @@ impl Attr { /// #[path(ident)] pub fn single_ident_value(&self) -> Option<&tt::Ident> { match self.input.as_deref()? 
{ - AttrInput::TokenTree(tt) => match &*tt.0.token_trees { + AttrInput::TokenTree(tt) => match &*tt.token_trees { [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident), _ => None, }, @@ -267,7 +286,7 @@ impl Attr { /// #[path TokenTree] pub fn token_tree_value(&self) -> Option<&Subtree> { match self.input.as_deref()? { - AttrInput::TokenTree(tt) => Some(&tt.0), + AttrInput::TokenTree(tt) => Some(tt), _ => None, } } diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs index 4ee12e2f21..de8c0ac810 100644 --- a/crates/hir-expand/src/builtin_attr_macro.rs +++ b/crates/hir-expand/src/builtin_attr_macro.rs @@ -1,5 +1,7 @@ //! Builtin attributes. +use ::tt::Span; + use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind}; macro_rules! register_builtin { @@ -98,7 +100,7 @@ fn derive_attr_expand( ) -> ExpandResult { let loc = db.lookup_intern_macro_call(id); let derives = match &loc.kind { - MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0, + MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => attr_args, _ => return ExpandResult::ok(tt::Subtree::empty()), }; pseudo_derive_attr_expansion(tt, derives) @@ -112,7 +114,7 @@ pub fn pseudo_derive_attr_expansion( tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char, spacing: tt::Spacing::Alone, - span: tt::TokenId::unspecified(), + span: tt::SpanData::DUMMY, })) }; diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs index ecc8b407a9..16cce35c13 100644 --- a/crates/hir-expand/src/builtin_derive_macro.rs +++ b/crates/hir-expand/src/builtin_derive_macro.rs @@ -1,18 +1,20 @@ //! Builtin derives. -use ::tt::Ident; +use ::tt::Span; use base_db::{CrateOrigin, LangCrateOrigin}; use itertools::izip; -use mbe::TokenMap; use rustc_hash::FxHashSet; use stdx::never; use tracing::debug; use crate::{ name::{AsName, Name}, - tt::{self, TokenId}, + tt, SpanMap, +}; +use syntax::{ + ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}, + TextSize, }; -use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}; use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId}; @@ -29,7 +31,7 @@ macro_rules! 
register_builtin { db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - token_map: &TokenMap, + token_map: &SpanMap, ) -> ExpandResult { let expander = match *self { $( BuiltinDeriveExpander::$trait => $expand, )* @@ -71,7 +73,7 @@ enum VariantShape { } fn tuple_field_iterator(n: usize) -> impl Iterator { - (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified())) + (0..n).map(|it| tt::Ident::new(format!("f{it}"), tt::SpanData::DUMMY)) } impl VariantShape { @@ -117,7 +119,7 @@ impl VariantShape { } } - fn from(tm: &TokenMap, value: Option) -> Result { + fn from(tm: &SpanMap, value: Option) -> Result { let r = match value { None => VariantShape::Unit, Some(FieldList::RecordFieldList(it)) => VariantShape::Struct( @@ -189,8 +191,8 @@ struct BasicAdtInfo { associated_types: Vec, } -fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result { - let (name, generic_param_list, shape) = match &adt { +fn parse_adt(tm: &SpanMap, adt: &ast::Adt) -> Result { + let (name, generic_param_list, shape) = match adt { ast::Adt::Struct(it) => ( it.name(), it.generic_param_list(), @@ -234,21 +236,44 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result match this { Some(it) => { param_type_set.insert(it.as_name()); - mbe::syntax_node_to_token_tree(it.syntax()).0 + mbe::syntax_node_to_token_tree( + it.syntax(), + tm.span_for_range(it.syntax().first_token().unwrap().text_range()) + .unwrap() + .anchor, + TextSize::from(0), + tm, + ) } None => tt::Subtree::empty(), } }; let bounds = match ¶m { - ast::TypeOrConstParam::Type(it) => { - it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0) - } + ast::TypeOrConstParam::Type(it) => it.type_bound_list().map(|it| { + mbe::syntax_node_to_token_tree( + it.syntax(), + tm.span_for_range(it.syntax().first_token().unwrap().text_range()) + .unwrap() + .anchor, + TextSize::from(0), + tm, + ) + }), ast::TypeOrConstParam::Const(_) => None, }; let ty = if let ast::TypeOrConstParam::Const(param) = param { let ty = param .ty() - .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0) + .map(|ty| { + mbe::syntax_node_to_token_tree( + ty.syntax(), + tm.span_for_range(ty.syntax().first_token().unwrap().text_range()) + .unwrap() + .anchor, + TextSize::from(0), + tm, + ) + }) .unwrap_or_else(tt::Subtree::empty); Some(ty) } else { @@ -282,20 +307,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name(); param_type_set.contains(&name).then_some(p) }) - .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0) + .map(|it| { + mbe::syntax_node_to_token_tree( + it.syntax(), + tm.span_for_range(it.syntax().first_token().unwrap().text_range()).unwrap().anchor, + TextSize::from(0), + tm, + ) + }) .collect(); let name_token = name_to_token(&tm, name)?; Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types }) } -fn name_to_token(token_map: &TokenMap, name: Option) -> Result { +fn name_to_token(token_map: &SpanMap, name: Option) -> Result { let name = name.ok_or_else(|| { debug!("parsed item has no name"); ExpandError::other("missing name") })?; - let name_token_id = - token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified); - let name_token = tt::Ident { span: name_token_id, text: name.text().into() }; + let span = token_map.span_for_range(name.syntax().text_range()).unwrap(); + let name_token = tt::Ident { span, text: name.text().into() }; Ok(name_token) } @@ -332,7 +363,7 @@ fn name_to_token(token_map: &TokenMap, name: Option) -> 
Result tt::Subtree, ) -> ExpandResult { @@ -393,7 +424,7 @@ fn copy_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {}) @@ -403,16 +434,13 @@ fn clone_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| { if matches!(adt.shape, AdtShape::Union) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; + let star = + tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; return quote! { fn clone(&self) -> Self { #star self @@ -420,11 +448,8 @@ fn clone_expand( }; } if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; + let star = + tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; return quote! { fn clone(&self) -> Self { match #star self {} @@ -452,16 +477,14 @@ fn clone_expand( } /// This function exists since `quote! { => }` doesn't work. -fn fat_arrow() -> ::tt::Subtree { - let eq = - tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() }; +fn fat_arrow() -> tt::Subtree { + let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::SpanData::DUMMY }; quote! { #eq> } } /// This function exists since `quote! { && }` doesn't work. -fn and_and() -> ::tt::Subtree { - let and = - tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() }; +fn and_and() -> tt::Subtree { + let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::SpanData::DUMMY }; quote! { #and& } } @@ -469,7 +492,7 @@ fn default_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| { @@ -509,7 +532,7 @@ fn debug_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| { @@ -540,11 +563,8 @@ fn debug_expand( }, }; if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; + let star = + tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; return quote! { fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result { match #star self {} @@ -590,7 +610,7 @@ fn hash_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| { @@ -599,11 +619,8 @@ fn hash_expand( return quote! {}; } if matches!(&adt.shape, AdtShape::Enum { variants, .. 
} if variants.is_empty()) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; + let star = + tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; return quote! { fn hash(&self, ra_expand_state: &mut H) { match #star self {} @@ -644,7 +661,7 @@ fn eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {}) @@ -654,7 +671,7 @@ fn partial_eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| { @@ -674,14 +691,14 @@ fn partial_eq_expand( } [first, rest @ ..] => { let rest = rest.iter().map(|it| { - let t1 = Ident::new(format!("{}_self", it.text), it.span); - let t2 = Ident::new(format!("{}_other", it.text), it.span); + let t1 = tt::Ident::new(format!("{}_self", it.text), it.span); + let t2 = tt::Ident::new(format!("{}_other", it.text), it.span); let and_and = and_and(); quote!(#and_and #t1 .eq( #t2 )) }); let first = { - let t1 = Ident::new(format!("{}_self", first.text), first.span); - let t2 = Ident::new(format!("{}_other", first.text), first.span); + let t1 = tt::Ident::new(format!("{}_self", first.text), first.span); + let t2 = tt::Ident::new(format!("{}_other", first.text), first.span); quote!(#t1 .eq( #t2 )) }; quote!(#first ##rest) @@ -708,11 +725,11 @@ fn self_and_other_patterns( name: &tt::Ident, ) -> (Vec, Vec) { let self_patterns = adt.shape.as_pattern_map(name, |it| { - let t = Ident::new(format!("{}_self", it.text), it.span); + let t = tt::Ident::new(format!("{}_self", it.text), it.span); quote!(#t) }); let other_patterns = adt.shape.as_pattern_map(name, |it| { - let t = Ident::new(format!("{}_other", it.text), it.span); + let t = tt::Ident::new(format!("{}_other", it.text), it.span); quote!(#t) }); (self_patterns, other_patterns) @@ -722,7 +739,7 @@ fn ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| { @@ -752,8 +769,8 @@ fn ord_expand( |(pat1, pat2, fields)| { let mut body = quote!(#krate::cmp::Ordering::Equal); for f in fields.into_iter().rev() { - let t1 = Ident::new(format!("{}_self", f.text), f.span); - let t2 = Ident::new(format!("{}_other", f.text), f.span); + let t1 = tt::Ident::new(format!("{}_self", f.text), f.span); + let t2 = tt::Ident::new(format!("{}_other", f.text), f.span); body = compare(krate, quote!(#t1), quote!(#t2), body); } let fat_arrow = fat_arrow(); @@ -784,7 +801,7 @@ fn partial_ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! 
{ #krate::cmp::PartialOrd }, |adt| { @@ -817,8 +834,8 @@ fn partial_ord_expand( |(pat1, pat2, fields)| { let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal)); for f in fields.into_iter().rev() { - let t1 = Ident::new(format!("{}_self", f.text), f.span); - let t2 = Ident::new(format!("{}_other", f.text), f.span); + let t1 = tt::Ident::new(format!("{}_self", f.text), f.span); + let t2 = tt::Ident::new(format!("{}_other", f.text), f.span); body = compare(krate, quote!(#t1), quote!(#t2), body); } let fat_arrow = fat_arrow(); diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index a04de10b89..adbe49473a 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -1,17 +1,22 @@ //! Builtin macro -use base_db::{AnchoredPath, Edition, FileId}; +use base_db::{ + span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + AnchoredPath, Edition, FileId, +}; use cfg::CfgExpr; use either::Either; -use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap}; +use mbe::{parse_exprs_with_sep, parse_to_token_tree}; use syntax::{ ast::{self, AstToken}, SmolStr, }; use crate::{ - db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId, - MacroCallLoc, + db::ExpandDatabase, + name, quote, + tt::{self, Span}, + EagerCallInfo, ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc, }; macro_rules! register_builtin { @@ -110,7 +115,7 @@ register_builtin! { } const DOLLAR_CRATE: tt::Ident = - tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() }; + tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::SpanData::DUMMY }; fn module_path_expand( _db: &dyn ExpandDatabase, @@ -131,7 +136,7 @@ fn line_expand( delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: "0u32".into(), - span: tt::Span::UNSPECIFIED, + span: tt::SpanData::DUMMY, }))], }) } @@ -179,7 +184,7 @@ fn assert_expand( token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone, - span: tt::TokenId::unspecified(), + span: tt::SpanData::DUMMY, }))], }; let cond = cond.clone(); @@ -446,7 +451,7 @@ fn concat_bytes_expand( } } } - let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() }; + let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::SpanData::DUMMY }; ExpandResult { value: quote!([#ident]), err } } @@ -494,7 +499,7 @@ fn concat_idents_expand( } } } - let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() }; + let ident = tt::Ident { text: ident.into(), span: tt::SpanData::DUMMY }; ExpandResult { value: quote!(#ident), err } } @@ -533,15 +538,16 @@ fn include_expand( _tt: &tt::Subtree, ) -> ExpandResult { match db.include_expand(arg_id) { - Ok((res, _)) => ExpandResult::ok(res.0.clone()), + Ok((res, _)) => ExpandResult::ok(res.as_ref().clone()), Err(e) => ExpandResult::new(tt::Subtree::empty(), e), } } +// FIXME: Check if this is still needed now after the token map rewrite pub(crate) fn include_arg_to_tt( db: &dyn ExpandDatabase, arg_id: MacroCallId, -) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> { +) -> Result<(triomphe::Arc, FileId), ExpandError> { let loc = db.lookup_intern_macro_call(arg_id); let Some(EagerCallInfo { arg, arg_id, .. 
}) = loc.eager.as_deref() else { panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager); @@ -549,9 +555,12 @@ pub(crate) fn include_arg_to_tt( let path = parse_string(&arg.0)?; let file_id = relative_file(db, *arg_id, &path, false)?; - let (subtree, map) = - parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?; - Ok((triomphe::Arc::new((subtree, map)), file_id)) + let subtree = parse_to_token_tree( + &db.file_text(file_id), + SpanAnchor { file_id: file_id.into(), ast_id: ROOT_ERASED_FILE_AST_ID }, + ) + .ok_or(mbe::ExpandError::ConversionError)?; + Ok((triomphe::Arc::new(subtree), file_id)) } fn include_bytes_expand( @@ -568,7 +577,7 @@ fn include_bytes_expand( delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: r#"b"""#.into(), - span: tt::TokenId::unspecified(), + span: tt::SpanData::DUMMY, }))], }; ExpandResult::ok(res) diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index ff0d279d8c..32ba7b2f91 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -1,22 +1,25 @@ //! Defines database & queries for macro expansion. -use base_db::{salsa, CrateId, Edition, SourceDatabase}; +use base_db::{ + salsa, + span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + CrateId, Edition, SourceDatabase, +}; use either::Either; use limit::Limit; -use mbe::{syntax_node_to_token_tree, ValueResult}; -use rustc_hash::FxHashSet; +use mbe::{map_from_syntax_node, syntax_node_to_token_tree, ValueResult}; use syntax::{ ast::{self, HasAttrs, HasDocComments}, - AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, + AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, TextSize, T, }; use triomphe::Arc; use crate::{ ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, - builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander, + builtin_fn_macro::EagerExpander, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, - MacroDefKind, MacroFile, ProcMacroExpander, + MacroDefKind, MacroFile, ProcMacroExpander, SpanMap, SyntaxContext, SyntaxContextId, }; /// Total limit on the number of tokens produced by any macro invocation. @@ -30,8 +33,7 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576); #[derive(Debug, Clone, Eq, PartialEq)] /// Old-style `macro_rules` or the new macros 2.0 pub struct DeclarativeMacroExpander { - pub mac: mbe::DeclarativeMacro, - pub def_site_token_map: mbe::TokenMap, + pub mac: mbe::DeclarativeMacro, } impl DeclarativeMacroExpander { @@ -41,21 +43,14 @@ impl DeclarativeMacroExpander { tt::Subtree::empty(), ExpandError::other(format!("invalid macro definition: {e}")), ), - None => self.mac.expand(tt).map_err(Into::into), + None => self.mac.expand(&tt).map_err(Into::into), } } - - pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId { - self.mac.map_id_down(token_id) - } - - pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { - self.mac.map_id_up(token_id) - } } #[derive(Debug, Clone, Eq, PartialEq)] pub enum TokenExpander { + /// Old-style `macro_rules` or the new macros 2.0 DeclarativeMacro(Arc), /// Stuff like `line!` and `file!`. 
BuiltIn(BuiltinFnLikeExpander), @@ -69,31 +64,6 @@ pub enum TokenExpander { ProcMacro(ProcMacroExpander), } -// FIXME: Get rid of these methods -impl TokenExpander { - pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { - match self { - TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id), - TokenExpander::BuiltIn(..) - | TokenExpander::BuiltInEager(..) - | TokenExpander::BuiltInAttr(..) - | TokenExpander::BuiltInDerive(..) - | TokenExpander::ProcMacro(..) => id, - } - } - - pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { - match self { - TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id), - TokenExpander::BuiltIn(..) - | TokenExpander::BuiltInEager(..) - | TokenExpander::BuiltInAttr(..) - | TokenExpander::BuiltInDerive(..) - | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call), - } - } -} - #[salsa::query_group(ExpandDatabaseStorage)] pub trait ExpandDatabase: SourceDatabase { fn ast_id_map(&self, file_id: HirFileId) -> Arc; @@ -109,7 +79,7 @@ pub trait ExpandDatabase: SourceDatabase { fn parse_macro_expansion( &self, macro_file: MacroFile, - ) -> ExpandResult<(Parse, Arc)>; + ) -> ExpandResult<(Parse, Arc)>; /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the /// reason why we use salsa at all. @@ -118,23 +88,16 @@ pub trait ExpandDatabase: SourceDatabase { /// to be incremental. #[salsa::interned] fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId; + #[salsa::interned] + fn intern_syntax_context(&self, ctx: SyntaxContext) -> SyntaxContextId; - /// Lowers syntactic macro call to a token tree representation. - #[salsa::transparent] + /// Lowers syntactic macro call to a token tree representation. That's a firewall + /// query, only typing in the macro call itself changes the returned + /// subtree. fn macro_arg( &self, id: MacroCallId, - ) -> ValueResult< - Option>, - Arc>, - >; - /// Extracts syntax node, corresponding to a macro call. That's a firewall - /// query, only typing in the macro call itself changes the returned - /// subtree. - fn macro_arg_node( - &self, - id: MacroCallId, - ) -> ValueResult, Arc>>; + ) -> ValueResult>, Arc>>; /// Fetches the expander for this macro. #[salsa::transparent] fn macro_expander(&self, id: MacroDefId) -> TokenExpander; @@ -152,10 +115,7 @@ pub trait ExpandDatabase: SourceDatabase { fn include_expand( &self, arg_id: MacroCallId, - ) -> Result< - (triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId), - ExpandError, - >; + ) -> Result<(triomphe::Arc, base_db::FileId), ExpandError>; /// Special case of the previous query for procedural macros. We can't LRU /// proc macros, since they are not deterministic in general, and /// non-determinism breaks salsa in a very, very, very bad way. 
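Illustration (not part of the patch): the queries above trade `TokenId`-indexed maps for anchored spans. A hedged sketch of the core shape, with deliberately simplified id types; the real `SpanAnchor`/`SpanData` come from `base_db::span`, as the imports elsewhere in this patch show:

    use text_size::{TextRange, TextSize};

    // Simplified shapes mirroring the patch: a span stores a range that is
    // *relative* to an anchoring AST node instead of absolute in the file.
    struct SpanAnchor {
        file_id: u32, // stands in for `HirFileId`
        ast_id: u32,  // stands in for `ErasedFileAstId`
    }

    struct SpanData {
        range: TextRange,
        anchor: SpanAnchor,
    }

    // Resolving a span to an absolute range: find where the anchor node
    // starts in its file (cf. the `get_raw(..).text_range().start()` lookups
    // in the hunks above) and offset the relative range by that position.
    fn to_absolute(span: &SpanData, anchor_start: TextSize) -> TextRange {
        span.range + anchor_start
    }

Keeping ranges relative to an anchor means an edit elsewhere in the file moves the anchor rather than invalidating every stored range, which is what makes the span-carrying query results above cacheable.
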
@@ -181,21 +141,19 @@ pub fn expand_speculative( token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { let loc = db.lookup_intern_macro_call(actual_macro_call); - let token_range = token_to_map.text_range(); + let file_id = loc.kind.file_id(); // Build the subtree and token mapping for the speculative args - let censor = censor_for_macro_input(&loc, speculative_args); - let mut fixups = fixup::fixup_syntax(speculative_args); - fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( + let _censor = censor_for_macro_input(&loc, speculative_args); + let mut tt = mbe::syntax_node_to_token_tree( speculative_args, - fixups.token_map, - fixups.next_id, - fixups.replace, - fixups.append, + // we don't leak these spans into any query so it's fine to make them absolute + SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + TextSize::new(0), + &Default::default(), ); - let (attr_arg, token_id) = match loc.kind { + let attr_arg = match loc.kind { MacroCallKind::Attr { invoc_attr_index, .. } => { let attr = if loc.def.is_attribute_derive() { // for pseudo-derive expansion we actually pass the attribute itself only @@ -210,48 +168,27 @@ pub fn expand_speculative( }?; match attr.token_tree() { Some(token_tree) => { - let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax()); - tree.delimiter = tt::Delimiter::unspecified(); + let mut tree = syntax_node_to_token_tree( + token_tree.syntax(), + SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + TextSize::new(0), + &Default::default(), + ); + tree.delimiter = tt::Delimiter::UNSPECIFIED; - let shift = mbe::Shift::new(&tt); - shift.shift_all(&mut tree); - - let token_id = if token_tree.syntax().text_range().contains_range(token_range) { - let attr_input_start = - token_tree.left_delimiter_token()?.text_range().start(); - let range = token_range.checked_sub(attr_input_start)?; - let token_id = shift.shift(map.token_by_range(range)?); - Some(token_id) - } else { - None - }; - (Some(tree), token_id) + Some(tree) } - _ => (None, None), - } - } - _ => (None, None), - }; - let token_id = match token_id { - Some(token_id) => token_id, - // token wasn't inside an attribute input so it has to be in the general macro input - None => { - let range = token_range.checked_sub(speculative_args.text_range().start())?; - let token_id = spec_args_tmap.token_by_range(range)?; - match loc.def.kind { - MacroDefKind::Declarative(it) => { - db.decl_macro_expander(loc.krate, it).map_id_down(token_id) - } - _ => token_id, + _ => None, } } + _ => None, }; // Do the actual expansion; we need to directly expand the proc macro due to the attribute args // Otherwise the expand query will fetch the non-speculative attribute args and pass those instead. - let mut speculative_expansion = match loc.def.kind { + let speculative_expansion = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) => { - tt.delimiter = tt::Delimiter::unspecified(); + tt.delimiter = tt::Delimiter::UNSPECIFIED; expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref()) } MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { @@ -260,7 +197,17 @@ pub fn expand_speculative( MacroDefKind::BuiltInDerive(expander, ..) => { // this cast is a bit sus, can we avoid losing the typedness here? 
let adt = ast::Adt::cast(speculative_args.clone()).unwrap(); - expander.expand(db, actual_macro_call, &adt, &spec_args_tmap) + expander.expand( + db, + actual_macro_call, + &adt, + &map_from_syntax_node( + speculative_args, + // we don't leak these spans into any query so it's fine to make them absolute + SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + TextSize::new(0), + ), + ) } MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt), MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into), @@ -271,12 +218,14 @@ pub fn expand_speculative( }; let expand_to = macro_expand_to(db, actual_macro_call); - fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info); - let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to); + let (node, rev_tmap) = token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to); let syntax_node = node.syntax_node(); let token = rev_tmap - .ranges_by_token(token_id, token_to_map.kind()) + .ranges_with_span(tt::SpanData { + range: token_to_map.text_range(), + anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + }) .filter_map(|range| syntax_node.covering_element(range).into_token()) .min_by_key(|t| { // prefer tokens of the same kind and text @@ -293,7 +242,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc { fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode { match file_id.repr() { - HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(), + HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(), HirFileIdRepr::MacroFile(macro_file) => { db.parse_macro_expansion(macro_file).value.0.syntax_node() } @@ -315,7 +264,7 @@ fn parse_or_expand_with_err( fn parse_macro_expansion( db: &dyn ExpandDatabase, macro_file: MacroFile, -) -> ExpandResult<(Parse, Arc)> { +) -> ExpandResult<(Parse, Arc)> { let _p = profile::span("parse_macro_expansion"); let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id); @@ -324,7 +273,7 @@ fn parse_macro_expansion( tracing::debug!("expanded = {}", tt.as_debug_string()); tracing::debug!("kind = {:?}", expand_to); - let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to); + let (parse, rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to); ExpandResult { value: (parse, Arc::new(rev_token_map)), err } } @@ -340,48 +289,119 @@ fn parse_macro_expansion_error( fn macro_arg( db: &dyn ExpandDatabase, id: MacroCallId, -) -> ValueResult< - Option>, - Arc>, -> { - let loc = db.lookup_intern_macro_call(id); - - if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() { - return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())))); - } - - let ValueResult { value, err } = db.macro_arg_node(id); - let Some(arg) = value else { - return ValueResult { value: None, err }; +) -> ValueResult>, Arc>> { + let mismatched_delimiters = |arg: &SyntaxNode| { + let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); + let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); + let well_formed_tt = + matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])); + if !well_formed_tt { + // Don't expand malformed (unbalanced) macro invocations. 
This is + // less than ideal, but trying to expand unbalanced macro calls + // sometimes produces pathological, deeply nested code which breaks + // all kinds of things. + // + // Some day, we'll have explicit recursion counters for all + // recursive things, at which point this code might be removed. + cov_mark::hit!(issue9358_bad_macro_stack_overflow); + Some(Arc::new(Box::new([SyntaxError::new( + "unbalanced token tree".to_owned(), + arg.text_range(), + )]) as Box<[_]>)) + } else { + None + } }; + let loc = db.lookup_intern_macro_call(id); + if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) + .then(|| loc.eager.as_deref()) + .flatten() + { + ValueResult::ok(Some(Arc::new(arg.0.clone()))) + } else { + let (parse, map) = match loc.kind.file_id().repr() { + HirFileIdRepr::FileId(file_id) => { + (db.parse(file_id).to_syntax(), Arc::new(Default::default())) + } + HirFileIdRepr::MacroFile(macro_file) => { + let (parse, map) = db.parse_macro_expansion(macro_file).value; + (parse, map) + } + }; + let root = parse.syntax_node(); - let node = SyntaxNode::new_root(arg); - let censor = censor_for_macro_input(&loc, &node); - let mut fixups = fixup::fixup_syntax(&node); - fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( - &node, - fixups.token_map, - fixups.next_id, - fixups.replace, - fixups.append, - ); + let (syntax, offset, ast_id) = match loc.kind { + MacroCallKind::FnLike { ast_id, .. } => { + let node = &ast_id.to_ptr(db).to_node(&root); + let offset = node.syntax().text_range().start(); + match node.token_tree().map(|it| it.syntax().clone()) { + Some(tt) => { + if let Some(e) = mismatched_delimiters(&tt) { + return ValueResult::only_err(e); + } + (tt, offset, ast_id.value.erase()) + } + None => { + return ValueResult::only_err(Arc::new(Box::new([ + SyntaxError::new_at_offset("missing token tree".to_owned(), offset), + ]))); + } + } + } + MacroCallKind::Derive { ast_id, .. } => { + let syntax_node = ast_id.to_ptr(db).to_node(&root).syntax().clone(); + let offset = syntax_node.text_range().start(); + (syntax_node, offset, ast_id.value.erase()) + } + MacroCallKind::Attr { ast_id, .. 
} => { + let syntax_node = ast_id.to_ptr(db).to_node(&root).syntax().clone(); + let offset = syntax_node.text_range().start(); + (syntax_node, offset, ast_id.value.erase()) + } + }; + let censor = censor_for_macro_input(&loc, &syntax); + // let mut fixups = fixup::fixup_syntax(&node); + // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); + // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( + // &node, + // fixups.token_map, + // fixups.next_id, + // fixups.replace, + // fixups.append, + // ); + let mut tt = mbe::syntax_node_to_token_tree_censored( + &syntax, + SpanAnchor { file_id: loc.kind.file_id(), ast_id }, + offset, + &map, + censor, + ); - if loc.def.is_proc_macro() { - // proc macros expect their inputs without parentheses, MBEs expect it with them included - tt.delimiter = tt::Delimiter::unspecified(); - } - let val = Some(Arc::new((tt, tmap, fixups.undo_info))); - match err { - Some(err) => ValueResult::new(val, err), - None => ValueResult::ok(val), + if loc.def.is_proc_macro() { + // proc macros expect their inputs without parentheses, MBEs expect them with the parentheses included + tt.delimiter = tt::Delimiter::UNSPECIFIED; + } + + if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { + match parse.errors() { + [] => ValueResult::ok(Some(Arc::new(tt))), + errors => ValueResult::new( + Some(Arc::new(tt)), + // Box::<[_]>::from(res.errors()), not stable yet + Arc::new(errors.to_vec().into_boxed_slice()), + ), + } + } else { + ValueResult::ok(Some(Arc::new(tt))) + } } } +// FIXME: Censoring info should be calculated by the caller! Namely by name resolution /// Certain macro calls expect some nodes in the input to be preprocessed away, namely: /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped /// - attributes expect the invoking attribute to be stripped -fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet { +fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Vec { // FIXME: handle `cfg_attr` (|| { let censor = match loc.kind { @@ -417,103 +437,56 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet ValueResult, Arc>> { - let err = || -> Arc> { - Arc::new(Box::new([SyntaxError::new_at_offset( - "invalid macro call".to_owned(), - syntax::TextSize::from(0), - )])) - }; - let loc = db.lookup_intern_macro_call(id); - let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind { - let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() { - Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0) - } else { - loc.kind - .arg(db) - .and_then(|arg| ast::TokenTree::cast(arg.value)) - .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax()) - }; - match res { - Some(res) if res.errors().is_empty() => res.syntax_node(), - Some(res) => { - return ValueResult::new( - Some(res.syntax_node().green().into()), - // Box::<[_]>::from(res.errors()), not stable yet - Arc::new(res.errors().to_vec().into_boxed_slice()), - ); - } - None => return ValueResult::only_err(err()), - } - } else { - match loc.kind.arg(db) { - Some(res) => res.value, - None => return ValueResult::only_err(err()), - } - }; - if matches!(loc.kind, MacroCallKind::FnLike { .. 
}) { - let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); - let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); - let well_formed_tt = - matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])); - if !well_formed_tt { - // Don't expand malformed (unbalanced) macro invocations. This is - // less than ideal, but trying to expand unbalanced macro calls - // sometimes produces pathological, deeply nested code which breaks - // all kinds of things. - // - // Some day, we'll have explicit recursion counters for all - // recursive things, at which point this code might be removed. - cov_mark::hit!(issue9358_bad_macro_stack_overflow); - return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new( - "unbalanced token tree".to_owned(), - arg.text_range(), - )]))); - } - } - ValueResult::ok(Some(arg.green().into())) -} - fn decl_macro_expander( db: &dyn ExpandDatabase, def_crate: CrateId, id: AstId, ) -> Arc { let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021; - let (mac, def_site_token_map) = match id.to_node(db) { + let (root, map) = match id.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + (db.parse(file_id).syntax_node(), Arc::new(Default::default())) + } + HirFileIdRepr::MacroFile(macro_file) => { + let (parse, map) = db.parse_macro_expansion(macro_file).value; + (parse.syntax_node(), map) + } + }; + let mac = match id.to_ptr(db).to_node(&root) { ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() { Some(arg) => { - let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax()); + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() }, + macro_rules.syntax().text_range().start(), + &map, + ); let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); - (mac, def_site_token_map) + mac } - None => ( - mbe::DeclarativeMacro::from_err( - mbe::ParseError::Expected("expected a token tree".into()), - is_2021, - ), - Default::default(), + None => mbe::DeclarativeMacro::from_err( + mbe::ParseError::Expected("expected a token tree".into()), + is_2021, ), }, ast::Macro::MacroDef(macro_def) => match macro_def.body() { Some(arg) => { - let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax()); + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() }, + macro_def.syntax().text_range().start(), + &map, + ); let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021); - (mac, def_site_token_map) + mac } - None => ( - mbe::DeclarativeMacro::from_err( - mbe::ParseError::Expected("expected a token tree".into()), - is_2021, - ), - Default::default(), + None => mbe::DeclarativeMacro::from_err( + mbe::ParseError::Expected("expected a token tree".into()), + is_2021, ), }, }; - Arc::new(DeclarativeMacroExpander { mac, def_site_token_map }) + Arc::new(DeclarativeMacroExpander { mac }) } fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { @@ -536,25 +509,37 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult return db.expand_proc_macro(id), MacroDefKind::BuiltInDerive(expander, ..) => { - let arg = db.macro_arg_node(id).value.unwrap(); + // FIXME: add firewall query for this? 
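Illustration (not part of the patch): the `FIXME: add firewall query for this?` above refers to the pattern documented on `macro_arg` earlier in this file: downstream queries should depend on a narrow, memoized value so they only re-run when that value actually changes. A toy, non-salsa sketch of the idea, with all names hypothetical:

    use std::collections::HashMap;

    struct Db {
        file_text: String,
        // Memoize expansion keyed by the *value* of the extracted argument:
        // edits that leave the argument tokens unchanged reuse the result.
        expand_cache: HashMap<String, String>,
    }

    impl Db {
        fn macro_arg(&self, range: std::ops::Range<usize>) -> String {
            self.file_text[range].to_string()
        }

        fn macro_expand(&mut self, range: std::ops::Range<usize>) -> String {
            let arg = self.macro_arg(range);
            self.expand_cache
                .entry(arg.clone())
                .or_insert_with(|| format!("expanded({arg})"))
                .clone()
        }
    }
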
+ let hir_file_id = loc.kind.file_id(); + let (root, map) = match hir_file_id.repr() { + HirFileIdRepr::FileId(file_id) => (db.parse(file_id).syntax_node(), None), + HirFileIdRepr::MacroFile(macro_file) => { + let (parse, map) = db.parse_macro_expansion(macro_file).value; + (parse.syntax_node(), Some(map)) + } + }; + let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() }; + let node = ast_id.to_ptr(db).to_node(&root); - let node = SyntaxNode::new_root(arg); - let censor = censor_for_macro_input(&loc, &node); - let mut fixups = fixup::fixup_syntax(&node); - fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications( + // FIXME: we might need to remove the spans from the input to the derive macro here + let _censor = censor_for_macro_input(&loc, node.syntax()); + let _t; + expander.expand( + db, + id, &node, - fixups.token_map, - fixups.next_id, - fixups.replace, - fixups.append, - ); - - // this cast is a bit sus, can we avoid losing the typedness here? - let adt = ast::Adt::cast(node).unwrap(); - let mut res = expander.expand(db, id, &adt, &tmap); - fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info); - res + match &map { + Some(map) => map, + None => { + _t = map_from_syntax_node( + node.syntax(), + SpanAnchor { file_id: hir_file_id, ast_id: ast_id.value.erase() }, + node.syntax().text_range().start(), + ); + &_t + } + }, + ) } _ => { let ValueResult { value, err } = db.macro_arg(id); @@ -570,8 +555,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult { db.decl_macro_expander(loc.def.krate, id).expand(arg.clone()) } @@ -583,11 +568,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult { - let mut arg = arg.clone(); - fixup::reverse_fixups(&mut arg, arg_tm, undo_info); - return ExpandResult { - value: Arc::new(arg), + value: Arc::new(arg.clone()), err: err.map(|err| { let mut buf = String::new(); for err in &**err { @@ -603,9 +585,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult it.expand(db, id, &arg).map_err(Into::into), MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg), _ => unreachable!(), - }; - fixup::reverse_fixups(&mut res.value, arg_tm, undo_info); - res + } } }; @@ -626,6 +606,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult ExpandResult> { + // FIXME: Syntax fix ups let loc = db.lookup_intern_macro_call(id); let Some(macro_arg) = db.macro_arg(id).value else { return ExpandResult { @@ -639,32 +620,24 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult expander, _ => unreachable!(), }; let attr_arg = match &loc.kind { - MacroCallKind::Attr { attr_args, .. } => { - let mut attr_args = attr_args.0.clone(); - mbe::Shift::new(arg_tt).shift_all(&mut attr_args); - Some(attr_args) - } + MacroCallKind::Attr { attr_args, .. 
} => Some(&**attr_args), _ => None, }; - let ExpandResult { value: mut tt, err } = - expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref()); + let ExpandResult { value: tt, err } = + expander.expand(db, loc.def.krate, loc.krate, ¯o_arg, attr_arg); // Set a hard limit for the expanded tt if let Err(value) = check_tt_count(&tt) { return value; } - fixup::reverse_fixups(&mut tt, arg_tm, undo_info); - ExpandResult { value: Arc::new(tt), err } } @@ -677,9 +650,10 @@ fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo { } fn token_tree_to_syntax_node( + db: &dyn ExpandDatabase, tt: &tt::Subtree, expand_to: ExpandTo, -) -> (Parse, mbe::TokenMap) { +) -> (Parse, SpanMap) { let entry_point = match expand_to { ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts, ExpandTo::Items => mbe::TopEntryPoint::MacroItems, @@ -687,7 +661,18 @@ fn token_tree_to_syntax_node( ExpandTo::Type => mbe::TopEntryPoint::Type, ExpandTo::Expr => mbe::TopEntryPoint::Expr, }; - mbe::token_tree_to_syntax_node(tt, entry_point) + let mut tm = mbe::token_tree_to_syntax_node(tt, entry_point); + // now what the hell is going on here + tm.1.span_map.sort_by(|(_, a), (_, b)| { + a.anchor.file_id.cmp(&b.anchor.file_id).then_with(|| { + let map = db.ast_id_map(a.anchor.file_id); + map.get_raw(a.anchor.ast_id) + .text_range() + .start() + .cmp(&map.get_raw(b.anchor.ast_id).text_range().start()) + }) + }); + tm } fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult>> { diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index 4110f28475..ae5f26b5d3 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -18,8 +18,11 @@ //! //! //! See the full discussion : -use base_db::CrateId; -use rustc_hash::{FxHashMap, FxHashSet}; +use base_db::{ + span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + CrateId, +}; +use rustc_hash::FxHashMap; use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent}; use triomphe::Arc; @@ -29,7 +32,7 @@ use crate::{ hygiene::Hygiene, mod_path::ModPath, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind, - MacroCallLoc, MacroDefId, MacroDefKind, + MacroCallLoc, MacroDefId, MacroDefKind, SpanMap, }; pub fn expand_eager_macro_input( @@ -54,15 +57,15 @@ pub fn expand_eager_macro_input( eager: None, kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr }, }); - let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = + let ExpandResult { value: (arg_exp, _arg_exp_map), err: parse_err } = db.parse_macro_expansion(arg_id.as_macro_file()); // we need this map here as the expansion of the eager input fake file loses whitespace ... 
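Illustration (not part of the patch): for orientation, eager macros expand their arguments before the macro itself, which is why `expand_eager_macro_input` above first expands the input as its own macro file and only then records it in `EagerCallInfo`. A toy sketch of that two-phase order, with hypothetical stand-in functions:

    // Phase 1 stands in for expanding macro calls *inside* the argument list;
    // phase 2 stands in for expanding the eager macro over the result.
    fn expand_arguments(args: &str) -> String {
        args.replace("inner!()", "42") // pretend `inner!()` expands to `42`
    }

    fn expand_eager_macro(name: &str, args: &str) -> String {
        let expanded_args = expand_arguments(args);
        format!("{name}_expanded({expanded_args})")
    }
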
- let mut ws_mapping = FxHashMap::default(); - if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() { - ws_mapping.extend(tm.entries().filter_map(|(id, range)| { - Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range)) - })); - } + // let mut ws_mapping = FxHashMap::default(); + // if let Some((tm)) = db.macro_arg(arg_id).value.as_deref() { + // ws_mapping.extend(tm.entries().filter_map(|(id, range)| { + // Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range)) + // })); + // } let ExpandResult { value: expanded_eager_input, err } = { eager_macro_recur( @@ -75,49 +78,55 @@ pub fn expand_eager_macro_input( }; let err = parse_err.or(err); - let Some((expanded_eager_input, mapping)) = expanded_eager_input else { + let Some((expanded_eager_input, _mapping)) = expanded_eager_input else { return ExpandResult { value: None, err }; }; - let (mut subtree, expanded_eager_input_token_map) = - mbe::syntax_node_to_token_tree(&expanded_eager_input); + let mut subtree = mbe::syntax_node_to_token_tree( + &expanded_eager_input, + // is this right? + SpanAnchor { file_id: arg_id.as_file(), ast_id: ROOT_ERASED_FILE_AST_ID }, + TextSize::new(0), + // FIXME: Spans! `eager_macro_recur` needs to fill out a span map for us + &Default::default(), + ); - let og_tmap = if let Some(tt) = macro_call.value.token_tree() { - let mut ids_used = FxHashSet::default(); - let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax()); - // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside - // so we need to remap them to the original input of the eager macro. - subtree.visit_ids(&mut |id| { - // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix + // let og_tmap = if let Some(tt) = macro_call.value.token_tree() { + // let mut ids_used = FxHashSet::default(); + // let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax()); + // // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside + // // so we need to remap them to the original input of the eager macro. 
+ // subtree.visit_ids(&mut |id| { + // // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix - if let Some(range) = expanded_eager_input_token_map - .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE) - { - // remap from expanded eager input to eager input expansion - if let Some(og_range) = mapping.get(&range) { - // remap from eager input expansion to original eager input - if let Some(&og_range) = ws_mapping.get(og_range) { - if let Some(og_token) = og_tmap.token_by_range(og_range) { - ids_used.insert(og_token); - return og_token; - } - } - } - } - tt::TokenId::UNSPECIFIED - }); - og_tmap.filter(|id| ids_used.contains(&id)); - og_tmap - } else { - Default::default() - }; - subtree.delimiter = crate::tt::Delimiter::unspecified(); + // if let Some(range) = expanded_eager_input_token_map + // .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE) + // { + // // remap from expanded eager input to eager input expansion + // if let Some(og_range) = mapping.get(&range) { + // // remap from eager input expansion to original eager input + // if let Some(&og_range) = ws_mapping.get(og_range) { + // if let Some(og_token) = og_tmap.token_by_range(og_range) { + // ids_used.insert(og_token); + // return og_token; + // } + // } + // } + // } + // tt::TokenId::UNSPECIFIED + // }); + // og_tmap.filter(|id| ids_used.contains(&id)); + // og_tmap + // } else { + // Default::default() + // }; + subtree.delimiter = crate::tt::Delimiter::UNSPECIFIED; let loc = MacroCallLoc { def, krate, eager: Some(Box::new(EagerCallInfo { - arg: Arc::new((subtree, og_tmap)), + arg: Arc::new((subtree,)), arg_id, error: err.clone(), })), @@ -132,7 +141,7 @@ fn lazy_expand( def: &MacroDefId, macro_call: InFile, krate: CrateId, -) -> ExpandResult<(InFile>, Arc)> { +) -> ExpandResult<(InFile>, Arc)> { let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); let expand_to = ExpandTo::from_call_site(¯o_call.value); @@ -214,19 +223,19 @@ fn eager_macro_recur( let ExpandResult { value, err: err2 } = db.parse_macro_expansion(call_id.as_macro_file()); - if let Some(tt) = call.token_tree() { - let call_tt_start = tt.syntax().text_range().start(); - let call_start = - apply_offset(call.syntax().text_range().start(), offset); - if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() { - mapping.extend(arg_map.entries().filter_map(|(tid, range)| { - value - .1 - .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE) - .map(|r| (r + call_start, range + call_tt_start)) - })); - } - } + // if let Some(tt) = call.token_tree() { + // let call_tt_start = tt.syntax().text_range().start(); + // let call_start = + // apply_offset(call.syntax().text_range().start(), offset); + // if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() { + // mapping.extend(arg_map.entries().filter_map(|(tid, range)| { + // value + // .1 + // .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE) + // .map(|r| (r + call_start, range + call_tt_start)) + // })); + // } + // } ExpandResult { value: Some(value.0.syntax_node().clone_for_update()), @@ -241,13 +250,8 @@ fn eager_macro_recur( | MacroDefKind::BuiltInAttr(..) | MacroDefKind::BuiltInDerive(..) | MacroDefKind::ProcMacro(..) 
=> { - let ExpandResult { value: (parse, tm), err } = + let ExpandResult { value: (parse, _tm), err } = lazy_expand(db, &def, curr.with_value(call.clone()), krate); - let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind { - Some(db.decl_macro_expander(def.krate, ast_id)) - } else { - None - }; // replace macro inside let hygiene = Hygiene::new(db, parse.file_id); @@ -261,24 +265,29 @@ fn eager_macro_recur( ); let err = err.or(error); - if let Some(tt) = call.token_tree() { - let call_tt_start = tt.syntax().text_range().start(); - let call_start = apply_offset(call.syntax().text_range().start(), offset); - if let Some((_tt, arg_map, _)) = parse - .file_id - .macro_file() - .and_then(|id| db.macro_arg(id.macro_call_id).value) - .as_deref() - { - mapping.extend(arg_map.entries().filter_map(|(tid, range)| { - tm.first_range_by_token( - decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid), - syntax::SyntaxKind::TOMBSTONE, - ) - .map(|r| (r + call_start, range + call_tt_start)) - })); - } - } + // if let Some(tt) = call.token_tree() { + // let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind { + // Some(db.decl_macro_expander(def.krate, ast_id)) + // } else { + // None + // }; + // let call_tt_start = tt.syntax().text_range().start(); + // let call_start = apply_offset(call.syntax().text_range().start(), offset); + // if let Some((_tt, arg_map, _)) = parse + // .file_id + // .macro_file() + // .and_then(|id| db.macro_arg(id.macro_call_id).value) + // .as_deref() + // { + // mapping.extend(arg_map.entries().filter_map(|(tid, range)| { + // tm.first_range_by_token( + // decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid), + // syntax::SyntaxKind::TOMBSTONE, + // ) + // .map(|r| (r + call_start, range + call_tt_start)) + // })); + // } + // } // FIXME: Do we need to re-use _m here? ExpandResult { value: value.map(|(n, _m)| n), err } } diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index ca65db1136..ce421d3dcd 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -5,18 +5,16 @@ use base_db::CrateId; use db::TokenExpander; use either::Either; -use mbe::Origin; use syntax::{ ast::{self, HasDocComments}, - AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, + AstNode, SyntaxNode, TextRange, TextSize, }; use triomphe::Arc; use crate::{ db::{self, ExpandDatabase}, - fixup, name::{AsName, Name}, - HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, + HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, SpanMap, }; #[derive(Clone, Debug)] @@ -50,23 +48,25 @@ impl Hygiene { Either::Left(name_ref.as_name()) } - pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option { - let mut token = path.syntax().first_token()?.text_range(); + pub fn local_inner_macros(&self, _db: &dyn ExpandDatabase, path: ast::Path) -> Option { + let mut _token = path.syntax().first_token()?.text_range(); let frames = self.frames.as_ref()?; - let mut current = &frames.0; + let mut _current = &frames.0; - loop { - let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?; - if origin == Origin::Def { - return if current.local_inner { - frames.root_crate(db, path.syntax()) - } else { - None - }; - } - current = current.call_site.as_ref()?; - token = mapped.value; - } + // FIXME: Hygiene ... 
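Illustration (not part of the patch): the loop stubbed out above walked expansion frames outward from the use-site, following call sites until a token turned out to originate at a macro definition. A toy model of that walk, with a hypothetical frame type in place of the real `HygieneFrame`:

    enum Origin { Def, Call }

    struct Frame {
        origin: Origin,
        parent: Option<Box<Frame>>,
    }

    // Walk outward through call-site frames; a token that bottoms out at a
    // definition-site frame resolves with definition-site hygiene.
    fn resolves_at_def_site(mut frame: &Frame) -> bool {
        loop {
            match frame.origin {
                Origin::Def => return true,
                Origin::Call => match &frame.parent {
                    Some(parent) => frame = parent,
                    None => return false,
                },
            }
        }
    }
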
+ return None; + // loop { + // let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?; + // if origin == Origin::Def { + // return if current.local_inner { + // frames.root_crate(db, path.syntax()) + // } else { + // None + // }; + // } + // current = current.call_site.as_ref()?; + // token = mapped.value; + // } } } @@ -92,31 +92,33 @@ impl HygieneFrames { HygieneFrames(Arc::new(HygieneFrame::new(db, file_id))) } - fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option { - let mut token = node.first_token()?.text_range(); - let mut result = self.0.krate; - let mut current = self.0.clone(); + fn root_crate(&self, _db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option { + let mut _token = node.first_token()?.text_range(); + let mut _result = self.0.krate; + let mut _current = self.0.clone(); - while let Some((mapped, origin)) = - current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token)) - { - result = current.krate; + return None; - let site = match origin { - Origin::Def => ¤t.def_site, - Origin::Call => ¤t.call_site, - }; + // while let Some((mapped, origin)) = + // current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token)) + // { + // result = current.krate; - let site = match site { - None => break, - Some(it) => it, - }; + // let site = match origin { + // Origin::Def => ¤t.def_site, + // Origin::Call => ¤t.call_site, + // }; - current = site.clone(); - token = mapped.value; - } + // let site = match site { + // None => break, + // Some(it) => it, + // }; - result + // current = site.clone(); + // token = mapped.value; + // } + + // result } } @@ -127,45 +129,18 @@ struct HygieneInfo { attr_input_or_mac_def_start: Option>, macro_def: TokenExpander, - macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>, - macro_arg_shift: mbe::Shift, - exp_map: Arc, + macro_arg: Arc, + exp_map: Arc, } impl HygieneInfo { - fn map_ident_up( + fn _map_ident_up( &self, - db: &dyn ExpandDatabase, - token: TextRange, - ) -> Option<(InFile, Origin)> { - let token_id = self.exp_map.token_by_range(token)?; - let (mut token_id, origin) = self.macro_def.map_id_up(token_id); - - let loc = db.lookup_intern_macro_call(self.file.macro_call_id); - - let (token_map, tt) = match &loc.kind { - MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) { - Some(unshifted) => { - token_id = unshifted; - (&attr_args.1, self.attr_input_or_mac_def_start?) 
- } - None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())), - }, - _ => match origin { - mbe::Origin::Call => { - (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())) - } - mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) { - (TokenExpander::DeclarativeMacro(expander), Some(tt)) => { - (&expander.def_site_token_map, *tt) - } - _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"), - }, - }, - }; - - let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?; - Some((tt.with_value(range + tt.value), origin)) + _db: &dyn ExpandDatabase, + _token: TextRange, + ) -> Option> { + // self.exp_map.token_by_range(token).map(|span| InFile::new(span.anchor, span.range)) + None } } @@ -197,18 +172,13 @@ fn make_hygiene_info( let macro_def = db.macro_expander(loc.def); let (_, exp_map) = db.parse_macro_expansion(macro_file).value; let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { - Arc::new(( - tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }, - Default::default(), - Default::default(), - )) + Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }) }); HygieneInfo { file: macro_file, attr_input_or_mac_def_start: attr_input_or_mac_def .map(|it| it.map(|tt| tt.syntax().text_range().start())), - macro_arg_shift: mbe::Shift::new(¯o_arg.0), macro_arg, macro_def, exp_map, diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 4be55126b8..bd5796e000 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -18,21 +18,13 @@ pub mod quote; pub mod eager; pub mod mod_path; pub mod attrs; -mod fixup; +// mod fixup; -use mbe::TokenMap; -pub use mbe::{Origin, ValueResult}; - -use ::tt::token_id as tt; use triomphe::Arc; use std::{fmt, hash::Hash, iter}; -use base_db::{ - impl_intern_key, - salsa::{self, InternId}, - CrateId, FileId, FileRange, ProcMacroKind, -}; +use base_db::{span::HirFileIdRepr, CrateId, FileId, FileRange, ProcMacroKind}; use either::Either; use syntax::{ algo::{self, skip_trivia_token}, @@ -51,6 +43,25 @@ use crate::{ proc_macro::ProcMacroExpander, }; +pub use base_db::span::{HirFileId, MacroCallId, MacroFile}; +pub use mbe::ValueResult; + +pub type SpanMap = ::mbe::TokenMap; +pub type DeclarativeMacro = ::mbe::DeclarativeMacro; + +pub mod tt { + pub use base_db::span::SpanData; + pub use tt::{DelimiterKind, Spacing, Span}; + + pub type Delimiter = ::tt::Delimiter; + pub type Subtree = ::tt::Subtree; + pub type Leaf = ::tt::Leaf; + pub type Literal = ::tt::Literal; + pub type Punct = ::tt::Punct; + pub type Ident = ::tt::Ident; + pub type TokenTree = ::tt::TokenTree; +} + pub type ExpandResult = ValueResult; #[derive(Debug, PartialEq, Eq, Clone, Hash)] @@ -86,42 +97,43 @@ impl fmt::Display for ExpandError { } } -/// Input to the analyzer is a set of files, where each file is identified by -/// `FileId` and contains source code. However, another source of source code in -/// Rust are macros: each macro can be thought of as producing a "temporary -/// file". To assign an id to such a file, we use the id of the macro call that -/// produced the file. So, a `HirFileId` is either a `FileId` (source code -/// written by user), or a `MacroCallId` (source code produced by macro). -/// -/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file -/// containing the call plus the offset of the macro call in the file. 
Note that -/// this is a recursive definition! However, the size_of of `HirFileId` is -/// finite (because everything bottoms out at the real `FileId`) and small -/// (`MacroCallId` uses the location interning. You can check details here: -/// ). -/// -/// The two variants are encoded in a single u32 which are differentiated by the MSB. -/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a -/// `MacroCallId`. -#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub struct HirFileId(u32); - -impl fmt::Debug for HirFileId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.repr().fmt(f) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroFile { - pub macro_call_id: MacroCallId, -} - /// `MacroCallId` identifies a particular macro invocation, like /// `println!("Hello, {}", world)`. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroCallId(salsa::InternId); -impl_intern_key!(MacroCallId); +pub struct SyntaxContextId(base_db::salsa::InternId); +base_db::impl_intern_key!(SyntaxContextId); + +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub struct SyntaxContext { + outer_expn: HirFileId, + outer_transparency: Transparency, + parent: SyntaxContextId, + /// This context, but with all transparent and semi-transparent expansions filtered away. + opaque: SyntaxContextId, + /// This context, but with all transparent expansions filtered away. + opaque_and_semitransparent: SyntaxContextId, + /// Name of the crate to which `$crate` with this context would resolve. + dollar_crate_name: name::Name, +} + +/// A property of a macro expansion that determines how identifiers +/// produced by that expansion are resolved. +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)] +pub enum Transparency { + /// Identifier produced by a transparent expansion is always resolved at call-site. + /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this. + Transparent, + /// Identifier produced by a semi-transparent expansion may be resolved + /// either at call-site or at definition-site. + /// If it's a local variable, label or `$crate` then it's resolved at def-site. + /// Otherwise it's resolved at call-site. + /// `macro_rules` macros behave like this, built-in macros currently behave like this too, + /// but that's an implementation detail. + SemiTransparent, + /// Identifier produced by an opaque expansion is always resolved at definition-site. + /// Def-site spans in procedural macros, identifiers from `macro` by default use this. + Opaque, +} #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MacroCallLoc { @@ -154,7 +166,7 @@ pub enum MacroDefKind { #[derive(Debug, Clone, PartialEq, Eq, Hash)] struct EagerCallInfo { /// The expanded argument of the eager macro. - arg: Arc<(tt::Subtree, TokenMap)>, + arg: Arc<(tt::Subtree,)>, /// Call id of the eager macro's input file (this is the macro file for its fully expanded input). arg_id: MacroCallId, error: Option, @@ -178,7 +190,7 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, - attr_args: Arc<(tt::Subtree, mbe::TokenMap)>, + attr_args: Arc, /// Syntactical index of the invoking `#[attribute]`. /// /// Outer attributes are counted first, then inner attributes. 
This does not support @@ -187,34 +199,40 @@ }, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -enum HirFileIdRepr { - FileId(FileId), - MacroFile(MacroFile), -} - -impl From for HirFileId { - fn from(FileId(id): FileId) -> Self { - assert!(id < Self::MAX_FILE_ID); - HirFileId(id) - } -} - -impl From for HirFileId { - fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self { - let id = id.as_u32(); - assert!(id < Self::MAX_FILE_ID); - HirFileId(id | Self::MACRO_FILE_TAG_MASK) - } -} - -impl HirFileId { - const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK; - const MACRO_FILE_TAG_MASK: u32 = 1 << 31; - +pub trait HirFileIdExt { /// For macro-expansion files, returns the original source file the /// expansion originated from. - pub fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId { + fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId; + fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32; + + /// If this is a macro call, returns the syntax node of the call. + fn call_node(self, db: &dyn db::ExpandDatabase) -> Option>; + + /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. + fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)>; + + /// Return expansion information if it is a macro-expansion file + fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option; + + fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase) + -> Option>; + fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool; + + /// Return whether this file is an include macro + fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool; + + fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool; + /// Return whether this file is an attr macro + fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool; + + /// Return whether this file is the pseudo expansion of the derive attribute. + /// See [`crate::builtin_attr_macro::derive_attr_expand`]. + fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool; +} + +impl HirFileIdExt for HirFileId { + fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId { let mut file_id = self; loop { match file_id.repr() { @@ -231,7 +249,7 @@ } } - pub fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 { + fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 { let mut level = 0; let mut curr = self; while let Some(macro_file) = curr.macro_file() { @@ -243,15 +261,13 @@ level } - /// If this is a macro call, returns the syntax node of the call. - pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option> { + fn call_node(self, db: &dyn db::ExpandDatabase) -> Option> { let macro_file = self.macro_file()?; let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); Some(loc.to_node(db)) } - /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
- pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> { + fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> { let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db); loop { match call.file_id.repr() { @@ -264,12 +280,12 @@ impl HirFileId { } /// Return expansion information if it is a macro-expansion file - pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option { + fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option { let macro_file = self.macro_file()?; ExpansionInfo::new(db, macro_file) } - pub fn as_builtin_derive_attr_node( + fn as_builtin_derive_attr_node( &self, db: &dyn db::ExpandDatabase, ) -> Option> { @@ -282,7 +298,7 @@ impl HirFileId { Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) } - pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { matches!( @@ -294,7 +310,7 @@ impl HirFileId { } } - pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { matches!( @@ -306,8 +322,7 @@ impl HirFileId { } } - /// Return whether this file is an include macro - pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include() @@ -316,7 +331,7 @@ impl HirFileId { } } - pub fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -326,8 +341,7 @@ impl HirFileId { } } - /// Return whether this file is an attr macro - pub fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -337,9 +351,7 @@ impl HirFileId { } } - /// Return whether this file is the pseudo expansion of the derive attribute. - /// See [`crate::builtin_attr_macro::derive_attr_expand`]. 
- pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -348,38 +360,6 @@ impl HirFileId { None => false, } } - - #[inline] - pub fn is_macro(self) -> bool { - self.0 & Self::MACRO_FILE_TAG_MASK != 0 - } - - #[inline] - pub fn macro_file(self) -> Option { - match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => None, - _ => Some(MacroFile { - macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), - }), - } - } - - #[inline] - pub fn file_id(self) -> Option { - match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => Some(FileId(self.0)), - _ => None, - } - } - - fn repr(self) -> HirFileIdRepr { - match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => HirFileIdRepr::FileId(FileId(self.0)), - _ => HirFileIdRepr::MacroFile(MacroFile { - macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), - }), - } - } } impl MacroDefId { @@ -587,16 +567,6 @@ impl MacroCallKind { } } -impl MacroCallId { - pub fn as_file(self) -> HirFileId { - MacroFile { macro_call_id: self }.into() - } - - pub fn as_macro_file(self) -> MacroFile { - MacroFile { macro_call_id: self } - } -} - /// ExpansionInfo mainly describes how to map text range between src and expanded macro #[derive(Debug, Clone, PartialEq, Eq)] pub struct ExpansionInfo { @@ -607,11 +577,8 @@ pub struct ExpansionInfo { attr_input_or_mac_def: Option>, macro_def: TokenExpander, - macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>, - /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg - /// and as such we need to shift tokens if they are part of an attributes input instead of their item. - macro_arg_shift: mbe::Shift, - exp_map: Arc, + macro_arg: Arc, + exp_map: Arc, } impl ExpansionInfo { @@ -640,69 +607,33 @@ impl ExpansionInfo { pub fn map_token_down( &self, db: &dyn db::ExpandDatabase, - item: Option, token: InFile<&SyntaxToken>, // FIXME: use this for range mapping, so that we can resolve inline format args _relative_token_offset: Option, ) -> Option> + '_> { assert_eq!(token.file_id, self.arg.file_id); - let token_id_in_attr_input = if let Some(item) = item { - // check if we are mapping down in an attribute input - // this is a special case as attributes can have two inputs - let call_id = self.expanded.file_id.macro_call_id; - let loc = db.lookup_intern_macro_call(call_id); - - let token_range = token.value.text_range(); - match &loc.kind { - MacroCallKind::Attr { attr_args, invoc_attr_index, .. 
} => { - // FIXME: handle `cfg_attr` - let attr = item - .doc_comments_and_attrs() - .nth(invoc_attr_index.ast_index()) - .and_then(Either::left)?; - match attr.token_tree() { - Some(token_tree) - if token_tree.syntax().text_range().contains_range(token_range) => - { - let attr_input_start = - token_tree.left_delimiter_token()?.text_range().start(); - let relative_range = - token.value.text_range().checked_sub(attr_input_start)?; - // shift by the item's tree's max id - let token_id = attr_args.1.token_by_range(relative_range)?; - - let token_id = if loc.def.is_attribute_derive() { - // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens - token_id - } else { - self.macro_arg_shift.shift(token_id) - }; - Some(token_id) - } - _ => None, - } - } - _ => None, - } + let span_map = &self.exp_map.span_map; + let (start, end) = if span_map + .first() + .map_or(false, |(_, span)| span.anchor.file_id == token.file_id) + { + (0, span_map.partition_point(|a| a.1.anchor.file_id == token.file_id)) } else { - None + let start = span_map.partition_point(|a| a.1.anchor.file_id != token.file_id); + ( + start, + start + span_map[start..].partition_point(|a| a.1.anchor.file_id == token.file_id), + ) }; - - let token_id = match token_id_in_attr_input { - Some(token_id) => token_id, - // the token is not inside `an attribute's input so do the lookup in the macro_arg as usual - None => { - let relative_range = - token.value.text_range().checked_sub(self.arg.value.text_range().start())?; - let token_id = self.macro_arg.1.token_by_range(relative_range)?; - // conditionally shift the id by a declarative macro definition - self.macro_def.map_id_down(token_id) - } - }; - - let tokens = self - .exp_map - .ranges_by_token(token_id, token.value.kind()) + let token_text_range = token.value.text_range(); + let ast_id_map = db.ast_id_map(token.file_id); + let tokens = span_map[start..end] + .iter() + .filter_map(move |(range, span)| { + let offset = ast_id_map.get_raw(span.anchor.ast_id).text_range().start(); + let abs_range = span.range + offset; + token_text_range.eq(&abs_range).then_some(*range) + }) .flat_map(move |range| self.expanded.value.covering_element(range).into_token()); Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token))) @@ -713,60 +644,18 @@ impl ExpansionInfo { &self, db: &dyn db::ExpandDatabase, token: InFile<&SyntaxToken>, - ) -> Option<(InFile, Origin)> { - assert_eq!(token.file_id, self.expanded.file_id.into()); - // Fetch the id through its text range, - let token_id = self.exp_map.token_by_range(token.value.text_range())?; - // conditionally unshifting the id to accommodate for macro-rules def site - let (mut token_id, origin) = self.macro_def.map_id_up(token_id); - - let call_id = self.expanded.file_id.macro_call_id; - let loc = db.lookup_intern_macro_call(call_id); - - // Special case: map tokens from `include!` expansions to the included file - if loc.def.is_include() { - if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) { - let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?; - let source = db.parse(file_id); - - let token = source.syntax_node().covering_element(range).into_token()?; - - return Some((InFile::new(file_id.into(), token), Origin::Call)); - } - } - - // Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item. - let (token_map, tt) = match &loc.kind { - MacroCallKind::Attr { attr_args, .. 
} => { - if loc.def.is_attribute_derive() { - (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned()) - } else { - // try unshifting the token id, if unshifting fails, the token resides in the non-item attribute input - // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this - match self.macro_arg_shift.unshift(token_id) { - Some(unshifted) => { - token_id = unshifted; - (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned()) - } - None => (&self.macro_arg.1, self.arg.clone()), - } - } - } - _ => match origin { - mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()), - mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) { - (TokenExpander::DeclarativeMacro(expander), Some(tt)) => { - (&expander.def_site_token_map, tt.syntax().cloned()) - } - _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"), - }, - }, - }; - - let range = token_map.first_range_by_token(token_id, token.value.kind())?; - let token = - tt.value.covering_element(range + tt.value.text_range().start()).into_token()?; - Some((tt.with_value(token), origin)) + ) -> Option> { + self.exp_map.span_for_range(token.value.text_range()).and_then(|span| { + let anchor = + db.ast_id_map(span.anchor.file_id).get_raw(span.anchor.ast_id).text_range().start(); + InFile::new( + span.anchor.file_id, + db.parse_or_expand(span.anchor.file_id) + .covering_element(span.range + anchor) + .into_token(), + ) + .transpose() + }) } fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option { @@ -779,11 +668,7 @@ impl ExpansionInfo { let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { - Arc::new(( - tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }, - Default::default(), - Default::default(), - )) + Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }) }); let def = loc.def.ast_id().left().and_then(|id| { @@ -814,7 +699,6 @@ impl ExpansionInfo { expanded, arg: arg_tt, attr_input_or_mac_def, - macro_arg_shift: mbe::Shift::new(¯o_arg.0), macro_arg, macro_def, exp_map, @@ -1018,7 +902,7 @@ impl InFile<&SyntaxNode> { impl InFile { pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option> { let expansion = self.file_id.expansion_info(db)?; - expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it) + expansion.map_token_up(db, self.as_ref()) } /// Falls back to the macro call range if the node cannot be mapped up fully. @@ -1067,6 +951,7 @@ impl From> for InFile { } } +// FIXME: Get rid of this fn ascend_node_border_tokens( db: &dyn db::ExpandDatabase, InFile { file_id, value: node }: InFile<&SyntaxNode>, @@ -1090,13 +975,13 @@ fn ascend_call_token( token: InFile, ) -> Option> { let mut mapping = expansion.map_token_up(db, token.as_ref())?; - while let (mapped, Origin::Call) = mapping { - match mapped.file_id.expansion_info(db) { - Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?, - None => return Some(mapped), + + loop { + match mapping.file_id.expansion_info(db) { + Some(info) => mapping = info.map_token_up(db, mapping.as_ref())?, + None => return Some(mapping), } } - None } impl InFile { diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index ab3809abc7..9dd4965c15 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -18,8 +18,8 @@ macro_rules! 
__quote { crate::tt::Subtree { delimiter: crate::tt::Delimiter { kind: crate::tt::DelimiterKind::$delim, - open: crate::tt::TokenId::unspecified(), - close: crate::tt::TokenId::unspecified(), + open: ::DUMMY, + close: ::DUMMY, }, token_trees: $crate::quote::IntoTt::to_tokens(children), } @@ -32,7 +32,7 @@ macro_rules! __quote { crate::tt::Leaf::Punct(crate::tt::Punct { char: $first, spacing: crate::tt::Spacing::Alone, - span: crate::tt::TokenId::unspecified(), + span: ::DUMMY, }).into() ] } @@ -44,12 +44,12 @@ macro_rules! __quote { crate::tt::Leaf::Punct(crate::tt::Punct { char: $first, spacing: crate::tt::Spacing::Joint, - span: crate::tt::TokenId::unspecified(), + span: ::DUMMY, }).into(), crate::tt::Leaf::Punct(crate::tt::Punct { char: $sec, spacing: crate::tt::Spacing::Alone, - span: crate::tt::TokenId::unspecified(), + span: ::DUMMY, }).into() ] } @@ -89,7 +89,7 @@ macro_rules! __quote { vec![ { crate::tt::Leaf::Ident(crate::tt::Ident { text: stringify!($tt).into(), - span: crate::tt::TokenId::unspecified(), + span: ::DUMMY, }).into() }] }; @@ -195,20 +195,22 @@ macro_rules! impl_to_to_tokentrees { } impl_to_to_tokentrees! { - u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; - usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; - i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; - bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; + u32 => self { crate::tt::Literal{text: self.to_string().into(), span: ::DUMMY} }; + usize => self { crate::tt::Literal{text: self.to_string().into(), span: ::DUMMY} }; + i32 => self { crate::tt::Literal{text: self.to_string().into(), span: ::DUMMY} }; + bool => self { crate::tt::Ident{text: self.to_string().into(), span: ::DUMMY} }; crate::tt::Leaf => self { self }; crate::tt::Literal => self { self }; crate::tt::Ident => self { self }; crate::tt::Punct => self { self }; - &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}; - String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}} + &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: ::DUMMY}}; + String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: ::DUMMY}} } #[cfg(test)] mod tests { + use expect_test::expect; + #[test] fn test_quote_delimiters() { assert_eq!(quote!({}).to_string(), "{}"); @@ -231,7 +233,10 @@ mod tests { } fn mk_ident(name: &str) -> crate::tt::Ident { - crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() } + crate::tt::Ident { + text: name.into(), + span: ::DUMMY, + } } #[test] @@ -241,7 +246,9 @@ mod tests { let quoted = quote!(#a); assert_eq!(quoted.to_string(), "hello"); let t = format!("{quoted:?}"); - assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n IDENT hello 4294967295"); + expect![[r#" + SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::>(0) } } SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::>(0) } } + IDENT hello SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::>(0) } }"#]].assert_eq(&t); } #[test] @@ -273,8 +280,8 @@ mod tests { let list = crate::tt::Subtree { delimiter: 
crate::tt::Delimiter { kind: crate::tt::DelimiterKind::Brace, - open: crate::tt::TokenId::unspecified(), - close: crate::tt::TokenId::unspecified(), + open: ::DUMMY, + close: ::DUMMY, }, token_trees: fields.collect(), }; diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs index f4c079b48c..c2ff487ef9 100644 --- a/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/crates/hir-ty/src/diagnostics/decl_check.rs @@ -24,7 +24,7 @@ use hir_def::{ }; use hir_expand::{ name::{AsName, Name}, - HirFileId, + HirFileId, HirFileIdExt, }; use stdx::{always, never}; use syntax::{ diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 62efb85851..fbfb6ff8cd 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -21,7 +21,7 @@ use hir_def::{ AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId, VariantId, }; -use hir_expand::{mod_path::ModPath, InFile}; +use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile}; use intern::Interned; use la_arena::ArenaMap; use rustc_hash::{FxHashMap, FxHashSet}; diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index 936581bfe3..f4129e736e 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs @@ -6,7 +6,7 @@ pub use hir_def::db::*; pub use hir_expand::db::{ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, - ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery, - MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, + ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgQuery, MacroExpandQuery, + ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, }; pub use hir_ty::db::*; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 1bfbf7212b..50d88b4cf8 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -124,7 +124,7 @@ pub use { hir_expand::{ attrs::{Attr, AttrId}, name::{known, Name}, - ExpandResult, HirFileId, InFile, MacroFile, Origin, + tt, ExpandResult, HirFileId, HirFileIdExt, InFile, MacroFile, }, hir_ty::{ display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite}, diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index a42e0978b2..758e6118aa 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -15,7 +15,7 @@ use hir_def::{ type_ref::Mutability, AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId, }; -use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId}; +use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, HirFileIdExt, MacroCallId}; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; @@ -549,7 +549,7 @@ impl<'db> SemanticsImpl<'db> { let mut mcache = self.macro_call_cache.borrow_mut(); let mut process_expansion_for_token = - |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| { + |stack: &mut SmallVec<_>, macro_file, token: InFile<&_>| { let expansion_info = cache .entry(macro_file) .or_insert_with(|| macro_file.expansion_info(self.db.upcast())) @@ -562,7 +562,6 @@ impl<'db> SemanticsImpl<'db> { let mapped_tokens = expansion_info.map_token_down( self.db.upcast(), - item, token, relative_token_offset, )?; @@ -587,17 +586,12 @@ impl<'db> SemanticsImpl<'db> { // Don't force populate the dyn cache for items that don't have an attribute anyways return None; } - 
Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item)) + Some(ctx.item_to_macro_call(token.with_value(item.clone()))?) }) }); - if let Some((call_id, item)) = containing_attribute_macro_call { + if let Some(call_id) = containing_attribute_macro_call { let file_id = call_id.as_file(); - return process_expansion_for_token( - &mut stack, - file_id, - Some(item), - token.as_ref(), - ); + return process_expansion_for_token(&mut stack, file_id, token.as_ref()); } // Then check for token trees, that means we are either in a function-like macro or @@ -622,7 +616,7 @@ impl<'db> SemanticsImpl<'db> { it } }; - process_expansion_for_token(&mut stack, file_id, None, token.as_ref()) + process_expansion_for_token(&mut stack, file_id, token.as_ref()) } else if let Some(meta) = ast::Meta::cast(parent) { // attribute we failed expansion for earlier, this might be a derive invocation // or derive helper attribute @@ -647,7 +641,6 @@ impl<'db> SemanticsImpl<'db> { return process_expansion_for_token( &mut stack, file_id, - Some(adt.into()), token.as_ref(), ); } @@ -679,13 +672,11 @@ impl<'db> SemanticsImpl<'db> { let id = self.db.ast_id_map(token.file_id).ast_id(&adt); let helpers = def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?; - let item = Some(adt.into()); let mut res = None; for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { res = res.or(process_expansion_for_token( &mut stack, derive.as_file(), - item.clone(), token.as_ref(), )); } diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs index aabda36556..5b20c87315 100644 --- a/crates/hir/src/semantics/source_to_def.rs +++ b/crates/hir/src/semantics/source_to_def.rs @@ -97,7 +97,7 @@ use hir_def::{ FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId, }; -use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId}; +use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId}; use rustc_hash::FxHashMap; use smallvec::SmallVec; use stdx::{impl_from, never}; diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 55c2f8324c..8fc7f2c05d 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -30,7 +30,7 @@ use hir_expand::{ mod_path::path, name, name::{AsName, Name}, - HirFileId, InFile, + HirFileId, HirFileIdExt, InFile, }; use hir_ty::{ diagnostics::{ diff --git a/crates/ide-assists/src/handlers/extract_module.rs b/crates/ide-assists/src/handlers/extract_module.rs index 6839c5820d..4b9fedc7e8 100644 --- a/crates/ide-assists/src/handlers/extract_module.rs +++ b/crates/ide-assists/src/handlers/extract_module.rs @@ -3,7 +3,7 @@ use std::{ iter, }; -use hir::{HasSource, ModuleSource}; +use hir::{HasSource, HirFileIdExt, ModuleSource}; use ide_db::{ assists::{AssistId, AssistKind}, base_db::FileId, diff --git a/crates/ide-assists/src/handlers/fix_visibility.rs b/crates/ide-assists/src/handlers/fix_visibility.rs index c9f272474e..204e796fa2 100644 --- a/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/crates/ide-assists/src/handlers/fix_visibility.rs @@ -1,4 +1,6 @@ -use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef}; +use hir::{ + db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef, +}; use ide_db::base_db::FileId; use syntax::{ ast::{self, 
edit_in_place::HasVisibilityEdit, make, HasVisibility as _}, diff --git a/crates/ide-assists/src/handlers/generate_constant.rs b/crates/ide-assists/src/handlers/generate_constant.rs index eccd7675fb..a4e8e7388f 100644 --- a/crates/ide-assists/src/handlers/generate_constant.rs +++ b/crates/ide-assists/src/handlers/generate_constant.rs @@ -1,5 +1,5 @@ use crate::assist_context::{AssistContext, Assists}; -use hir::{HasVisibility, HirDisplay, Module}; +use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module}; use ide_db::{ assists::{AssistId, AssistKind}, base_db::{FileId, Upcast}, diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index 184f523e01..be7a5e6c8b 100644 --- a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -1,4 +1,4 @@ -use hir::{HasSource, HirDisplay, InFile}; +use hir::{HasSource, HirDisplay, HirFileIdExt, InFile}; use ide_db::assists::{AssistId, AssistKind}; use syntax::{ ast::{self, make, HasArgList}, diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index f74fc5df4b..a113c817f7 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -1,5 +1,6 @@ use hir::{ - Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, Type, TypeInfo, + Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics, Type, + TypeInfo, }; use ide_db::{ base_db::FileId, @@ -510,7 +511,7 @@ fn assoc_fn_target_info( } fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize { - match &target { + match target { GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(), GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'), } diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs index 5fcab8c02b..10076e60c3 100644 --- a/crates/ide-assists/src/handlers/remove_unused_imports.rs +++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs @@ -1,6 +1,6 @@ use std::collections::{hash_map::Entry, HashMap}; -use hir::{InFile, Module, ModuleSource}; +use hir::{HirFileIdExt, InFile, Module, ModuleSource}; use ide_db::{ base_db::FileRange, defs::Definition, diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 69a4e748b7..1b373bcb8c 100644 --- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -1,4 +1,4 @@ -use hir::{InFile, ModuleDef}; +use hir::{HirFileIdExt, InFile, ModuleDef}; use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator}; use itertools::Itertools; use syntax::{ diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index 7d38c638a8..f49abcbae9 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -683,7 +683,7 @@ pub(super) fn complete_name_ref( ctx: &CompletionContext<'_>, NameRefContext { nameref, kind }: &NameRefContext, ) { - match kind { + match dbg!(kind) { NameRefKind::Path(path_ctx) => { flyimport::import_on_the_fly_path(acc, ctx, path_ctx); diff --git a/crates/ide-completion/src/completions/mod_.rs 
b/crates/ide-completion/src/completions/mod_.rs index 1e09894059..5d138eea46 100644 --- a/crates/ide-completion/src/completions/mod_.rs +++ b/crates/ide-completion/src/completions/mod_.rs @@ -2,7 +2,7 @@ use std::iter; -use hir::{Module, ModuleSource}; +use hir::{HirFileIdExt, Module, ModuleSource}; use ide_db::{ base_db::{SourceDatabaseExt, VfsPath}, FxHashSet, RootDatabase, SymbolKind, diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index a0b05c87ae..65eaa6510f 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -99,7 +99,7 @@ impl RootDatabase { hir::db::AstIdMapQuery hir::db::ParseMacroExpansionQuery hir::db::InternMacroCallQuery - hir::db::MacroArgNodeQuery + hir::db::MacroArgQuery hir::db::DeclMacroExpanderQuery hir::db::MacroExpandQuery hir::db::ExpandProcMacroQuery diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 226def4d52..258d893b47 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -204,7 +204,7 @@ impl RootDatabase { hir_db::AstIdMapQuery // hir_db::ParseMacroExpansionQuery // hir_db::InternMacroCallQuery - hir_db::MacroArgNodeQuery + hir_db::MacroArgQuery hir_db::DeclMacroExpanderQuery // hir_db::MacroExpandQuery hir_db::ExpandProcMacroQuery diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 353a9749a3..cc9038fdfa 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -24,7 +24,7 @@ use std::fmt; use base_db::{AnchoredPathBuf, FileId, FileRange}; use either::Either; -use hir::{FieldSource, HasSource, InFile, ModuleSource, Semantics}; +use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics}; use stdx::never; use syntax::{ ast::{self, HasName}, diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index 22438a203b..68f2ad4945 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -8,7 +8,8 @@ use std::mem; use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use hir::{ - AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility, + AsAssocItem, DefWithBody, HasAttrs, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics, + Visibility, }; use memchr::memmem::Finder; use nohash_hasher::IntMap; diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt index 7834c66033..72a6eb5eab 100644 --- a/crates/ide-db/src/test_data/test_doc_alias.txt +++ b/crates/ide-db/src/test_data/test_doc_alias.txt @@ -21,9 +21,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -50,9 +48,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -79,9 +75,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -108,9 +102,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -137,9 +129,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -166,9 +156,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -195,9 +183,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, diff --git 
a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt index 87ad5844c6..375ac55981 100644 --- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -19,9 +19,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: TYPE_ALIAS, @@ -46,9 +44,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, @@ -73,9 +69,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, @@ -102,9 +96,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: ENUM, @@ -131,9 +123,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -160,9 +150,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_DEF, @@ -187,9 +175,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STATIC, @@ -216,9 +202,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -245,11 +229,7 @@ ), loc: DeclarationLocation { hir_file_id: MacroFile( - MacroFile { - macro_call_id: MacroCallId( - 0, - ), - }, + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -276,9 +256,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -307,9 +285,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -338,9 +314,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -365,9 +339,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: TRAIT, @@ -394,9 +366,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -423,9 +393,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: UNION, @@ -452,9 +420,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -481,9 +447,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -510,9 +474,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -537,9 +499,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -566,9 +526,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -593,9 +551,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -622,9 +578,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -649,9 +603,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -691,9 +643,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, 
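The flattened output in the hunks above comes from `HirFileId` now delegating its `Debug` form to the `HirFileIdRepr` that lives in `base_db::span`: the id prints directly as `FileId(0)` or `MacroFile(0)` instead of wrapping the inner newtypes. As a minimal sketch, with plain `u32`s as stand-ins for the interned ids and assuming the moved type keeps the MSB tag-bit layout of the `HirFileId` impl deleted from `hir-expand` earlier in this patch:

    // Hypothetical stand-in for the tag-bit decoding; not the in-tree type.
    const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

    #[derive(Debug, PartialEq)]
    enum HirFileIdRepr {
        FileId(u32),
        MacroFile(u32),
    }

    fn repr(raw: u32) -> HirFileIdRepr {
        if raw & MACRO_FILE_TAG_MASK == 0 {
            HirFileIdRepr::FileId(raw)
        } else {
            // Clearing the tag bit recovers the interned macro call id.
            HirFileIdRepr::MacroFile(raw ^ MACRO_FILE_TAG_MASK)
        }
    }

    fn main() {
        assert_eq!(repr(0), HirFileIdRepr::FileId(0));
        assert_eq!(repr(MACRO_FILE_TAG_MASK), HirFileIdRepr::MacroFile(0));
    }
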
@@ -731,9 +681,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -760,9 +708,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -789,9 +735,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -818,9 +762,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -847,9 +789,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index d7dca1083a..cb38bc54d7 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -1,7 +1,7 @@ use either::Either; use hir::{ db::{ExpandDatabase, HirDatabase}, - known, AssocItem, HirDisplay, InFile, Type, + known, AssocItem, HirDisplay, HirFileIdExt, InFile, Type, }; use ide_db::{ assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search, diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs index 0f695b2745..f93a35cf18 100644 --- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs +++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs @@ -1,4 +1,5 @@ use hir::db::ExpandDatabase; +use hir::HirFileIdExt; use ide_db::{assists::Assist, source_change::SourceChange}; use syntax::{ast, SyntaxNode}; use syntax::{match_ast, AstNode}; diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs index ee8a9c9579..0abcbffe72 100644 --- a/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -1,5 +1,5 @@ use either::Either; -use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics}; +use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics}; use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase}; use syntax::{ ast::{self, edit::IndentLevel, make}, diff --git a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs index d15233d15c..258ac6cd82 100644 --- a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs +++ b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs @@ -1,4 +1,4 @@ -use hir::{db::ExpandDatabase, InFile}; +use hir::{db::ExpandDatabase, HirFileIdExt, InFile}; use ide_db::source_change::SourceChange; use syntax::{ ast::{self, HasArgList}, diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index c92d92ceae..fd00535d0c 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -1,4 +1,4 @@ -use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, InFile, Type}; +use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type}; use ide_db::{famous_defs::FamousDefs, source_change::SourceChange}; use syntax::{ ast::{self, BlockExpr, ExprStmt}, diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs 
b/crates/ide-diagnostics/src/handlers/unresolved_module.rs index be24e50c98..bbbd21741e 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -1,4 +1,4 @@ -use hir::db::ExpandDatabase; +use hir::{db::ExpandDatabase, HirFileIdExt}; use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit}; use itertools::Itertools; use syntax::AstNode; diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index f834f2ce59..6f41f51f80 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -149,7 +149,7 @@ mod tests { fn check_hierarchy( ra_fixture: &str, - expected: Expect, + expected_nav: Expect, expected_incoming: Expect, expected_outgoing: Expect, ) { @@ -158,7 +158,7 @@ mod tests { let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info; assert_eq!(navs.len(), 1); let nav = navs.pop().unwrap(); - expected.assert_eq(&nav.debug_render()); + expected_nav.assert_eq(&nav.debug_render()); let item_pos = FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() }; diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index 3220774567..f7c6a0139e 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -1,4 +1,4 @@ -use hir::Semantics; +use hir::{HirFileIdExt, Semantics}; use ide_db::{ base_db::FileId, helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index e09b9f3914..59e8578cf1 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -60,13 +60,13 @@ pub(crate) fn goto_definition( .into_iter() .filter_map(|token| { let parent = token.parent()?; - if let Some(tt) = ast::TokenTree::cast(parent) { + if let Some(tt) = ast::TokenTree::cast(parent.clone()) { if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) { return Some(vec![x]); } } Some( - IdentClass::classify_token(sema, &token)? + IdentClass::classify_node(sema, &parent)? .definitions() .into_iter() .flat_map(|def| { @@ -392,6 +392,8 @@ fn bar() { ); } + // FIXME: We should emit two targets here, one for the identifier in the declaration, one for + // the macro call #[test] fn goto_def_for_macro_defined_fn_no_arg() { check( @@ -399,10 +401,10 @@ fn bar() { //- /lib.rs macro_rules! define_fn { () => (fn foo() {}) + //^^^ } define_fn!(); -//^^^^^^^^^^^^^ fn bar() { $0foo(); diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index ac9df5ed6d..ad1eb24997 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -4,7 +4,7 @@ //! tests. This module also implements a couple of magic tricks, like renaming //! `self` and to `self` (to switch between associated function and method). 
-use hir::{AsAssocItem, InFile, Semantics}; +use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics}; use ide_db::{ base_db::FileId, defs::{Definition, NameClass, NameRefClass}, diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 07cdddd15f..954a364c78 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -2,7 +2,7 @@ use std::fmt; use ast::HasName; use cfg::CfgExpr; -use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, Semantics}; +use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, HirFileIdExt, Semantics}; use ide_assists::utils::test_related_attribute; use ide_db::{ base_db::{FilePosition, FileRange}, diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index aabd26da28..7ee50f7a67 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -3,7 +3,7 @@ use std::collections::HashMap; -use hir::{db::HirDatabase, Crate, Module}; +use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; use ide_db::helpers::get_definition; use ide_db::{ base_db::{FileId, FileRange, SourceDatabaseExt}, diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index 7d00282fc1..1bffab29cf 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -1,6 +1,6 @@ //! Computes color for a single element. -use hir::{AsAssocItem, HasVisibility, Semantics}; +use hir::{AsAssocItem, HasVisibility, HirFileIdExt, Semantics}; use ide_db::{ defs::{Definition, IdentClass, NameClass, NameRefClass}, FxHashMap, RootDatabase, SymbolKind, diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index 68b592ffaa..4d13190949 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -4,13 +4,12 @@ // to run rust-analyzer as a library. 
use std::{collections::hash_map::Entry, mem, path::Path, sync}; -use ::tt::token_id as tt; use crossbeam_channel::{unbounded, Receiver}; use ide::{AnalysisHost, Change, SourceRoot}; use ide_db::{ base_db::{ - CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, - ProcMacroLoadResult, ProcMacros, + span::SpanData, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, + ProcMacroKind, ProcMacroLoadResult, ProcMacros, }, FxHashMap, }; @@ -374,16 +373,19 @@ struct Expander(proc_macro_api::ProcMacro); impl ProcMacroExpander for Expander { fn expand( &self, - subtree: &tt::Subtree, - attrs: Option<&tt::Subtree>, + subtree: &tt::Subtree, + attrs: Option<&tt::Subtree>, env: &Env, - ) -> Result { - let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); - match self.0.expand(subtree, attrs, env) { - Ok(Ok(subtree)) => Ok(subtree), - Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), - Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), - } + ) -> Result, ProcMacroExpansionError> { + let _ = (subtree, attrs, env); + + // let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); + // match self.0.expand(subtree, attrs, env) { + // Ok(Ok(subtree)) => Ok(subtree), + // Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), + // Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), + // } + todo!() } } @@ -394,10 +396,10 @@ struct IdentityExpander; impl ProcMacroExpander for IdentityExpander { fn expand( &self, - subtree: &tt::Subtree, - _: Option<&tt::Subtree>, + subtree: &tt::Subtree, + _: Option<&tt::Subtree>, _: &Env, - ) -> Result { + ) -> Result, ProcMacroExpansionError> { Ok(subtree.clone()) } } @@ -409,10 +411,10 @@ struct EmptyExpander; impl ProcMacroExpander for EmptyExpander { fn expand( &self, - _: &tt::Subtree, - _: Option<&tt::Subtree>, + _: &tt::Subtree, + _: Option<&tt::Subtree>, _: &Env, - ) -> Result { + ) -> Result, ProcMacroExpansionError> { Ok(tt::Subtree::empty()) } } diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 19cb20354b..4f60e90773 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -6,13 +6,19 @@ use syntax::{ AstNode, SmolStr, }; use test_utils::{bench, bench_fixture, skip_slow_tests}; -use tt::{Span, TokenId}; +use tt::{Span, SpanData}; use crate::{ parser::{MetaVarKind, Op, RepeatKind, Separator}, syntax_node_to_token_tree, DeclarativeMacro, }; +#[derive(PartialEq, Eq, Clone, Copy, Debug)] +struct DummyFile; +impl Span for DummyFile { + const DUMMY: Self = DummyFile; +} + #[test] fn benchmark_parse_macro_rules() { if skip_slow_tests() { @@ -39,7 +45,7 @@ fn benchmark_expand_macro_rules() { invocations .into_iter() .map(|(id, tt)| { - let res = rules[&id].expand(tt); + let res = rules[&id].expand(&tt); assert!(res.err.is_none()); res.value.token_trees.len() }) @@ -48,14 +54,14 @@ fn benchmark_expand_macro_rules() { assert_eq!(hash, 69413); } -fn macro_rules_fixtures() -> FxHashMap { +fn macro_rules_fixtures() -> FxHashMap>> { macro_rules_fixtures_tt() .into_iter() .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true))) .collect() } -fn macro_rules_fixtures_tt() -> FxHashMap> { +fn macro_rules_fixtures_tt() -> FxHashMap>> { let fixture = bench_fixture::numerous_macro_rules(); let source_file = ast::SourceFile::parse(&fixture).ok().unwrap(); @@ -65,7 +71,12 @@ fn macro_rules_fixtures_tt() -> FxHashMap> { .filter_map(ast::MacroRules::cast) .map(|rule| { let id = 
rule.name().unwrap().to_string(); - let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax()); + let def_tt = syntax_node_to_token_tree( + rule.token_tree().unwrap().syntax(), + DummyFile, + 0.into(), + &Default::default(), + ); (id, def_tt) }) .collect() @@ -73,8 +84,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap> { /// Generate random invocation fixtures from rules fn invocation_fixtures( - rules: &FxHashMap, -) -> Vec<(String, tt::Subtree)> { + rules: &FxHashMap>>, +) -> Vec<(String, tt::Subtree>)> { let mut seed = 123456789; let mut res = Vec::new(); @@ -96,8 +107,8 @@ fn invocation_fixtures( loop { let mut subtree = tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::DUMMY, - close: tt::TokenId::DUMMY, + open: SpanData::DUMMY, + close: SpanData::DUMMY, kind: tt::DelimiterKind::Invisible, }, token_trees: vec![], @@ -105,7 +116,7 @@ fn invocation_fixtures( for op in rule.lhs.iter() { collect_from_op(op, &mut subtree, &mut seed); } - if it.expand(subtree.clone()).err.is_none() { + if it.expand(&subtree).err.is_none() { res.push((name.clone(), subtree)); break; } @@ -119,7 +130,11 @@ fn invocation_fixtures( } return res; - fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) { + fn collect_from_op( + op: &Op>, + parent: &mut tt::Subtree>, + seed: &mut usize, + ) { return match op { Op::Var { kind, .. } => match kind.as_ref() { Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")), @@ -205,32 +220,22 @@ fn invocation_fixtures( *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c); *seed } - fn make_ident(ident: &str) -> tt::TokenTree { - tt::Leaf::Ident(tt::Ident { span: tt::TokenId::DUMMY, text: SmolStr::new(ident) }) + fn make_ident(ident: &str) -> tt::TokenTree> { + tt::Leaf::Ident(tt::Ident { span: SpanData::DUMMY, text: SmolStr::new(ident) }).into() + } + fn make_punct(char: char) -> tt::TokenTree> { + tt::Leaf::Punct(tt::Punct { span: SpanData::DUMMY, char, spacing: tt::Spacing::Alone }) .into() } - fn make_punct(char: char) -> tt::TokenTree { - tt::Leaf::Punct(tt::Punct { - span: tt::TokenId::DUMMY, - char, - spacing: tt::Spacing::Alone, - }) - .into() - } - fn make_literal(lit: &str) -> tt::TokenTree { - tt::Leaf::Literal(tt::Literal { span: tt::TokenId::DUMMY, text: SmolStr::new(lit) }) - .into() + fn make_literal(lit: &str) -> tt::TokenTree> { + tt::Leaf::Literal(tt::Literal { span: SpanData::DUMMY, text: SmolStr::new(lit) }).into() } fn make_subtree( kind: tt::DelimiterKind, - token_trees: Option>>, - ) -> tt::TokenTree { + token_trees: Option>>>, + ) -> tt::TokenTree> { tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId::DUMMY, - close: tt::TokenId::DUMMY, - kind, - }, + delimiter: tt::Delimiter { open: SpanData::DUMMY, close: SpanData::DUMMY, kind }, token_trees: token_trees.unwrap_or_default(), } .into() diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 4659980293..43543479eb 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -19,7 +19,7 @@ mod benchmark; mod token_map; use stdx::impl_from; -use tt::{Span, TokenId}; +use tt::Span; use std::fmt; @@ -34,10 +34,8 @@ pub use tt::{Delimiter, DelimiterKind, Punct}; pub use crate::{ syntax_bridge::{ - parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_map, - syntax_node_to_token_map_with_modifications, syntax_node_to_token_tree, - syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken, - SyntheticTokenId, + map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree, 
syntax_node_to_token_tree, + syntax_node_to_token_tree_censored, token_tree_to_syntax_node, }, token_map::TokenMap, }; @@ -125,10 +123,8 @@ impl fmt::Display for CountError { /// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident` /// and `$()*` have special meaning (see `Var` and `Repeat` data structures) #[derive(Clone, Debug, PartialEq, Eq)] -pub struct DeclarativeMacro { - rules: Box<[Rule]>, - /// Highest id of the token we have in TokenMap - shift: Shift, +pub struct DeclarativeMacro { + rules: Box<[Rule]>, // This is used for correctly determining the behavior of the pat fragment // FIXME: This should be tracked by hygiene of the fragment identifier! is_2021: bool, @@ -141,91 +137,13 @@ struct Rule { rhs: MetaTemplate, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct Shift(u32); - -impl Shift { - pub fn new(tt: &tt::Subtree) -> Shift { - // Note that TokenId is started from zero, - // We have to add 1 to prevent duplication. - let value = max_id(tt).map_or(0, |it| it + 1); - return Shift(value); - - // Find the max token id inside a subtree - fn max_id(subtree: &tt::Subtree) -> Option { - let filter = - |tt: &_| match tt { - tt::TokenTree::Subtree(subtree) => { - let tree_id = max_id(subtree); - if subtree.delimiter.open != tt::TokenId::unspecified() { - Some(tree_id.map_or(subtree.delimiter.open.0, |t| { - t.max(subtree.delimiter.open.0) - })) - } else { - tree_id - } - } - tt::TokenTree::Leaf(leaf) => { - let &(tt::Leaf::Ident(tt::Ident { span, .. }) - | tt::Leaf::Punct(tt::Punct { span, .. }) - | tt::Leaf::Literal(tt::Literal { span, .. })) = leaf; - - (span != tt::TokenId::unspecified()).then_some(span.0) - } - }; - subtree.token_trees.iter().filter_map(filter).max() - } - } - - /// Shift given TokenTree token id - pub fn shift_all(self, tt: &mut tt::Subtree) { - for t in &mut tt.token_trees { - match t { - tt::TokenTree::Leaf( - tt::Leaf::Ident(tt::Ident { span, .. }) - | tt::Leaf::Punct(tt::Punct { span, .. }) - | tt::Leaf::Literal(tt::Literal { span, .. }), - ) => *span = self.shift(*span), - tt::TokenTree::Subtree(tt) => { - tt.delimiter.open = self.shift(tt.delimiter.open); - tt.delimiter.close = self.shift(tt.delimiter.close); - self.shift_all(tt) - } - } - } - } - - pub fn shift(self, id: tt::TokenId) -> tt::TokenId { - if id == tt::TokenId::unspecified() { - id - } else { - tt::TokenId(id.0 + self.0) - } - } - - pub fn unshift(self, id: tt::TokenId) -> Option { - id.0.checked_sub(self.0).map(tt::TokenId) - } -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum Origin { - Def, - Call, -} - -impl DeclarativeMacro { - pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro { - DeclarativeMacro { - rules: Box::default(), - shift: Shift(0), - is_2021, - err: Some(Box::new(err)), - } +impl DeclarativeMacro { + pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro { + DeclarativeMacro { rules: Box::default(), is_2021, err: Some(Box::new(err)) } } /// The old, `macro_rules! m {}` flavor. - pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { + pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { // Note: this parsing can be implemented using mbe machinery itself, by // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing // manually seems easier. 
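A note on the machinery deleted above: `Shift` existed because a definition-site token tree and a call-site token tree shared one flat `TokenId` space, so call-site ids were offset past the definition's highest id, and `unshift` recovered which side an id came from (the distinction the removed `Origin` enum encoded). Below is a standalone sketch of that scheme with plain `u32` ids, simplified from the deleted code rather than a drop-in for it; with anchored spans every token carries its file and range directly, so no such disambiguation survives this patch:

    struct Shift(u32);

    impl Shift {
        /// Offset call-site ids past the def site's id range (max id + 1).
        fn new(max_def_site_id: u32) -> Shift {
            Shift(max_def_site_id + 1)
        }
        fn shift(&self, call_site_id: u32) -> u32 {
            call_site_id + self.0
        }
        /// `Some` means the id originated at the call site, `None` at the
        /// definition site.
        fn unshift(&self, id: u32) -> Option<u32> {
            id.checked_sub(self.0)
        }
    }

    fn main() {
        let shift = Shift::new(9); // def-site ids occupy 0..=9
        assert_eq!(shift.shift(3), 13);
        assert_eq!(shift.unshift(13), Some(3)); // call-site token
        assert_eq!(shift.unshift(7), None); // def-site token
    }
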
@@ -257,11 +175,11 @@ impl DeclarativeMacro { } } - DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err } + DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err } } /// The new, unstable `macro m {}` flavor. - pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { + pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { let mut src = TtIter::new(tt); let mut rules = Vec::new(); let mut err = None; @@ -308,31 +226,15 @@ impl DeclarativeMacro { } } - DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err } - } - - pub fn expand(&self, mut tt: tt::Subtree) -> ExpandResult> { - self.shift.shift_all(&mut tt); - expander::expand_rules(&self.rules, &tt, self.is_2021) + DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err } } pub fn err(&self) -> Option<&ParseError> { self.err.as_deref() } - pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { - self.shift.shift(id) - } - - pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) { - match self.shift.unshift(id) { - Some(id) => (id, Origin::Call), - None => (id, Origin::Def), - } - } - - pub fn shift(&self) -> Shift { - self.shift + pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult> { + expander::expand_rules(&self.rules, &tt, self.is_2021) } } diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 01aab6b659..c8c2e5dcd5 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -1,16 +1,15 @@ //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`]. -use rustc_hash::FxHashMap; -use stdx::{always, non_empty_vec::NonEmptyVec}; +use stdx::non_empty_vec::NonEmptyVec; use syntax::{ ast::{self, make::tokens::doc_comment}, - AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind, + AstToken, NodeOrToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; use tt::{ buffer::{Cursor, TokenBuffer}, - TokenId, + Span, SpanData, }; use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap}; @@ -20,75 +19,37 @@ mod tests; /// Convert the syntax node to a `TokenTree` (what macro /// will consume). -pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) { - let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications( - node, - Default::default(), - 0, - Default::default(), - Default::default(), - ); - (subtree, token_map) -} - -/// Convert the syntax node to a `TokenTree` (what macro will consume) -/// with the censored range excluded. -pub fn syntax_node_to_token_tree_with_modifications( +/// `anchor` and `anchor_offset` are used to convert the node's spans +/// to relative spans, relative to the passed anchor. +/// `map` is used to resolve the converted spans accordingly. 
+/// Tokens whose absolute range has no entry in `map` fall back to a span
+/// anchored at `anchor`, with a range made relative to `anchor_offset`.
+pub fn syntax_node_to_token_tree<SpanAnchor: Copy>(
     node: &SyntaxNode,
-    existing_token_map: TokenMap,
-    next_id: u32,
-    replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-    append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-) -> (tt::Subtree, TokenMap, u32) {
-    let global_offset = node.text_range().start();
-    let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
-    let subtree = convert_tokens(&mut c);
-    c.id_alloc.map.shrink_to_fit();
-    always!(c.replace.is_empty(), "replace: {:?}", c.replace);
-    always!(c.append.is_empty(), "append: {:?}", c.append);
-    (subtree, c.id_alloc.map, c.id_alloc.next_id)
+    anchor: SpanAnchor,
+    anchor_offset: TextSize,
+    map: &TokenMap<SpanData<SpanAnchor>>,
+) -> tt::Subtree<SpanData<SpanAnchor>>
+where
+    SpanData<SpanAnchor>: Span,
+{
+    assert!(anchor_offset <= node.text_range().start());
+    let mut c = Converter::new(node, anchor_offset, anchor, vec![], map);
+    convert_tokens(&mut c)
 }
 
-/// Convert the syntax node to a `TokenTree` (what macro
-/// will consume).
-pub fn syntax_node_to_token_map(node: &SyntaxNode) -> TokenMap {
-    syntax_node_to_token_map_with_modifications(
-        node,
-        Default::default(),
-        0,
-        Default::default(),
-        Default::default(),
-    )
-    .0
-}
-
-/// Convert the syntax node to a `TokenTree` (what macro will consume)
-/// with the censored range excluded.
-pub fn syntax_node_to_token_map_with_modifications(
+pub fn syntax_node_to_token_tree_censored<SpanAnchor: Copy>(
     node: &SyntaxNode,
-    existing_token_map: TokenMap,
-    next_id: u32,
-    replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-    append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-) -> (TokenMap, u32) {
-    let global_offset = node.text_range().start();
-    let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
-    collect_tokens(&mut c);
-    c.id_alloc.map.shrink_to_fit();
-    always!(c.replace.is_empty(), "replace: {:?}", c.replace);
-    always!(c.append.is_empty(), "append: {:?}", c.append);
-    (c.id_alloc.map, c.id_alloc.next_id)
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct SyntheticTokenId(pub u32);
-
-#[derive(Debug, Clone)]
-pub struct SyntheticToken {
-    pub kind: SyntaxKind,
-    pub text: SmolStr,
-    pub range: TextRange,
-    pub id: SyntheticTokenId,
+    anchor: SpanAnchor,
+    anchor_offset: TextSize,
+    map: &TokenMap<SpanData<SpanAnchor>>,
+    censored: Vec<SyntaxNode>,
+) -> tt::Subtree<SpanData<SpanAnchor>>
+where
+    SpanData<SpanAnchor>: Span,
+{
+    assert!(anchor_offset <= node.text_range().start());
+    let mut c = Converter::new(node, anchor_offset, anchor, censored, map);
+    convert_tokens(&mut c)
 }
 
 // The following items are what `rustc` macro can be parsed into :
@@ -103,10 +64,13 @@ pub struct SyntheticToken {
 // * AssocItems(SmallVec<[ast::AssocItem; 1]>)
 // * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
 
-pub fn token_tree_to_syntax_node(
-    tt: &tt::Subtree,
+pub fn token_tree_to_syntax_node<SpanAnchor: Copy>(
+    tt: &tt::Subtree<SpanData<SpanAnchor>>,
     entry_point: parser::TopEntryPoint,
-) -> (Parse<SyntaxNode>, TokenMap) {
+) -> (Parse<SyntaxNode>, TokenMap<SpanData<SpanAnchor>>)
+where
+    SpanData<SpanAnchor>: Span,
+{
     let buffer = match tt {
         tt::Subtree {
             delimiter: tt::Delimiter { kind: tt::DelimiterKind::Invisible, ..
}, @@ -133,29 +97,40 @@ pub fn token_tree_to_syntax_node( tree_sink.finish() } +pub fn map_from_syntax_node( + node: &SyntaxNode, + anchor: SpanAnchor, + anchor_offset: TextSize, +) -> TokenMap> +where + SpanAnchor: Copy, + SpanData: Span, +{ + let mut map = TokenMap::default(); + node.descendants_with_tokens().filter_map(NodeOrToken::into_token).for_each(|t| { + map.insert(t.text_range(), SpanData { range: t.text_range() - anchor_offset, anchor }); + }); + map +} + /// Convert a string to a `TokenTree` -pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> { +pub fn parse_to_token_tree( + text: &str, + file_id: SpanAnchor, +) -> Option>> +where + SpanData: Span, +{ let lexed = parser::LexedStr::new(text); if lexed.errors().next().is_some() { return None; } - - let mut conv = RawConverter { - lexed, - pos: 0, - id_alloc: TokenIdAlloc { - map: Default::default(), - global_offset: TextSize::default(), - next_id: 0, - }, - }; - - let subtree = convert_tokens(&mut conv); - Some((subtree, conv.id_alloc.map)) + let mut conv = RawConverter { lexed, pos: 0, _offset: TextSize::default(), file_id }; + Some(convert_tokens(&mut conv)) } /// Split token tree with separate expr: $($e:expr)SEP* -pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec> { +pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec> { if tt.token_trees.is_empty() { return Vec::new(); } @@ -191,47 +166,33 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec(conv: &mut C) -> tt::Subtree { - struct StackEntry { - subtree: tt::Subtree, - idx: usize, - open_range: TextRange, - } - - let entry = StackEntry { - subtree: tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }, - // never used (delimiter is `None`) - idx: !0, - open_range: TextRange::empty(TextSize::of('.')), - }; +fn convert_tokens>( + conv: &mut C, +) -> tt::Subtree> +where + SpanData: Span, + SpanAnchor: Copy, +{ + let entry = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }; let mut stack = NonEmptyVec::new(entry); + let anchor = conv.anchor(); loop { - let StackEntry { subtree, .. } = stack.last_mut(); + let subtree = stack.last_mut(); let result = &mut subtree.token_trees; - let (token, range) = match conv.bump() { - Some(it) => it, - None => break, - }; - let synth_id = token.synthetic_id(conv); + let Some((token, rel_range, abs_range)) = conv.bump() else { break }; let kind = token.kind(conv); if kind == COMMENT { - // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can - // figure out which token id to use for the doc comment, if it is converted successfully. 
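// A sketch of the span resolution used throughout this loop (the
// `span_for(abs_range).unwrap_or(..)` calls below). Values are illustrative;
// `offset` and `anchor` are the converter's anchor offset and span anchor:
//
//     // token at absolute range 110..115, with an anchor offset of 100
//     let abs_range = TextRange::new(110.into(), 115.into());
//     let rel_range = abs_range - offset; // 10..15, relative to the anchor
//     let span = conv
//         .span_for(abs_range) // reuse a tracked span if the map has one...
//         .unwrap_or(SpanData { range: rel_range, anchor }); // ...else make one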
- let next_id = conv.id_alloc().peek_next_id(); - if let Some(tokens) = conv.convert_doc_comment(&token, next_id) { - let id = conv.id_alloc().alloc(range, synth_id); - debug_assert_eq!(id, next_id); + if let Some(tokens) = conv.convert_doc_comment( + &token, + conv.span_for(abs_range).unwrap_or(SpanData { range: rel_range, anchor }), + ) { result.extend(tokens); } continue; } let tt = if kind.is_punct() && kind != UNDERSCORE { - if synth_id.is_none() { - assert_eq!(range.len(), TextSize::of('.')); - } - let expected = match subtree.delimiter.kind { tt::DelimiterKind::Parenthesis => Some(T![')']), tt::DelimiterKind::Brace => Some(T!['}']), @@ -241,9 +202,11 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { if let Some(expected) = expected { if kind == expected { - if let Some(entry) = stack.pop() { - conv.id_alloc().close_delim(entry.idx, Some(range)); - stack.last_mut().subtree.token_trees.push(entry.subtree.into()); + if let Some(mut subtree) = stack.pop() { + subtree.delimiter.close = conv + .span_for(abs_range) + .unwrap_or(SpanData { range: rel_range, anchor }); + stack.last_mut().token_trees.push(subtree.into()); } continue; } @@ -257,12 +220,18 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { }; if let Some(kind) = delim { - let (id, idx) = conv.id_alloc().open_delim(range, synth_id); let subtree = tt::Subtree { - delimiter: tt::Delimiter { open: id, close: tt::TokenId::UNSPECIFIED, kind }, + delimiter: tt::Delimiter { + // FIXME: Open and close spans + open: conv + .span_for(abs_range) + .unwrap_or(SpanData { range: rel_range, anchor }), + close: Span::DUMMY, + kind, + }, token_trees: vec![], }; - stack.push(StackEntry { subtree, idx, open_range: range }); + stack.push(subtree); continue; } @@ -279,39 +248,43 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { tt::Leaf::from(tt::Punct { char, spacing, - span: conv.id_alloc().alloc(range, synth_id), + span: conv.span_for(abs_range).unwrap_or(SpanData { range: rel_range, anchor }), }) .into() } else { macro_rules! 
make_leaf { ($i:ident) => { tt::$i { - span: conv.id_alloc().alloc(range, synth_id), + span: conv + .span_for(abs_range) + .unwrap_or(SpanData { range: rel_range, anchor }), text: token.to_text(conv), } .into() }; } - let leaf: tt::Leaf = match kind { + let leaf: tt::Leaf<_> = match kind { T![true] | T![false] => make_leaf!(Ident), IDENT => make_leaf!(Ident), UNDERSCORE => make_leaf!(Ident), k if k.is_keyword() => make_leaf!(Ident), k if k.is_literal() => make_leaf!(Literal), + // FIXME: Check whether span splitting works as intended LIFETIME_IDENT => { let char_unit = TextSize::of('\''); - let r = TextRange::at(range.start(), char_unit); + let r = TextRange::at(rel_range.start(), char_unit); let apostrophe = tt::Leaf::from(tt::Punct { char: '\'', spacing: tt::Spacing::Joint, - span: conv.id_alloc().alloc(r, synth_id), + span: conv.span_for(abs_range).unwrap_or(SpanData { range: r, anchor }), }); result.push(apostrophe.into()); - let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); + let r = + TextRange::at(rel_range.start() + char_unit, rel_range.len() - char_unit); let ident = tt::Leaf::from(tt::Ident { text: SmolStr::new(&token.to_text(conv)[1..]), - span: conv.id_alloc().alloc(r, synth_id), + span: conv.span_for(abs_range).unwrap_or(SpanData { range: r, anchor }), }); result.push(ident.into()); continue; @@ -330,10 +303,9 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { while let Some(entry) = stack.pop() { let parent = stack.last_mut(); - conv.id_alloc().close_delim(entry.idx, None); - let leaf: tt::Leaf = tt::Punct { - span: conv.id_alloc().alloc(entry.open_range, None), - char: match entry.subtree.delimiter.kind { + let leaf: tt::Leaf<_> = tt::Punct { + span: entry.delimiter.open, + char: match entry.delimiter.kind { tt::DelimiterKind::Parenthesis => '(', tt::DelimiterKind::Brace => '{', tt::DelimiterKind::Bracket => '[', @@ -342,11 +314,11 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { spacing: tt::Spacing::Alone, } .into(); - parent.subtree.token_trees.push(leaf.into()); - parent.subtree.token_trees.extend(entry.subtree.token_trees); + parent.token_trees.push(leaf.into()); + parent.token_trees.extend(entry.token_trees); } - let subtree = stack.into_last().subtree; + let subtree = stack.into_last(); if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees { first.clone() } else { @@ -354,111 +326,6 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { } } -fn collect_tokens(conv: &mut C) { - struct StackEntry { - idx: usize, - open_range: TextRange, - delimiter: tt::DelimiterKind, - } - - let entry = StackEntry { - delimiter: tt::DelimiterKind::Invisible, - // never used (delimiter is `None`) - idx: !0, - open_range: TextRange::empty(TextSize::of('.')), - }; - let mut stack = NonEmptyVec::new(entry); - - loop { - let StackEntry { delimiter, .. } = stack.last_mut(); - let (token, range) = match conv.bump() { - Some(it) => it, - None => break, - }; - let synth_id = token.synthetic_id(conv); - - let kind = token.kind(conv); - if kind == COMMENT { - // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can - // figure out which token id to use for the doc comment, if it is converted successfully. 
- let next_id = conv.id_alloc().peek_next_id(); - if let Some(_tokens) = conv.convert_doc_comment(&token, next_id) { - let id = conv.id_alloc().alloc(range, synth_id); - debug_assert_eq!(id, next_id); - } - continue; - } - if kind.is_punct() && kind != UNDERSCORE { - if synth_id.is_none() { - assert_eq!(range.len(), TextSize::of('.')); - } - - let expected = match delimiter { - tt::DelimiterKind::Parenthesis => Some(T![')']), - tt::DelimiterKind::Brace => Some(T!['}']), - tt::DelimiterKind::Bracket => Some(T![']']), - tt::DelimiterKind::Invisible => None, - }; - - if let Some(expected) = expected { - if kind == expected { - if let Some(entry) = stack.pop() { - conv.id_alloc().close_delim(entry.idx, Some(range)); - } - continue; - } - } - - let delim = match kind { - T!['('] => Some(tt::DelimiterKind::Parenthesis), - T!['{'] => Some(tt::DelimiterKind::Brace), - T!['['] => Some(tt::DelimiterKind::Bracket), - _ => None, - }; - - if let Some(kind) = delim { - let (_id, idx) = conv.id_alloc().open_delim(range, synth_id); - - stack.push(StackEntry { idx, open_range: range, delimiter: kind }); - continue; - } - - conv.id_alloc().alloc(range, synth_id); - } else { - macro_rules! make_leaf { - ($i:ident) => {{ - conv.id_alloc().alloc(range, synth_id); - }}; - } - match kind { - T![true] | T![false] => make_leaf!(Ident), - IDENT => make_leaf!(Ident), - UNDERSCORE => make_leaf!(Ident), - k if k.is_keyword() => make_leaf!(Ident), - k if k.is_literal() => make_leaf!(Literal), - LIFETIME_IDENT => { - let char_unit = TextSize::of('\''); - let r = TextRange::at(range.start(), char_unit); - conv.id_alloc().alloc(r, synth_id); - - let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); - conv.id_alloc().alloc(r, synth_id); - continue; - } - _ => continue, - }; - }; - - // If we get here, we've consumed all input tokens. - // We might have more than one subtree in the stack, if the delimiters are improperly balanced. - // Merge them so we're left with one. 
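// Forward reference: the rewritten `convert_doc_comment` below turns a doc
// comment into attribute-shaped token trees that all carry the comment's
// single span. The output shape, sketched for an outer `/// Comments` and an
// inner `//! Comments` doc comment respectively:
//
//     # [ doc = " Comments" ]
//     # ! [ doc = " Comments" ]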
- while let Some(entry) = stack.pop() { - conv.id_alloc().close_delim(entry.idx, None); - conv.id_alloc().alloc(entry.open_range, None); - } - } -} - fn is_single_token_op(kind: SyntaxKind) -> bool { matches!( kind, @@ -507,112 +374,54 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr { text.into() } -fn convert_doc_comment( +fn convert_doc_comment( token: &syntax::SyntaxToken, - span: tt::TokenId, -) -> Option>> { + span: S, +) -> Option>> { cov_mark::hit!(test_meta_doc_comments); let comment = ast::Comment::cast(token.clone())?; let doc = comment.kind().doc?; + let mk_ident = + |s: &str| tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span })); + + let mk_punct = |c: char| { + tt::TokenTree::from(tt::Leaf::from(tt::Punct { + char: c, + spacing: tt::Spacing::Alone, + span, + })) + }; + + let mk_doc_literal = |comment: &ast::Comment| { + let lit = tt::Literal { text: doc_comment_text(comment), span }; + + tt::TokenTree::from(tt::Leaf::from(lit)) + }; + // Make `doc="\" Comments\"" - let meta_tkns = - vec![mk_ident("doc", span), mk_punct('=', span), mk_doc_literal(&comment, span)]; + let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)]; // Make `#![]` let mut token_trees = Vec::with_capacity(3); - token_trees.push(mk_punct('#', span)); + token_trees.push(mk_punct('#')); if let ast::CommentPlacement::Inner = doc { - token_trees.push(mk_punct('!', span)); + token_trees.push(mk_punct('!')); } token_trees.push(tt::TokenTree::from(tt::Subtree { delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket }, token_trees: meta_tkns, })); - return Some(token_trees); - - // Helper functions - fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree { - tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span })) - } - - fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree { - tt::TokenTree::from(tt::Leaf::from(tt::Punct { - char: c, - spacing: tt::Spacing::Alone, - span, - })) - } - - fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree { - let lit = tt::Literal { text: doc_comment_text(comment), span }; - - tt::TokenTree::from(tt::Leaf::from(lit)) - } -} - -struct TokenIdAlloc { - map: TokenMap, - global_offset: TextSize, - next_id: u32, -} - -impl TokenIdAlloc { - fn alloc( - &mut self, - absolute_range: TextRange, - synthetic_id: Option, - ) -> tt::TokenId { - let relative_range = absolute_range - self.global_offset; - let token_id = tt::TokenId(self.next_id); - self.next_id += 1; - self.map.insert(token_id, relative_range); - if let Some(id) = synthetic_id { - self.map.insert_synthetic(token_id, id); - } - token_id - } - - fn open_delim( - &mut self, - open_abs_range: TextRange, - synthetic_id: Option, - ) -> (tt::TokenId, usize) { - let token_id = tt::TokenId(self.next_id); - self.next_id += 1; - let idx = self.map.insert_delim( - token_id, - open_abs_range - self.global_offset, - open_abs_range - self.global_offset, - ); - if let Some(id) = synthetic_id { - self.map.insert_synthetic(token_id, id); - } - (token_id, idx) - } - - fn close_delim(&mut self, idx: usize, close_abs_range: Option) { - match close_abs_range { - None => { - self.map.remove_delim(idx); - } - Some(close) => { - self.map.update_close_delim(idx, close - self.global_offset); - } - } - } - - fn peek_next_id(&self) -> tt::TokenId { - tt::TokenId(self.next_id) - } + Some(token_trees) } /// A raw token (straight from lexer) converter -struct RawConverter<'a> { +struct RawConverter<'a, SpanAnchor> { lexed: 
parser::LexedStr<'a>, pos: usize, - id_alloc: TokenIdAlloc, + _offset: TextSize, + file_id: SpanAnchor, } trait SrcToken: std::fmt::Debug { @@ -621,66 +430,64 @@ trait SrcToken: std::fmt::Debug { fn to_char(&self, ctx: &Ctx) -> Option; fn to_text(&self, ctx: &Ctx) -> SmolStr; - - fn synthetic_id(&self, ctx: &Ctx) -> Option; } -trait TokenConverter: Sized { +trait TokenConverter: Sized { type Token: SrcToken; fn convert_doc_comment( &self, token: &Self::Token, - span: tt::TokenId, - ) -> Option>>; + span: SpanData, + ) -> Option>>>; - fn bump(&mut self) -> Option<(Self::Token, TextRange)>; + fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)>; fn peek(&self) -> Option; - fn id_alloc(&mut self) -> &mut TokenIdAlloc; + fn anchor(&self) -> SpanAnchor; + fn span_for(&self, range: TextRange) -> Option>; } -impl SrcToken> for usize { - fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind { +impl SrcToken> for usize { + fn kind(&self, ctx: &RawConverter<'_, SpanAnchor>) -> SyntaxKind { ctx.lexed.kind(*self) } - fn to_char(&self, ctx: &RawConverter<'_>) -> Option { + fn to_char(&self, ctx: &RawConverter<'_, SpanAnchor>) -> Option { ctx.lexed.text(*self).chars().next() } - fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr { + fn to_text(&self, ctx: &RawConverter<'_, SpanAnchor>) -> SmolStr { ctx.lexed.text(*self).into() } - - fn synthetic_id(&self, _ctx: &RawConverter<'_>) -> Option { - None - } } -impl TokenConverter for RawConverter<'_> { +impl TokenConverter for RawConverter<'_, SpanAnchor> +where + SpanData: Span, +{ type Token = usize; fn convert_doc_comment( &self, &token: &usize, - span: tt::TokenId, - ) -> Option>> { + span: SpanData, + ) -> Option>>> { let text = self.lexed.text(token); convert_doc_comment(&doc_comment(text), span) } - fn bump(&mut self) -> Option<(Self::Token, TextRange)> { + fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)> { if self.pos == self.lexed.len() { return None; } let token = self.pos; self.pos += 1; let range = self.lexed.text_range(token); - let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap()); + let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?); - Some((token, range)) + Some((token, range, range)) } fn peek(&self) -> Option { @@ -690,77 +497,60 @@ impl TokenConverter for RawConverter<'_> { Some(self.pos) } - fn id_alloc(&mut self) -> &mut TokenIdAlloc { - &mut self.id_alloc + fn anchor(&self) -> SpanAnchor { + self.file_id + } + fn span_for(&self, _: TextRange) -> Option> { + None } } -struct Converter { - id_alloc: TokenIdAlloc, +struct Converter<'a, SpanAnchor> { current: Option, - current_synthetic: Vec, preorder: PreorderWithTokens, - replace: FxHashMap>, - append: FxHashMap>, range: TextRange, punct_offset: Option<(SyntaxToken, TextSize)>, + /// Used to make the emitted text ranges in the spans relative to the span anchor. 
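// A censoring sketch (illustrative: `source` and `item_to_hide` are assumed
// inputs): nodes listed in `censored` are skipped wholesale by `next_token`
// below via `preorder.skip_subtree()`, so none of their tokens reach the
// output subtree.
//
//     let tt = syntax_node_to_token_tree_censored(
//         source.syntax(),
//         DummyFile,
//         0.into(),
//         &Default::default(),
//         vec![item_to_hide.syntax().clone()],
//     );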
+ offset: TextSize, + file_id: SpanAnchor, + map: &'a TokenMap>, + censored: Vec, } -impl Converter { +impl<'a, SpanAnchor> Converter<'a, SpanAnchor> { fn new( node: &SyntaxNode, - global_offset: TextSize, - existing_token_map: TokenMap, - next_id: u32, - mut replace: FxHashMap>, - mut append: FxHashMap>, - ) -> Converter { + anchor_offset: TextSize, + file_id: SpanAnchor, + censored: Vec, + map: &'a TokenMap>, + ) -> Converter<'a, SpanAnchor> { let range = node.text_range(); let mut preorder = node.preorder_with_tokens(); - let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append); + let first = Self::next_token(&mut preorder, &censored); Converter { - id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } }, current: first, - current_synthetic: synthetic, preorder, range, - replace, - append, punct_offset: None, + offset: anchor_offset, + file_id, + censored, + map, } } - fn next_token( - preorder: &mut PreorderWithTokens, - replace: &mut FxHashMap>, - append: &mut FxHashMap>, - ) -> (Option, Vec) { + fn next_token(preorder: &mut PreorderWithTokens, censor: &[SyntaxNode]) -> Option { while let Some(ev) = preorder.next() { - let ele = match ev { - WalkEvent::Enter(ele) => ele, - WalkEvent::Leave(ele) => { - if let Some(mut v) = append.remove(&ele) { - if !v.is_empty() { - v.reverse(); - return (None, v); - } - } - continue; + match ev { + WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t), + WalkEvent::Enter(SyntaxElement::Node(n)) if censor.contains(&n) => { + preorder.skip_subtree() } - }; - if let Some(mut v) = replace.remove(&ele) { - preorder.skip_subtree(); - if !v.is_empty() { - v.reverse(); - return (None, v); - } - } - match ele { - SyntaxElement::Token(t) => return (Some(t), Vec::new()), - _ => {} + _ => (), } } - (None, Vec::new()) + None } } @@ -768,100 +558,79 @@ impl Converter { enum SynToken { Ordinary(SyntaxToken), // FIXME is this supposed to be `Punct`? - Punch(SyntaxToken, TextSize), - Synthetic(SyntheticToken), + Punct(SyntaxToken, usize), } impl SynToken { - fn token(&self) -> Option<&SyntaxToken> { + fn token(&self) -> &SyntaxToken { match self { - SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it), - SynToken::Synthetic(_) => None, + SynToken::Ordinary(it) | SynToken::Punct(it, _) => it, } } } -impl SrcToken for SynToken { - fn kind(&self, ctx: &Converter) -> SyntaxKind { +impl SrcToken> for SynToken { + fn kind(&self, ctx: &Converter<'_, SpanAnchor>) -> SyntaxKind { match self { SynToken::Ordinary(token) => token.kind(), - SynToken::Punch(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), - SynToken::Synthetic(token) => token.kind, + SynToken::Punct(..) 
=> SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), } } - fn to_char(&self, _ctx: &Converter) -> Option { + fn to_char(&self, _ctx: &Converter<'_, SpanAnchor>) -> Option { match self { SynToken::Ordinary(_) => None, - SynToken::Punch(it, i) => it.text().chars().nth((*i).into()), - SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(), - SynToken::Synthetic(_) => None, + SynToken::Punct(it, i) => it.text().chars().nth(*i), } } - fn to_text(&self, _ctx: &Converter) -> SmolStr { + fn to_text(&self, _ctx: &Converter<'_, SpanAnchor>) -> SmolStr { match self { - SynToken::Ordinary(token) => token.text().into(), - SynToken::Punch(token, _) => token.text().into(), - SynToken::Synthetic(token) => token.text.clone(), - } - } - - fn synthetic_id(&self, _ctx: &Converter) -> Option { - match self { - SynToken::Synthetic(token) => Some(token.id), - _ => None, + SynToken::Ordinary(token) | SynToken::Punct(token, _) => token.text().into(), } } } -impl TokenConverter for Converter { +impl TokenConverter for Converter<'_, SpanAnchor> +where + SpanData: Span, +{ type Token = SynToken; fn convert_doc_comment( &self, token: &Self::Token, - span: tt::TokenId, - ) -> Option>> { - convert_doc_comment(token.token()?, span) + span: SpanData, + ) -> Option>>> { + convert_doc_comment(token.token(), span) } - fn bump(&mut self) -> Option<(Self::Token, TextRange)> { + fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)> { if let Some((punct, offset)) = self.punct_offset.clone() { if usize::from(offset) + 1 < punct.text().len() { let offset = offset + TextSize::of('.'); let range = punct.text_range(); self.punct_offset = Some((punct.clone(), offset)); let range = TextRange::at(range.start() + offset, TextSize::of('.')); - return Some((SynToken::Punch(punct, offset), range)); + return Some(( + SynToken::Punct(punct, u32::from(offset) as usize), + range - self.offset, + range, + )); } } - if let Some(synth_token) = self.current_synthetic.pop() { - if self.current_synthetic.is_empty() { - let (new_current, new_synth) = - Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append); - self.current = new_current; - self.current_synthetic = new_synth; - } - let range = synth_token.range; - return Some((SynToken::Synthetic(synth_token), range)); - } - let curr = self.current.clone()?; if !self.range.contains_range(curr.text_range()) { return None; } - let (new_current, new_synth) = - Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append); - self.current = new_current; - self.current_synthetic = new_synth; + self.current = Self::next_token(&mut self.preorder, &self.censored); let token = if curr.kind().is_punct() { self.punct_offset = Some((curr.clone(), 0.into())); let range = curr.text_range(); let range = TextRange::at(range.start(), TextSize::of('.')); - (SynToken::Punch(curr, 0.into()), range) + (SynToken::Punct(curr, 0 as usize), range - self.offset, range) } else { self.punct_offset = None; let range = curr.text_range(); - (SynToken::Ordinary(curr), range) + (SynToken::Ordinary(curr), range - self.offset, range) }; Some(token) @@ -871,54 +640,54 @@ impl TokenConverter for Converter { if let Some((punct, mut offset)) = self.punct_offset.clone() { offset += TextSize::of('.'); if usize::from(offset) < punct.text().len() { - return Some(SynToken::Punch(punct, offset)); + return Some(SynToken::Punct(punct, usize::from(offset))); } } - if let Some(synth_token) = self.current_synthetic.last() { - return 
Some(SynToken::Synthetic(synth_token.clone())); - } - let curr = self.current.clone()?; if !self.range.contains_range(curr.text_range()) { return None; } let token = if curr.kind().is_punct() { - SynToken::Punch(curr, 0.into()) + SynToken::Punct(curr, 0 as usize) } else { SynToken::Ordinary(curr) }; Some(token) } - fn id_alloc(&mut self) -> &mut TokenIdAlloc { - &mut self.id_alloc + fn anchor(&self) -> SpanAnchor { + self.file_id + } + fn span_for(&self, range: TextRange) -> Option> { + self.map.span_for_range(range) } } -struct TtTreeSink<'a> { +struct TtTreeSink<'a, SpanAnchor> { buf: String, - cursor: Cursor<'a, TokenId>, - open_delims: FxHashMap, + cursor: Cursor<'a, SpanData>, text_pos: TextSize, inner: SyntaxTreeBuilder, - token_map: TokenMap, + token_map: TokenMap>, } -impl<'a> TtTreeSink<'a> { - fn new(cursor: Cursor<'a, TokenId>) -> Self { +impl<'a, SpanAnchor> TtTreeSink<'a, SpanAnchor> +where + SpanData: Span, +{ + fn new(cursor: Cursor<'a, SpanData>) -> Self { TtTreeSink { buf: String::new(), cursor, - open_delims: FxHashMap::default(), text_pos: 0.into(), inner: SyntaxTreeBuilder::default(), token_map: TokenMap::default(), } } - fn finish(mut self) -> (Parse, TokenMap) { + fn finish(mut self) -> (Parse, TokenMap>) { self.token_map.shrink_to_fit(); (self.inner.finish(), self.token_map) } @@ -936,7 +705,10 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { Some(&texts[idx..texts.len() - (1 - idx)]) } -impl TtTreeSink<'_> { +impl TtTreeSink<'_, SpanAnchor> +where + SpanData: Span, +{ /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. fn float_split(&mut self, has_pseudo_dot: bool) { @@ -991,7 +763,7 @@ impl TtTreeSink<'_> { break match self.cursor.token_tree() { Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => { // Mark the range if needed - let (text, id) = match leaf { + let (text, span) = match leaf { tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.span), tt::Leaf::Punct(punct) => { assert!(punct.char.is_ascii()); @@ -1004,7 +776,7 @@ impl TtTreeSink<'_> { tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.span), }; let range = TextRange::at(self.text_pos, TextSize::of(text)); - self.token_map.insert(id, range); + self.token_map.insert(range, span); self.cursor = self.cursor.bump(); text } @@ -1012,7 +784,8 @@ impl TtTreeSink<'_> { self.cursor = self.cursor.subtree().unwrap(); match delim_to_str(subtree.delimiter.kind, false) { Some(it) => { - self.open_delims.insert(subtree.delimiter.open, self.text_pos); + let range = TextRange::at(self.text_pos, TextSize::of(it)); + self.token_map.insert(range, subtree.delimiter.open); it } None => continue, @@ -1023,18 +796,8 @@ impl TtTreeSink<'_> { self.cursor = self.cursor.bump(); match delim_to_str(parent.delimiter.kind, true) { Some(it) => { - if let Some(open_delim) = - self.open_delims.get(&parent.delimiter.open) - { - let open_range = TextRange::at(*open_delim, TextSize::of('(')); - let close_range = - TextRange::at(self.text_pos, TextSize::of('(')); - self.token_map.insert_delim( - parent.delimiter.open, - open_range, - close_range, - ); - } + let range = TextRange::at(self.text_pos, TextSize::of(it)); + self.token_map.insert(range, parent.delimiter.close); it } None => continue, diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs index fa0125f3e9..32dfb4d7e0 100644 --- a/crates/mbe/src/syntax_bridge/tests.rs +++ b/crates/mbe/src/syntax_bridge/tests.rs @@ 
-4,24 +4,32 @@ use syntax::{ast, AstNode}; use test_utils::extract_annotations; use tt::{ buffer::{TokenBuffer, TokenTreeRef}, - Leaf, Punct, Spacing, + Leaf, Punct, Spacing, Span, }; +use crate::syntax_bridge::SpanData; + use super::syntax_node_to_token_tree; fn check_punct_spacing(fixture: &str) { + #[derive(PartialEq, Eq, Clone, Copy, Debug)] + struct DummyFile; + impl Span for DummyFile { + const DUMMY: Self = DummyFile; + } + let source_file = ast::SourceFile::parse(fixture).ok().unwrap(); - let (subtree, token_map) = syntax_node_to_token_tree(source_file.syntax()); + let subtree = + syntax_node_to_token_tree(source_file.syntax(), DummyFile, 0.into(), &Default::default()); let mut annotations: HashMap<_, _> = extract_annotations(fixture) .into_iter() .map(|(range, annotation)| { - let token = token_map.token_by_range(range).expect("no token found"); let spacing = match annotation.as_str() { "Alone" => Spacing::Alone, "Joint" => Spacing::Joint, a => panic!("unknown annotation: {a}"), }; - (token, spacing) + (range, spacing) }) .collect(); @@ -29,8 +37,12 @@ fn check_punct_spacing(fixture: &str) { let mut cursor = buf.begin(); while !cursor.eof() { while let Some(token_tree) = cursor.token_tree() { - if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, span, .. }), _) = token_tree { - if let Some(expected) = annotations.remove(span) { + if let TokenTreeRef::Leaf( + Leaf::Punct(Punct { spacing, span: SpanData { range, .. }, .. }), + _, + ) = token_tree + { + if let Some(expected) = annotations.remove(range) { assert_eq!(expected, *spacing); } } diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index 73a27df5db..1af50c8b3b 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -2,123 +2,121 @@ use std::hash::Hash; -use parser::{SyntaxKind, T}; -use syntax::{TextRange, TextSize}; +use syntax::TextRange; +use tt::Span; -use crate::syntax_bridge::SyntheticTokenId; +// pub type HirFile = u32; +// pub type FileRange = (HirFile, TextRange); +// Option, LocalSyntaxContet +// pub type SyntaxContext = (); +// pub type LocalSyntaxContext = u32; -#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] -enum TokenTextRange { - Token(TextRange), - Delimiter(TextRange), +/// Maps absolute text ranges for the corresponding file to the relevant span data. +#[derive(Debug, PartialEq, Eq, Clone, Hash)] +// FIXME: Rename to SpanMap +pub struct TokenMap { + // FIXME: This needs to be sorted by (FileId, AstId) + // Then we can do a binary search on the file id, + // then a bin search on the ast id + pub span_map: Vec<(TextRange, S)>, + // span_map2: rustc_hash::FxHashMap, } -impl TokenTextRange { - fn by_kind(self, kind: SyntaxKind) -> Option { - match self { - TokenTextRange::Token(it) => Some(it), - TokenTextRange::Delimiter(it) => match kind { - T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())), - T!['}'] | T![')'] | T![']'] => { - Some(TextRange::at(it.end() - TextSize::of('}'), 1.into())) - } - _ => None, - }, - } +impl Default for TokenMap { + fn default() -> Self { + Self { span_map: Vec::new() } } } -/// Maps `tt::TokenId` to the relative range of the original token. -#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)] -pub struct TokenMap { - /// Maps `tt::TokenId` to the *relative* source range. 
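// A usage sketch of the span map that replaces the id-based entries deleted
// here (values illustrative; `DummyFile` as in the syntax_bridge tests):
//
//     let mut map = TokenMap::default();
//     let range = TextRange::new(0.into(), 3.into());
//     let span = SpanData { range, anchor: DummyFile };
//     map.insert(range, span);
//     assert_eq!(map.span_for_range(range), Some(span));
//     assert_eq!(map.ranges_with_span(span).next(), Some(range));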
- entries: Vec<(tt::TokenId, TokenTextRange)>, - pub synthetic_entries: Vec<(tt::TokenId, SyntheticTokenId)>, -} - -impl TokenMap { - pub fn token_by_range(&self, relative_range: TextRange) -> Option { - let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { - TokenTextRange::Token(it) => *it == relative_range, - TokenTextRange::Delimiter(it) => { - let open = TextRange::at(it.start(), 1.into()); - let close = TextRange::at(it.end() - TextSize::of('}'), 1.into()); - open == relative_range || close == relative_range - } - })?; - Some(token_id) - } - - pub fn ranges_by_token( - &self, - token_id: tt::TokenId, - kind: SyntaxKind, - ) -> impl Iterator + '_ { - self.entries - .iter() - .filter(move |&&(tid, _)| tid == token_id) - .filter_map(move |(_, range)| range.by_kind(kind)) - } - - pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option { - self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id) - } - - pub fn first_range_by_token( - &self, - token_id: tt::TokenId, - kind: SyntaxKind, - ) -> Option { - self.ranges_by_token(token_id, kind).next() - } - +impl TokenMap { pub(crate) fn shrink_to_fit(&mut self) { - self.entries.shrink_to_fit(); - self.synthetic_entries.shrink_to_fit(); + self.span_map.shrink_to_fit(); } - pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { - self.entries.push((token_id, TokenTextRange::Token(relative_range))); + pub(crate) fn insert(&mut self, range: TextRange, span: S) { + self.span_map.push((range, span)); } - pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) { - self.synthetic_entries.push((token_id, id)); + pub fn ranges_with_span(&self, span: S) -> impl Iterator + '_ { + self.span_map.iter().filter_map( + move |(range, s)| { + if s == &span { + Some(*range) + } else { + None + } + }, + ) } - pub(crate) fn insert_delim( - &mut self, - token_id: tt::TokenId, - open_relative_range: TextRange, - close_relative_range: TextRange, - ) -> usize { - let res = self.entries.len(); - let cover = open_relative_range.cover(close_relative_range); - - self.entries.push((token_id, TokenTextRange::Delimiter(cover))); - res + pub fn span_for_range(&self, range: TextRange) -> Option { + self.span_map.iter().find_map(|(r, s)| if r == &range { Some(s.clone()) } else { None }) } - pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { - let (_, token_text_range) = &mut self.entries[idx]; - if let TokenTextRange::Delimiter(dim) = token_text_range { - let cover = dim.cover(close_relative_range); - *token_text_range = TokenTextRange::Delimiter(cover); - } - } + // pub fn ranges_by_token( + // &self, + // token_id: tt::TokenId, + // kind: SyntaxKind, + // ) -> impl Iterator + '_ { + // self.entries + // .iter() + // .filter(move |&&(tid, _)| tid == token_id) + // .filter_map(move |(_, range)| range.by_kind(kind)) + // } - pub(crate) fn remove_delim(&mut self, idx: usize) { - // FIXME: This could be accidentally quadratic - self.entries.remove(idx); - } + // pub(crate) fn remove_delim(&mut self, idx: usize) { + // // FIXME: This could be accidentally quadratic + // self.entries.remove(idx); + // } - pub fn entries(&self) -> impl Iterator + '_ { - self.entries.iter().filter_map(|&(tid, tr)| match tr { - TokenTextRange::Token(range) => Some((tid, range)), - TokenTextRange::Delimiter(_) => None, - }) - } + // pub fn entries(&self) -> impl Iterator + '_ { + // self.entries.iter().filter_map(|&(tid, tr)| match tr { + // 
TokenTextRange::Token(range) => Some((tid, range)), + // TokenTextRange::Delimiter(_) => None, + // }) + // } - pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) { - self.entries.retain(|&(tid, _)| id(tid)); - } + // pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) { + // self.entries.retain(|&(tid, _)| id(tid)); + // } + // pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option { + // self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id) + // } + + // pub fn first_range_by_token( + // &self, + // token_id: tt::TokenId, + // kind: SyntaxKind, + // ) -> Option { + // self.ranges_by_token(token_id, kind).next() + // } + + // pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { + // self.entries.push((token_id, TokenTextRange::Token(relative_range))); + // } + + // pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) { + // self.synthetic_entries.push((token_id, id)); + // } + + // pub(crate) fn insert_delim( + // &mut self, + // token_id: tt::TokenId, + // open_relative_range: TextRange, + // close_relative_range: TextRange, + // ) -> usize { + // let res = self.entries.len(); + // let cover = open_relative_range.cover(close_relative_range); + + // self.entries.push((token_id, TokenTextRange::Delimiter(cover))); + // res + // } + + // pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { + // let (_, token_text_range) = &mut self.entries[idx]; + // if let TokenTextRange::Delimiter(dim) = token_text_range { + // let cover = dim.cover(close_relative_range); + // *token_text_range = TokenTextRange::Delimiter(cover); + // } + // } } diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 2c2d2e8a94..4c87c89add 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -31,5 +31,6 @@ paths.workspace = true tt.workspace = true stdx.workspace = true profile.workspace = true +text-size.workspace = true # Intentionally *not* depend on anything salsa-related # base-db.workspace = true diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index fe82b8d045..bfb3213a25 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -38,6 +38,7 @@ use std::collections::{HashMap, VecDeque}; use serde::{Deserialize, Serialize}; +use text_size::TextRange; use tt::Span; use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS}; @@ -55,6 +56,19 @@ impl SerializableSpan<1> for tt::TokenId { } } +impl SerializableSpan<3> for tt::SpanData +where + FileId: From + Into, + Self: Span, +{ + fn into_u32(self) -> [u32; 3] { + [self.anchor.into(), self.range.start().into(), self.range.end().into()] + } + fn from_u32([file_id, start, end]: [u32; 3]) -> Self { + tt::SpanData { anchor: file_id.into(), range: TextRange::new(start.into(), end.into()) } + } +} + #[derive(Serialize, Deserialize, Debug)] pub struct FlatTree { subtree: Vec, diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index c7b84c41b3..b6dcc26de6 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -209,17 +209,24 @@ mod tests { use super::*; use cfg::CfgExpr; + use hir::HirFileId; + use ide_db::base_db::span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}; use mbe::syntax_node_to_token_tree; use syntax::{ ast::{self, AstNode}, - SmolStr, + SmolStr, 
TextSize, }; fn check(cfg: &str, expected_features: &[&str]) { let cfg_expr = { let source_file = ast::SourceFile::parse(cfg).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let (tt, _) = syntax_node_to_token_tree(tt.syntax()); + let tt = syntax_node_to_token_tree( + tt.syntax(), + SpanAnchor { file_id: HirFileId::from(0), ast_id: ROOT_ERASED_FILE_AST_ID }, + TextSize::new(0), + &Default::default(), + ); CfgExpr::parse(&tt) }; diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 0f6539f224..7e795cf463 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -8,7 +8,7 @@ use std::{ use hir::{ db::{DefDatabase, ExpandDatabase, HirDatabase}, - Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ModuleDef, Name, + Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ModuleDef, Name, }; use hir_def::{ body::{BodySourceMap, SyntheticSyntax}, diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 8541be715a..abec267946 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -4,7 +4,7 @@ use project_model::{CargoConfig, RustLibSource}; use rustc_hash::FxHashSet; -use hir::{db::HirDatabase, Crate, Module}; +use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity}; use ide_db::base_db::SourceDatabaseExt; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 4939ab3904..1d7b7de390 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -75,7 +75,7 @@ pub use smol_str::SmolStr; #[derive(Debug, PartialEq, Eq)] pub struct Parse { green: GreenNode, - errors: Arc<[SyntaxError]>, + errors: Option>, _ty: PhantomData T>, } @@ -87,14 +87,18 @@ impl Clone for Parse { impl Parse { fn new(green: GreenNode, errors: Vec) -> Parse { - Parse { green, errors: errors.into(), _ty: PhantomData } + Parse { + green, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + } } pub fn syntax_node(&self) -> SyntaxNode { SyntaxNode::new_root(self.green.clone()) } pub fn errors(&self) -> &[SyntaxError] { - &self.errors + self.errors.as_deref().unwrap_or_default() } } @@ -108,10 +112,9 @@ impl Parse { } pub fn ok(self) -> Result> { - if self.errors.is_empty() { - Ok(self.tree()) - } else { - Err(self.errors) + match self.errors { + Some(e) => Err(e), + None => Ok(self.tree()), } } } @@ -129,7 +132,7 @@ impl Parse { impl Parse { pub fn debug_dump(&self) -> String { let mut buf = format!("{:#?}", self.tree().syntax()); - for err in self.errors.iter() { + for err in self.errors.as_deref().into_iter().flat_map(<[_]>::iter) { format_to!(buf, "error {:?}: {}\n", err.range(), err); } buf @@ -141,13 +144,16 @@ impl Parse { fn incremental_reparse(&self, indel: &Indel) -> Option> { // FIXME: validation errors are not handled here - parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map( - |(green_node, errors, _reparsed_range)| Parse { - green: green_node, - errors: errors.into(), - _ty: PhantomData, - }, + parsing::incremental_reparse( + self.tree().syntax(), + indel, + self.errors.as_deref().unwrap_or_default().iter().cloned(), ) + .map(|(green_node, errors, _reparsed_range)| Parse { + green: 
green_node, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + }) } fn full_reparse(&self, indel: &Indel) -> Parse { @@ -168,7 +174,11 @@ impl SourceFile { errors.extend(validation::validate(&root)); assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); - Parse { green, errors: errors.into(), _ty: PhantomData } + Parse { + green, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + } } } @@ -275,7 +285,11 @@ impl ast::TokenTree { let (green, errors) = builder.finish_raw(); - Parse { green, errors: errors.into(), _ty: PhantomData } + Parse { + green, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + } } } diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs index 45e5916098..0ddc641711 100644 --- a/crates/syntax/src/parsing/reparsing.rs +++ b/crates/syntax/src/parsing/reparsing.rs @@ -20,7 +20,7 @@ use crate::{ pub(crate) fn incremental_reparse( node: &SyntaxNode, edit: &Indel, - errors: Vec, + errors: impl IntoIterator, ) -> Option<(GreenNode, Vec, TextRange)> { if let Some((green, new_errors, old_range)) = reparse_token(node, edit) { return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); @@ -147,7 +147,7 @@ fn is_balanced(lexed: &parser::LexedStr<'_>) -> bool { } fn merge_errors( - old_errors: Vec, + old_errors: impl IntoIterator, new_errors: Vec, range_before_reparse: TextRange, edit: &Indel, @@ -191,8 +191,12 @@ mod tests { let fully_reparsed = SourceFile::parse(&after); let incrementally_reparsed: Parse = { let before = SourceFile::parse(&before); - let (green, new_errors, range) = - incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap(); + let (green, new_errors, range) = incremental_reparse( + before.tree().syntax(), + &edit, + before.errors.as_deref().unwrap_or_default().iter().cloned(), + ) + .unwrap(); assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); Parse::new(green, new_errors) }; diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs index 3010d77d82..8ae1242cf7 100644 --- a/crates/syntax/src/tests.rs +++ b/crates/syntax/src/tests.rs @@ -38,7 +38,7 @@ fn benchmark_parser() { let tree = { let _b = bench("parsing"); let p = SourceFile::parse(&data); - assert!(p.errors.is_empty()); + assert!(p.errors.is_none()); assert_eq!(p.tree().syntax.text_range().len(), 352474.into()); p.tree() }; diff --git a/crates/tt/Cargo.toml b/crates/tt/Cargo.toml index a28ee5f1ca..5722244979 100644 --- a/crates/tt/Cargo.toml +++ b/crates/tt/Cargo.toml @@ -13,5 +13,6 @@ doctest = false [dependencies] smol_str.workspace = true +text-size.workspace = true stdx.workspace = true diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index a4ffc328f2..89cb12d2c2 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -7,6 +7,7 @@ use std::fmt; use stdx::impl_from; +use text_size::{TextRange, TextSize}; pub use smol_str::SmolStr; @@ -31,36 +32,25 @@ impl TokenId { Self::UNSPECIFIED } } - -pub mod token_id { - pub use crate::{DelimiterKind, Spacing, TokenId}; - pub type Span = crate::TokenId; - pub type Subtree = crate::Subtree; - pub type Punct = crate::Punct; - pub type Delimiter = crate::Delimiter; - pub type Leaf = crate::Leaf; - pub type Ident = crate::Ident; - pub type Literal = crate::Literal; - pub type TokenTree = crate::TokenTree; - pub mod buffer { - pub type TokenBuffer<'a> = crate::buffer::TokenBuffer<'a, super::Span>; - pub 
type Cursor<'a> = crate::buffer::Cursor<'a, super::Span>;
-        pub type TokenTreeRef<'a> = crate::buffer::TokenTreeRef<'a, super::Span>;
-    }
-}
-
-pub trait Span: std::fmt::Debug + Copy + Sized {
-    const DUMMY: Self;
-    fn is_dummy(&self) -> bool;
-}
 impl Span for TokenId {
     const DUMMY: Self = TokenId(!0);
-
-    fn is_dummy(&self) -> bool {
-        *self == Self::DUMMY
-    }
 }
 
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub struct SpanData<Anchor> {
+    /// The text range of this span, relative to the anchor.
+    pub range: TextRange,
+    pub anchor: Anchor,
+}
+
+impl<Anchor: Span> Span for SpanData<Anchor> {
+    const DUMMY: Self =
+        SpanData { range: TextRange::empty(TextSize::new(0)), anchor: Anchor::DUMMY };
+}
+
+pub trait Span: std::fmt::Debug + Copy + Sized + Eq {
+    const DUMMY: Self;
+}
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct SyntaxContext(pub u32);
@@ -134,7 +124,6 @@ impl Delimiter {
     }
 }
 
-
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub enum DelimiterKind {
     Parenthesis,
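// Orientation for the new span types above (values illustrative): `SpanData`
// pairs an anchor with a text range *relative to that anchor*, and its DUMMY
// constant mirrors the legacy `TokenId::DUMMY` (`TokenId(!0)`).
//
//     let span = SpanData { range: TextRange::new(4.into(), 7.into()), anchor: DummyFile };
//     assert!(SpanData::<DummyFile>::DUMMY.range.is_empty());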