Merge commit '426d2842c1f0e5cc5e34bb37c7ac3ee0945f9746' into sync-from-ra2

This commit is contained in:
Laurențiu Nicola 2024-01-03 11:35:07 +02:00
parent e37cf75791
commit 932d85b529
240 changed files with 6941 additions and 3102 deletions

View file

@ -2,7 +2,7 @@
xtask = "run --package xtask --bin xtask --" xtask = "run --package xtask --bin xtask --"
tq = "test -- -q" tq = "test -- -q"
qt = "tq" qt = "tq"
lint = "clippy --all-targets -- -Aclippy::collapsible_if -Aclippy::needless_pass_by_value -Aclippy::nonminimal_bool -Aclippy::redundant_pattern_matching --cap-lints warn" lint = "clippy --all-targets -- --cap-lints warn"
[target.x86_64-pc-windows-msvc] [target.x86_64-pc-windows-msvc]
linker = "rust-lld" linker = "rust-lld"

View file

@ -38,7 +38,6 @@ jobs:
- 'crates/proc-macro-api/**' - 'crates/proc-macro-api/**'
- 'crates/proc-macro-srv/**' - 'crates/proc-macro-srv/**'
- 'crates/proc-macro-srv-cli/**' - 'crates/proc-macro-srv-cli/**'
- 'crates/proc-macro-test/**'
rust: rust:
needs: changes needs: changes

87
Cargo.lock generated
View file

@ -74,11 +74,11 @@ dependencies = [
"profile", "profile",
"rust-analyzer-salsa", "rust-analyzer-salsa",
"rustc-hash", "rustc-hash",
"semver",
"span",
"stdx", "stdx",
"syntax", "syntax",
"test-utils",
"triomphe", "triomphe",
"tt",
"vfs", "vfs",
] ]
@ -516,8 +516,10 @@ dependencies = [
"rustc-dependencies", "rustc-dependencies",
"rustc-hash", "rustc-hash",
"smallvec", "smallvec",
"span",
"stdx", "stdx",
"syntax", "syntax",
"test-fixture",
"test-utils", "test-utils",
"tracing", "tracing",
"triomphe", "triomphe",
@ -542,6 +544,7 @@ dependencies = [
"profile", "profile",
"rustc-hash", "rustc-hash",
"smallvec", "smallvec",
"span",
"stdx", "stdx",
"syntax", "syntax",
"tracing", "tracing",
@ -581,6 +584,7 @@ dependencies = [
"smallvec", "smallvec",
"stdx", "stdx",
"syntax", "syntax",
"test-fixture",
"test-utils", "test-utils",
"tracing", "tracing",
"tracing-subscriber", "tracing-subscriber",
@ -624,6 +628,7 @@ dependencies = [
"smallvec", "smallvec",
"stdx", "stdx",
"syntax", "syntax",
"test-fixture",
"test-utils", "test-utils",
"text-edit", "text-edit",
"toolchain", "toolchain",
@ -647,6 +652,7 @@ dependencies = [
"sourcegen", "sourcegen",
"stdx", "stdx",
"syntax", "syntax",
"test-fixture",
"test-utils", "test-utils",
"text-edit", "text-edit",
] ]
@ -666,6 +672,7 @@ dependencies = [
"smallvec", "smallvec",
"stdx", "stdx",
"syntax", "syntax",
"test-fixture",
"test-utils", "test-utils",
"text-edit", "text-edit",
] ]
@ -694,8 +701,10 @@ dependencies = [
"rayon", "rayon",
"rustc-hash", "rustc-hash",
"sourcegen", "sourcegen",
"span",
"stdx", "stdx",
"syntax", "syntax",
"test-fixture",
"test-utils", "test-utils",
"text-edit", "text-edit",
"tracing", "tracing",
@ -720,6 +729,7 @@ dependencies = [
"sourcegen", "sourcegen",
"stdx", "stdx",
"syntax", "syntax",
"test-fixture",
"test-utils", "test-utils",
"text-edit", "text-edit",
] ]
@ -737,6 +747,7 @@ dependencies = [
"parser", "parser",
"stdx", "stdx",
"syntax", "syntax",
"test-fixture",
"test-utils", "test-utils",
"text-edit", "text-edit",
"triomphe", "triomphe",
@ -903,11 +914,13 @@ version = "0.0.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"crossbeam-channel", "crossbeam-channel",
"hir-expand",
"ide", "ide",
"ide-db", "ide-db",
"itertools", "itertools",
"proc-macro-api", "proc-macro-api",
"project-model", "project-model",
"span",
"tracing", "tracing",
"tt", "tt",
"vfs", "vfs",
@ -932,19 +945,7 @@ checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]] [[package]]
name = "lsp-server" name = "lsp-server"
version = "0.7.4" version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928"
dependencies = [
"crossbeam-channel",
"log",
"serde",
"serde_json",
]
[[package]]
name = "lsp-server"
version = "0.7.5"
dependencies = [ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"ctrlc", "ctrlc",
@ -955,10 +956,22 @@ dependencies = [
] ]
[[package]] [[package]]
name = "lsp-types" name = "lsp-server"
version = "0.94.0" version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237" checksum = "248f65b78f6db5d8e1b1604b4098a28b43d21a8eb1deeca22b1c421b276c7095"
dependencies = [
"crossbeam-channel",
"log",
"serde",
"serde_json",
]
[[package]]
name = "lsp-types"
version = "0.95.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "158c1911354ef73e8fe42da6b10c0484cb65c7f1007f28022e847706c1ab6984"
dependencies = [ dependencies = [
"bitflags 1.3.2", "bitflags 1.3.2",
"serde", "serde",
@ -975,6 +988,7 @@ dependencies = [
"parser", "parser",
"rustc-hash", "rustc-hash",
"smallvec", "smallvec",
"span",
"stdx", "stdx",
"syntax", "syntax",
"test-utils", "test-utils",
@ -1251,6 +1265,7 @@ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
"snap", "snap",
"span",
"stdx", "stdx",
"text-size", "text-size",
"tracing", "tracing",
@ -1262,6 +1277,7 @@ dependencies = [
name = "proc-macro-srv" name = "proc-macro-srv"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"base-db",
"expect-test", "expect-test",
"libloading", "libloading",
"mbe", "mbe",
@ -1270,6 +1286,7 @@ dependencies = [
"paths", "paths",
"proc-macro-api", "proc-macro-api",
"proc-macro-test", "proc-macro-test",
"span",
"stdx", "stdx",
"tt", "tt",
] ]
@ -1287,14 +1304,9 @@ name = "proc-macro-test"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"cargo_metadata", "cargo_metadata",
"proc-macro-test-impl",
"toolchain", "toolchain",
] ]
[[package]]
name = "proc-macro-test-impl"
version = "0.0.0"
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.69" version = "1.0.69"
@ -1514,7 +1526,7 @@ dependencies = [
"ide-ssr", "ide-ssr",
"itertools", "itertools",
"load-cargo", "load-cargo",
"lsp-server 0.7.4", "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-types", "lsp-types",
"mbe", "mbe",
"mimalloc", "mimalloc",
@ -1535,6 +1547,7 @@ dependencies = [
"sourcegen", "sourcegen",
"stdx", "stdx",
"syntax", "syntax",
"test-fixture",
"test-utils", "test-utils",
"tikv-jemallocator", "tikv-jemallocator",
"toolchain", "toolchain",
@ -1726,6 +1739,17 @@ dependencies = [
"xshell", "xshell",
] ]
[[package]]
name = "span"
version = "0.0.0"
dependencies = [
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rust-analyzer-salsa",
"stdx",
"syntax",
"vfs",
]
[[package]] [[package]]
name = "static_assertions" name = "static_assertions"
version = "1.1.0" version = "1.1.0"
@ -1796,6 +1820,20 @@ dependencies = [
"ungrammar", "ungrammar",
] ]
[[package]]
name = "test-fixture"
version = "0.0.0"
dependencies = [
"base-db",
"cfg",
"hir-expand",
"rustc-hash",
"span",
"stdx",
"test-utils",
"tt",
]
[[package]] [[package]]
name = "test-utils" name = "test-utils"
version = "0.0.0" version = "0.0.0"
@ -1998,6 +2036,7 @@ name = "tt"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"smol_str", "smol_str",
"span",
"stdx", "stdx",
"text-size", "text-size",
] ]

View file

@ -1,10 +1,10 @@
[workspace] [workspace]
members = ["xtask/", "lib/*", "crates/*"] members = ["xtask/", "lib/*", "crates/*"]
exclude = ["crates/proc-macro-test/imp"] exclude = ["crates/proc-macro-srv/proc-macro-test/"]
resolver = "2" resolver = "2"
[workspace.package] [workspace.package]
rust-version = "1.70" rust-version = "1.74"
edition = "2021" edition = "2021"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
authors = ["rust-analyzer team"] authors = ["rust-analyzer team"]
@ -70,10 +70,9 @@ proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" }
proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" } proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" }
profile = { path = "./crates/profile", version = "0.0.0" } profile = { path = "./crates/profile", version = "0.0.0" }
project-model = { path = "./crates/project-model", version = "0.0.0" } project-model = { path = "./crates/project-model", version = "0.0.0" }
sourcegen = { path = "./crates/sourcegen", version = "0.0.0" } span = { path = "./crates/span", version = "0.0.0" }
stdx = { path = "./crates/stdx", version = "0.0.0" } stdx = { path = "./crates/stdx", version = "0.0.0" }
syntax = { path = "./crates/syntax", version = "0.0.0" } syntax = { path = "./crates/syntax", version = "0.0.0" }
test-utils = { path = "./crates/test-utils", version = "0.0.0" }
text-edit = { path = "./crates/text-edit", version = "0.0.0" } text-edit = { path = "./crates/text-edit", version = "0.0.0" }
toolchain = { path = "./crates/toolchain", version = "0.0.0" } toolchain = { path = "./crates/toolchain", version = "0.0.0" }
tt = { path = "./crates/tt", version = "0.0.0" } tt = { path = "./crates/tt", version = "0.0.0" }
@ -82,19 +81,25 @@ vfs = { path = "./crates/vfs", version = "0.0.0" }
rustc-dependencies = { path = "./crates/rustc-dependencies", version = "0.0.0" } rustc-dependencies = { path = "./crates/rustc-dependencies", version = "0.0.0" }
# local crates that aren't published to crates.io. These should not have versions. # local crates that aren't published to crates.io. These should not have versions.
proc-macro-test = { path = "./crates/proc-macro-test" } sourcegen = { path = "./crates/sourcegen" }
test-fixture = { path = "./crates/test-fixture" }
test-utils = { path = "./crates/test-utils" }
# In-tree crates that are published separately and follow semver. See lib/README.md # In-tree crates that are published separately and follow semver. See lib/README.md
line-index = { version = "0.1.1" } line-index = { version = "0.1.1" }
la-arena = { version = "0.3.1" } la-arena = { version = "0.3.1" }
lsp-server = { version = "0.7.4" } lsp-server = { version = "0.7.6" }
# non-local crates # non-local crates
anyhow = "1.0.75" anyhow = "1.0.75"
arrayvec = "0.7.4"
bitflags = "2.4.1" bitflags = "2.4.1"
cargo_metadata = "0.18.1" cargo_metadata = "0.18.1"
command-group = "2.0.1"
crossbeam-channel = "0.5.8"
dissimilar = "1.0.7" dissimilar = "1.0.7"
either = "1.9.0" either = "1.9.0"
expect-test = "1.4.0"
hashbrown = { version = "0.14", features = [ hashbrown = { version = "0.14", features = [
"inline-more", "inline-more",
], default-features = false } ], default-features = false }
@ -105,6 +110,7 @@ nohash-hasher = "0.2.0"
rayon = "1.8.0" rayon = "1.8.0"
rust-analyzer-salsa = "0.17.0-pre.4" rust-analyzer-salsa = "0.17.0-pre.4"
rustc-hash = "1.1.0" rustc-hash = "1.1.0"
semver = "1.0.14"
serde = { version = "1.0.192", features = ["derive"] } serde = { version = "1.0.192", features = ["derive"] }
serde_json = "1.0.108" serde_json = "1.0.108"
smallvec = { version = "1.10.0", features = [ smallvec = { version = "1.10.0", features = [
@ -124,5 +130,12 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
triomphe = { version = "0.1.10", default-features = false, features = ["std"] } triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
xshell = "0.2.5" xshell = "0.2.5"
# We need to freeze the version of the crate, as the raw-api feature is considered unstable # We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=5.5.3", features = ["raw-api"] } dashmap = { version = "=5.5.3", features = ["raw-api"] }
[workspace.lints.clippy]
collapsible_if = "allow"
needless_pass_by_value = "allow"
nonminimal_bool = "allow"
redundant_pattern_matching = "allow"

View file

@ -16,12 +16,15 @@ la-arena.workspace = true
rust-analyzer-salsa.workspace = true rust-analyzer-salsa.workspace = true
rustc-hash.workspace = true rustc-hash.workspace = true
triomphe.workspace = true triomphe.workspace = true
semver.workspace = true
# local deps # local deps
cfg.workspace = true cfg.workspace = true
profile.workspace = true profile.workspace = true
stdx.workspace = true stdx.workspace = true
syntax.workspace = true syntax.workspace = true
test-utils.workspace = true
tt.workspace = true
vfs.workspace = true vfs.workspace = true
span.workspace = true
[lints]
workspace = true

View file

@ -7,18 +7,17 @@ use salsa::Durability;
use triomphe::Arc; use triomphe::Arc;
use vfs::FileId; use vfs::FileId;
use crate::{CrateGraph, ProcMacros, SourceDatabaseExt, SourceRoot, SourceRootId}; use crate::{CrateGraph, SourceDatabaseExt, SourceRoot, SourceRootId};
/// Encapsulate a bunch of raw `.set` calls on the database. /// Encapsulate a bunch of raw `.set` calls on the database.
#[derive(Default)] #[derive(Default)]
pub struct Change { pub struct FileChange {
pub roots: Option<Vec<SourceRoot>>, pub roots: Option<Vec<SourceRoot>>,
pub files_changed: Vec<(FileId, Option<Arc<str>>)>, pub files_changed: Vec<(FileId, Option<Arc<str>>)>,
pub crate_graph: Option<CrateGraph>, pub crate_graph: Option<CrateGraph>,
pub proc_macros: Option<ProcMacros>,
} }
impl fmt::Debug for Change { impl fmt::Debug for FileChange {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut d = fmt.debug_struct("Change"); let mut d = fmt.debug_struct("Change");
if let Some(roots) = &self.roots { if let Some(roots) = &self.roots {
@ -34,9 +33,9 @@ impl fmt::Debug for Change {
} }
} }
impl Change { impl FileChange {
pub fn new() -> Self { pub fn new() -> Self {
Change::default() FileChange::default()
} }
pub fn set_roots(&mut self, roots: Vec<SourceRoot>) { pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
@ -51,10 +50,6 @@ impl Change {
self.crate_graph = Some(graph); self.crate_graph = Some(graph);
} }
pub fn set_proc_macros(&mut self, proc_macros: ProcMacros) {
self.proc_macros = Some(proc_macros);
}
pub fn apply(self, db: &mut dyn SourceDatabaseExt) { pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
let _p = profile::span("RootDatabase::apply_change"); let _p = profile::span("RootDatabase::apply_change");
if let Some(roots) = self.roots { if let Some(roots) = self.roots {
@ -79,9 +74,6 @@ impl Change {
if let Some(crate_graph) = self.crate_graph { if let Some(crate_graph) = self.crate_graph {
db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH); db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH);
} }
if let Some(proc_macros) = self.proc_macros {
db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
}
} }
} }

View file

@ -6,22 +6,19 @@
//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
//! actual IO is done and lowered to input. //! actual IO is done and lowered to input.
use std::{fmt, mem, ops, panic::RefUnwindSafe, str::FromStr, sync}; use std::{fmt, mem, ops, str::FromStr};
use cfg::CfgOptions; use cfg::CfgOptions;
use la_arena::{Arena, Idx}; use la_arena::{Arena, Idx};
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
use syntax::SmolStr; use syntax::SmolStr;
use triomphe::Arc; use triomphe::Arc;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
use crate::span::SpanData;
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`, // Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been build yet as the build data is missing. // then the crate for the proc-macro hasn't been build yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>; pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
pub type ProcMacros = FxHashMap<CrateId, ProcMacroLoadResult>;
/// Files are grouped into source roots. A source root is a directory on the /// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a /// file systems which is watched for changes. Typically it corresponds to a
/// Rust crate. Source roots *might* be nested: in this case, a file belongs to /// Rust crate. Source roots *might* be nested: in this case, a file belongs to
@ -242,49 +239,8 @@ impl CrateDisplayName {
CrateDisplayName { crate_name, canonical_name } CrateDisplayName { crate_name, canonical_name }
} }
} }
// FIXME: These should not be defined in here? Why does base db know about proc-macros
// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32);
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
pub enum ProcMacroKind {
CustomDerive,
FuncLike,
Attr,
}
pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
fn expand(
&self,
subtree: &tt::Subtree<SpanData>,
attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
def_site: SpanData,
call_site: SpanData,
mixed_site: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
}
#[derive(Debug)]
pub enum ProcMacroExpansionError {
Panic(String),
/// Things like "proc macro server was killed by OOM".
System(String),
}
pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, String>;
pub type TargetLayoutLoadResult = Result<Arc<str>, Arc<str>>; pub type TargetLayoutLoadResult = Result<Arc<str>, Arc<str>>;
#[derive(Debug, Clone)]
pub struct ProcMacro {
pub name: SmolStr,
pub kind: ProcMacroKind,
pub expander: sync::Arc<dyn ProcMacroExpander>,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum ReleaseChannel { pub enum ReleaseChannel {
Stable, Stable,
@ -303,7 +259,7 @@ impl ReleaseChannel {
pub fn from_str(str: &str) -> Option<Self> { pub fn from_str(str: &str) -> Option<Self> {
Some(match str { Some(match str {
"" => ReleaseChannel::Stable, "" | "stable" => ReleaseChannel::Stable,
"nightly" => ReleaseChannel::Nightly, "nightly" => ReleaseChannel::Nightly,
_ if str.starts_with("beta") => ReleaseChannel::Beta, _ if str.starts_with("beta") => ReleaseChannel::Beta,
_ => return None, _ => return None,
@ -334,7 +290,7 @@ pub struct CrateData {
// things. This info does need to be somewhat present though as to prevent deduplication from // things. This info does need to be somewhat present though as to prevent deduplication from
// happening across different workspaces with different layouts. // happening across different workspaces with different layouts.
pub target_layout: TargetLayoutLoadResult, pub target_layout: TargetLayoutLoadResult,
pub channel: Option<ReleaseChannel>, pub toolchain: Option<Version>,
} }
impl CrateData { impl CrateData {
@ -391,6 +347,10 @@ impl CrateData {
slf_deps.eq(other_deps) slf_deps.eq(other_deps)
} }
pub fn channel(&self) -> Option<ReleaseChannel> {
self.toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -398,10 +358,12 @@ pub enum Edition {
Edition2015, Edition2015,
Edition2018, Edition2018,
Edition2021, Edition2021,
Edition2024,
} }
impl Edition { impl Edition {
pub const CURRENT: Edition = Edition::Edition2021; pub const CURRENT: Edition = Edition::Edition2021;
pub const DEFAULT: Edition = Edition::Edition2015;
} }
#[derive(Default, Debug, Clone, PartialEq, Eq)] #[derive(Default, Debug, Clone, PartialEq, Eq)]
@ -472,7 +434,7 @@ impl CrateGraph {
is_proc_macro: bool, is_proc_macro: bool,
origin: CrateOrigin, origin: CrateOrigin,
target_layout: Result<Arc<str>, Arc<str>>, target_layout: Result<Arc<str>, Arc<str>>,
channel: Option<ReleaseChannel>, toolchain: Option<Version>,
) -> CrateId { ) -> CrateId {
let data = CrateData { let data = CrateData {
root_file_id, root_file_id,
@ -486,7 +448,7 @@ impl CrateGraph {
origin, origin,
target_layout, target_layout,
is_proc_macro, is_proc_macro,
channel, toolchain,
}; };
self.arena.alloc(data) self.arena.alloc(data)
} }
@ -784,6 +746,7 @@ impl FromStr for Edition {
"2015" => Edition::Edition2015, "2015" => Edition::Edition2015,
"2018" => Edition::Edition2018, "2018" => Edition::Edition2018,
"2021" => Edition::Edition2021, "2021" => Edition::Edition2021,
"2024" => Edition::Edition2024,
_ => return Err(ParseEditionError { invalid_input: s.to_string() }), _ => return Err(ParseEditionError { invalid_input: s.to_string() }),
}; };
Ok(res) Ok(res)
@ -796,6 +759,7 @@ impl fmt::Display for Edition {
Edition::Edition2015 => "2015", Edition::Edition2015 => "2015",
Edition::Edition2018 => "2018", Edition::Edition2018 => "2018",
Edition::Edition2021 => "2021", Edition::Edition2021 => "2021",
Edition::Edition2024 => "2024",
}) })
} }
} }

View file

@ -4,27 +4,27 @@
mod input; mod input;
mod change; mod change;
pub mod fixture;
pub mod span;
use std::panic; use std::panic;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize}; use syntax::{ast, Parse, SourceFile};
use triomphe::Arc; use triomphe::Arc;
pub use crate::{ pub use crate::{
change::Change, change::FileChange,
input::{ input::{
CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, DependencyKind, Edition, Env, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, SourceRoot,
ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, SourceRootId, TargetLayoutLoadResult,
ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
}, },
}; };
pub use salsa::{self, Cancelled}; pub use salsa::{self, Cancelled};
pub use span::{FilePosition, FileRange};
pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath}; pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
#[macro_export] #[macro_export]
macro_rules! impl_intern_key { macro_rules! impl_intern_key {
($name:ident) => { ($name:ident) => {
@ -43,18 +43,6 @@ pub trait Upcast<T: ?Sized> {
fn upcast(&self) -> &T; fn upcast(&self) -> &T;
} }
#[derive(Clone, Copy, Debug)]
pub struct FilePosition {
pub file_id: FileId,
pub offset: TextSize,
}
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct FileRange {
pub file_id: FileId,
pub range: TextRange,
}
pub const DEFAULT_PARSE_LRU_CAP: usize = 128; pub const DEFAULT_PARSE_LRU_CAP: usize = 128;
pub trait FileLoader { pub trait FileLoader {
@ -74,10 +62,6 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
/// The crate graph. /// The crate graph.
#[salsa::input] #[salsa::input]
fn crate_graph(&self) -> Arc<CrateGraph>; fn crate_graph(&self) -> Arc<CrateGraph>;
/// The proc macros.
#[salsa::input]
fn proc_macros(&self) -> Arc<ProcMacros>;
} }
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> { fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {

View file

@ -12,7 +12,7 @@ rust-version.workspace = true
doctest = false doctest = false
[dependencies] [dependencies]
rustc-hash = "1.1.0" rustc-hash.workspace = true
# locals deps # locals deps
tt.workspace = true tt.workspace = true
@ -29,3 +29,6 @@ derive_arbitrary = "1.3.2"
# local deps # local deps
mbe.workspace = true mbe.workspace = true
syntax.workspace = true syntax.workspace = true
[lints]
workspace = true

View file

@ -1,6 +1,6 @@
use arbitrary::{Arbitrary, Unstructured}; use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect}; use expect_test::{expect, Expect};
use mbe::{syntax_node_to_token_tree, DummyTestSpanMap}; use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
use syntax::{ast, AstNode}; use syntax::{ast, AstNode};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
@ -8,7 +8,7 @@ use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
fn assert_parse_result(input: &str, expected: CfgExpr) { fn assert_parse_result(input: &str, expected: CfgExpr) {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected); assert_eq!(cfg, expected);
} }
@ -16,7 +16,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
fn check_dnf(input: &str, expect: Expect) { fn check_dnf(input: &str, expect: Expect) {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
expect.assert_eq(&actual); expect.assert_eq(&actual);
@ -25,7 +25,7 @@ fn check_dnf(input: &str, expect: Expect) {
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg); let dnf = DnfExpr::new(cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@ -36,7 +36,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg); let dnf = DnfExpr::new(cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>(); let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();

View file

@ -13,14 +13,17 @@ doctest = false
[dependencies] [dependencies]
cargo_metadata.workspace = true cargo_metadata.workspace = true
crossbeam-channel = "0.5.8" crossbeam-channel.workspace = true
tracing.workspace = true tracing.workspace = true
rustc-hash = "1.1.0" rustc-hash.workspace = true
serde_json.workspace = true serde_json.workspace = true
serde.workspace = true serde.workspace = true
command-group = "2.0.1" command-group.workspace = true
# local deps # local deps
paths.workspace = true paths.workspace = true
stdx.workspace = true stdx.workspace = true
toolchain.workspace = true toolchain.workspace = true
[lints]
workspace = true

View file

@ -12,7 +12,7 @@ rust-version.workspace = true
doctest = false doctest = false
[dependencies] [dependencies]
arrayvec = "0.7.2" arrayvec.workspace = true
bitflags.workspace = true bitflags.workspace = true
cov-mark = "2.0.0-pre.1" cov-mark = "2.0.0-pre.1"
dashmap.workspace = true dashmap.workspace = true
@ -23,7 +23,7 @@ indexmap.workspace = true
itertools.workspace = true itertools.workspace = true
la-arena.workspace = true la-arena.workspace = true
once_cell = "1.17.0" once_cell = "1.17.0"
rustc-hash = "1.1.0" rustc-hash.workspace = true
tracing.workspace = true tracing.workspace = true
smallvec.workspace = true smallvec.workspace = true
hashbrown.workspace = true hashbrown.workspace = true
@ -42,13 +42,18 @@ mbe.workspace = true
cfg.workspace = true cfg.workspace = true
tt.workspace = true tt.workspace = true
limit.workspace = true limit.workspace = true
span.workspace = true
[dev-dependencies] [dev-dependencies]
expect-test = "1.4.0" expect-test.workspace = true
# local deps # local deps
test-utils.workspace = true test-utils.workspace = true
test-fixture.workspace = true
[features] [features]
in-rust-tree = ["rustc-dependencies/in-rust-tree"] in-rust-tree = ["rustc-dependencies/in-rust-tree"]
[lints]
workspace = true

View file

@ -637,9 +637,12 @@ impl<'attr> AttrQuery<'attr> {
} }
} }
fn any_has_attrs( fn any_has_attrs<'db>(
db: &dyn DefDatabase, db: &(dyn DefDatabase + 'db),
id: impl Lookup<Data = impl HasSource<Value = impl ast::HasAttrs>>, id: impl Lookup<
Database<'db> = dyn DefDatabase + 'db,
Data = impl HasSource<Value = impl ast::HasAttrs>,
>,
) -> InFile<ast::AnyHasAttrs> { ) -> InFile<ast::AnyHasAttrs> {
id.lookup(db).source(db).map(ast::AnyHasAttrs::new) id.lookup(db).source(db).map(ast::AnyHasAttrs::new)
} }
@ -650,17 +653,17 @@ fn attrs_from_item_tree<N: ItemTreeNode>(db: &dyn DefDatabase, id: ItemTreeId<N>
tree.raw_attrs(mod_item.into()).clone() tree.raw_attrs(mod_item.into()).clone()
} }
fn attrs_from_item_tree_loc<N: ItemTreeNode>( fn attrs_from_item_tree_loc<'db, N: ItemTreeNode>(
db: &dyn DefDatabase, db: &(dyn DefDatabase + 'db),
lookup: impl Lookup<Data = ItemLoc<N>>, lookup: impl Lookup<Database<'db> = dyn DefDatabase + 'db, Data = ItemLoc<N>>,
) -> RawAttrs { ) -> RawAttrs {
let id = lookup.lookup(db).id; let id = lookup.lookup(db).id;
attrs_from_item_tree(db, id) attrs_from_item_tree(db, id)
} }
fn attrs_from_item_tree_assoc<N: ItemTreeNode>( fn attrs_from_item_tree_assoc<'db, N: ItemTreeNode>(
db: &dyn DefDatabase, db: &(dyn DefDatabase + 'db),
lookup: impl Lookup<Data = AssocItemLoc<N>>, lookup: impl Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<N>>,
) -> RawAttrs { ) -> RawAttrs {
let id = lookup.lookup(db).id; let id = lookup.lookup(db).id;
attrs_from_item_tree(db, id) attrs_from_item_tree(db, id)

View file

@ -1,19 +1,23 @@
//! This module contains tests for doc-expression parsing. //! This module contains tests for doc-expression parsing.
//! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`. //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
use triomphe::Arc;
use base_db::FileId; use base_db::FileId;
use hir_expand::span::{RealSpanMap, SpanMapRef}; use hir_expand::span_map::{RealSpanMap, SpanMap};
use mbe::syntax_node_to_token_tree; use mbe::syntax_node_to_token_tree;
use syntax::{ast, AstNode}; use syntax::{ast, AstNode, TextRange};
use crate::attr::{DocAtom, DocExpr}; use crate::attr::{DocAtom, DocExpr};
fn assert_parse_result(input: &str, expected: DocExpr) { fn assert_parse_result(input: &str, expected: DocExpr) {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let tt = syntax_node_to_token_tree( let tt = syntax_node_to_token_tree(
tt.syntax(), tt.syntax(),
SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::from_raw(0))), map.as_ref(),
map.span_for_range(TextRange::empty(0.into())),
); );
let cfg = DocExpr::parse(&tt); let cfg = DocExpr::parse(&tt);
assert_eq!(cfg, expected); assert_eq!(cfg, expected);

View file

@ -8,7 +8,7 @@ use either::Either;
use hir_expand::{ use hir_expand::{
ast_id_map::AstIdMap, ast_id_map::AstIdMap,
name::{name, AsName, Name}, name::{name, AsName, Name},
AstId, ExpandError, InFile, ExpandError, InFile,
}; };
use intern::Interned; use intern::Interned;
use profile::Count; use profile::Count;
@ -66,7 +66,7 @@ pub(super) fn lower(
krate, krate,
def_map: expander.module.def_map(db), def_map: expander.module.def_map(db),
source_map: BodySourceMap::default(), source_map: BodySourceMap::default(),
ast_id_map: db.ast_id_map(expander.current_file_id), ast_id_map: db.ast_id_map(expander.current_file_id()),
body: Body { body: Body {
exprs: Default::default(), exprs: Default::default(),
pats: Default::default(), pats: Default::default(),
@ -408,7 +408,7 @@ impl ExprCollector<'_> {
ast::Expr::ParenExpr(e) => { ast::Expr::ParenExpr(e) => {
let inner = self.collect_expr_opt(e.expr()); let inner = self.collect_expr_opt(e.expr());
// make the paren expr point to the inner expression as well // make the paren expr point to the inner expression as well
let src = self.expander.to_source(syntax_ptr); let src = self.expander.in_file(syntax_ptr);
self.source_map.expr_map.insert(src, inner); self.source_map.expr_map.insert(src, inner);
inner inner
} }
@ -441,7 +441,7 @@ impl ExprCollector<'_> {
Some(e) => self.collect_expr(e), Some(e) => self.collect_expr(e),
None => self.missing_expr(), None => self.missing_expr(),
}; };
let src = self.expander.to_source(AstPtr::new(&field)); let src = self.expander.in_file(AstPtr::new(&field));
self.source_map.field_map_back.insert(expr, src); self.source_map.field_map_back.insert(expr, src);
Some(RecordLitField { name, expr }) Some(RecordLitField { name, expr })
}) })
@ -644,7 +644,7 @@ impl ExprCollector<'_> {
Some(id) => { Some(id) => {
// Make the macro-call point to its expanded expression so we can query // Make the macro-call point to its expanded expression so we can query
// semantics on syntax pointers to the macro // semantics on syntax pointers to the macro
let src = self.expander.to_source(syntax_ptr); let src = self.expander.in_file(syntax_ptr);
self.source_map.expr_map.insert(src, id); self.source_map.expr_map.insert(src, id);
id id
} }
@ -957,11 +957,19 @@ impl ExprCollector<'_> {
T: ast::AstNode, T: ast::AstNode,
{ {
// File containing the macro call. Expansion errors will be attached here. // File containing the macro call. Expansion errors will be attached here.
let outer_file = self.expander.current_file_id; let outer_file = self.expander.current_file_id();
let macro_call_ptr = self.expander.to_source(AstPtr::new(&mcall)); let macro_call_ptr = self.expander.in_file(syntax_ptr);
let module = self.expander.module.local_id; let module = self.expander.module.local_id;
let res = self.expander.enter_expand(self.db, mcall, |path| {
let res = match self.def_map.modules[module]
.scope
.macro_invocations
.get(&InFile::new(outer_file, self.ast_id_map.ast_id_for_ptr(syntax_ptr)))
{
// fast path, macro call is in a block module
Some(&call) => Ok(self.expander.enter_expand_id(self.db, call)),
None => self.expander.enter_expand(self.db, mcall, |path| {
self.def_map self.def_map
.resolve_path( .resolve_path(
self.db, self.db,
@ -972,7 +980,8 @@ impl ExprCollector<'_> {
) )
.0 .0
.take_macros() .take_macros()
}); }),
};
let res = match res { let res = match res {
Ok(res) => res, Ok(res) => res,
@ -986,7 +995,6 @@ impl ExprCollector<'_> {
return collector(self, None); return collector(self, None);
} }
}; };
if record_diagnostics { if record_diagnostics {
match &res.err { match &res.err {
Some(ExpandError::UnresolvedProcMacro(krate)) => { Some(ExpandError::UnresolvedProcMacro(krate)) => {
@ -1013,10 +1021,10 @@ impl ExprCollector<'_> {
Some((mark, expansion)) => { Some((mark, expansion)) => {
// Keep collecting even with expansion errors so we can provide completions and // Keep collecting even with expansion errors so we can provide completions and
// other services in incomplete macro expressions. // other services in incomplete macro expressions.
self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id); self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id());
let prev_ast_id_map = mem::replace( let prev_ast_id_map = mem::replace(
&mut self.ast_id_map, &mut self.ast_id_map,
self.db.ast_id_map(self.expander.current_file_id), self.db.ast_id_map(self.expander.current_file_id()),
); );
if record_diagnostics { if record_diagnostics {
@ -1066,7 +1074,7 @@ impl ExprCollector<'_> {
Some(tail) => { Some(tail) => {
// Make the macro-call point to its expanded expression so we can query // Make the macro-call point to its expanded expression so we can query
// semantics on syntax pointers to the macro // semantics on syntax pointers to the macro
let src = self.expander.to_source(syntax_ptr); let src = self.expander.in_file(syntax_ptr);
self.source_map.expr_map.insert(src, tail); self.source_map.expr_map.insert(src, tail);
Some(tail) Some(tail)
} }
@ -1140,7 +1148,7 @@ impl ExprCollector<'_> {
let block_id = if block_has_items { let block_id = if block_has_items {
let file_local_id = self.ast_id_map.ast_id(&block); let file_local_id = self.ast_id_map.ast_id(&block);
let ast_id = AstId::new(self.expander.current_file_id, file_local_id); let ast_id = self.expander.in_file(file_local_id);
Some(self.db.intern_block(BlockLoc { ast_id, module: self.expander.module })) Some(self.db.intern_block(BlockLoc { ast_id, module: self.expander.module }))
} else { } else {
None None
@ -1333,7 +1341,7 @@ impl ExprCollector<'_> {
let ast_pat = f.pat()?; let ast_pat = f.pat()?;
let pat = self.collect_pat(ast_pat, binding_list); let pat = self.collect_pat(ast_pat, binding_list);
let name = f.field_name()?.as_name(); let name = f.field_name()?.as_name();
let src = self.expander.to_source(AstPtr::new(&f)); let src = self.expander.in_file(AstPtr::new(&f));
self.source_map.pat_field_map_back.insert(pat, src); self.source_map.pat_field_map_back.insert(pat, src);
Some(RecordFieldPat { name, pat }) Some(RecordFieldPat { name, pat })
}) })
@ -1391,7 +1399,7 @@ impl ExprCollector<'_> {
ast::Pat::MacroPat(mac) => match mac.macro_call() { ast::Pat::MacroPat(mac) => match mac.macro_call() {
Some(call) => { Some(call) => {
let macro_ptr = AstPtr::new(&call); let macro_ptr = AstPtr::new(&call);
let src = self.expander.to_source(AstPtr::new(&Either::Left(pat))); let src = self.expander.in_file(AstPtr::new(&Either::Left(pat)));
let pat = let pat =
self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| { self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
this.collect_pat_opt(expanded_pat, binding_list) this.collect_pat_opt(expanded_pat, binding_list)
@ -1472,10 +1480,7 @@ impl ExprCollector<'_> {
} }
self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode { self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode {
node: InFile::new( node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())),
self.expander.current_file_id,
SyntaxNodePtr::new(owner.syntax()),
),
cfg, cfg,
opts: self.expander.cfg_options().clone(), opts: self.expander.cfg_options().clone(),
}); });
@ -1514,10 +1519,7 @@ impl ExprCollector<'_> {
} else { } else {
Err(BodyDiagnostic::UnreachableLabel { Err(BodyDiagnostic::UnreachableLabel {
name, name,
node: InFile::new( node: self.expander.in_file(AstPtr::new(&lifetime)),
self.expander.current_file_id,
AstPtr::new(&lifetime),
),
}) })
}; };
} }
@ -1526,7 +1528,7 @@ impl ExprCollector<'_> {
Err(BodyDiagnostic::UndeclaredLabel { Err(BodyDiagnostic::UndeclaredLabel {
name, name,
node: InFile::new(self.expander.current_file_id, AstPtr::new(&lifetime)), node: self.expander.in_file(AstPtr::new(&lifetime)),
}) })
} }
@ -1990,7 +1992,7 @@ fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)>
impl ExprCollector<'_> { impl ExprCollector<'_> {
fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId { fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.to_source(ptr); let src = self.expander.in_file(ptr);
let id = self.body.exprs.alloc(expr); let id = self.body.exprs.alloc(expr);
self.source_map.expr_map_back.insert(id, src.clone()); self.source_map.expr_map_back.insert(id, src.clone());
self.source_map.expr_map.insert(src, id); self.source_map.expr_map.insert(src, id);
@ -2018,7 +2020,7 @@ impl ExprCollector<'_> {
} }
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
let src = self.expander.to_source(ptr); let src = self.expander.in_file(ptr);
let id = self.body.pats.alloc(pat); let id = self.body.pats.alloc(pat);
self.source_map.pat_map_back.insert(id, src.clone()); self.source_map.pat_map_back.insert(id, src.clone());
self.source_map.pat_map.insert(src, id); self.source_map.pat_map.insert(src, id);
@ -2033,7 +2035,7 @@ impl ExprCollector<'_> {
} }
fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId { fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId {
let src = self.expander.to_source(ptr); let src = self.expander.in_file(ptr);
let id = self.body.labels.alloc(label); let id = self.body.labels.alloc(label);
self.source_map.label_map_back.insert(id, src.clone()); self.source_map.label_map_back.insert(id, src.clone());
self.source_map.label_map.insert(src, id); self.source_map.label_map.insert(src, id);

View file

@ -267,9 +267,10 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use base_db::{fixture::WithFixture, FileId, SourceDatabase}; use base_db::{FileId, SourceDatabase};
use hir_expand::{name::AsName, InFile}; use hir_expand::{name::AsName, InFile};
use syntax::{algo::find_node_at_offset, ast, AstNode}; use syntax::{algo::find_node_at_offset, ast, AstNode};
use test_fixture::WithFixture;
use test_utils::{assert_eq_text, extract_offset}; use test_utils::{assert_eq_text, extract_offset};
use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId}; use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId};

View file

@ -1,7 +1,8 @@
mod block; mod block;
use base_db::{fixture::WithFixture, SourceDatabase}; use base_db::SourceDatabase;
use expect_test::{expect, Expect}; use expect_test::{expect, Expect};
use test_fixture::WithFixture;
use crate::{test_db::TestDB, ModuleDefId}; use crate::{test_db::TestDB, ModuleDefId};

View file

@ -16,7 +16,7 @@ use crate::{
db::DefDatabase, db::DefDatabase,
expander::{Expander, Mark}, expander::{Expander, Mark},
item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId}, item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId},
macro_call_as_call_id, macro_id_to_def_id, macro_call_as_call_id,
nameres::{ nameres::{
attr_resolution::ResolvedAttr, attr_resolution::ResolvedAttr,
diagnostics::DefDiagnostic, diagnostics::DefDiagnostic,
@ -720,7 +720,7 @@ impl<'a> AssocItemCollector<'a> {
) )
.0 .0
.take_macros() .take_macros()
.map(|it| macro_id_to_def_id(self.db, it)) .map(|it| self.db.macro_def(it))
}; };
match macro_call_as_call_id( match macro_call_as_call_id(
self.db.upcast(), self.db.upcast(),

View file

@ -1,7 +1,7 @@
//! Defines database & queries for name resolution. //! Defines database & queries for name resolution.
use base_db::{salsa, CrateId, SourceDatabase, Upcast}; use base_db::{salsa, CrateId, SourceDatabase, Upcast};
use either::Either; use either::Either;
use hir_expand::{db::ExpandDatabase, HirFileId}; use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId};
use intern::Interned; use intern::Interned;
use la_arena::ArenaMap; use la_arena::ArenaMap;
use syntax::{ast, AstPtr}; use syntax::{ast, AstPtr};
@ -24,9 +24,10 @@ use crate::{
AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId, AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId,
EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId,
FunctionLoc, GenericDefId, ImplId, ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId, FunctionLoc, GenericDefId, ImplId, ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId,
LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc, LocalFieldId, Macro2Id, Macro2Loc, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags,
StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitAliasId,
TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc,
VariantId,
}; };
#[salsa::query_group(InternDatabaseStorage)] #[salsa::query_group(InternDatabaseStorage)]
@ -110,6 +111,8 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
#[salsa::invoke(DefMap::block_def_map_query)] #[salsa::invoke(DefMap::block_def_map_query)]
fn block_def_map(&self, block: BlockId) -> Arc<DefMap>; fn block_def_map(&self, block: BlockId) -> Arc<DefMap>;
fn macro_def(&self, m: MacroId) -> MacroDefId;
// region:data // region:data
#[salsa::invoke(StructData::struct_data_query)] #[salsa::invoke(StructData::struct_data_query)]
@ -239,12 +242,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
#[salsa::invoke(LangItems::crate_lang_items_query)] #[salsa::invoke(LangItems::crate_lang_items_query)]
fn crate_lang_items(&self, krate: CrateId) -> Arc<LangItems>; fn crate_lang_items(&self, krate: CrateId) -> Arc<LangItems>;
#[salsa::transparent]
fn crate_limits(&self, crate_id: CrateId) -> CrateLimits;
#[salsa::transparent]
fn recursion_limit(&self, crate_id: CrateId) -> u32;
fn crate_supports_no_std(&self, crate_id: CrateId) -> bool; fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
} }
@ -253,24 +250,6 @@ fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
db.crate_def_map_query(krate) db.crate_def_map_query(krate)
} }
pub struct CrateLimits {
/// The maximum depth for potentially infinitely-recursive compile-time operations like macro expansion or auto-dereference.
pub recursion_limit: u32,
}
fn crate_limits(db: &dyn DefDatabase, crate_id: CrateId) -> CrateLimits {
let def_map = db.crate_def_map(crate_id);
CrateLimits {
// 128 is the default in rustc.
recursion_limit: def_map.recursion_limit().unwrap_or(128),
}
}
fn recursion_limit(db: &dyn DefDatabase, crate_id: CrateId) -> u32 {
db.crate_limits(crate_id).recursion_limit
}
fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool { fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
let file = db.crate_graph()[crate_id].root_file_id; let file = db.crate_graph()[crate_id].root_file_id;
let item_tree = db.file_item_tree(file.into()); let item_tree = db.file_item_tree(file.into());
@ -305,3 +284,78 @@ fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
false false
} }
fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
use hir_expand::InFile;
use crate::{Lookup, MacroDefKind, MacroExpander};
let kind = |expander, file_id, m| {
let in_file = InFile::new(file_id, m);
match expander {
MacroExpander::Declarative => MacroDefKind::Declarative(in_file),
MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file),
MacroExpander::BuiltInAttr(it) => MacroDefKind::BuiltInAttr(it, in_file),
MacroExpander::BuiltInDerive(it) => MacroDefKind::BuiltInDerive(it, in_file),
MacroExpander::BuiltInEager(it) => MacroDefKind::BuiltInEager(it, in_file),
}
};
match id {
MacroId::Macro2Id(it) => {
let loc: Macro2Loc = it.lookup(db);
let item_tree = loc.id.item_tree(db);
let makro = &item_tree[loc.id.value];
MacroDefId {
krate: loc.container.krate,
kind: kind(loc.expander, loc.id.file_id(), makro.ast_id.upcast()),
local_inner: false,
allow_internal_unsafe: loc.allow_internal_unsafe,
span: db
.span_map(loc.id.file_id())
.span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),
edition: loc.edition,
}
}
MacroId::MacroRulesId(it) => {
let loc: MacroRulesLoc = it.lookup(db);
let item_tree = loc.id.item_tree(db);
let makro = &item_tree[loc.id.value];
MacroDefId {
krate: loc.container.krate,
kind: kind(loc.expander, loc.id.file_id(), makro.ast_id.upcast()),
local_inner: loc.flags.contains(MacroRulesLocFlags::LOCAL_INNER),
allow_internal_unsafe: loc
.flags
.contains(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE),
span: db
.span_map(loc.id.file_id())
.span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),
edition: loc.edition,
}
}
MacroId::ProcMacroId(it) => {
let loc = it.lookup(db);
let item_tree = loc.id.item_tree(db);
let makro = &item_tree[loc.id.value];
MacroDefId {
krate: loc.container.krate,
kind: MacroDefKind::ProcMacro(
loc.expander,
loc.kind,
InFile::new(loc.id.file_id(), makro.ast_id),
),
local_inner: false,
allow_internal_unsafe: false,
span: db
.span_map(loc.id.file_id())
.span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),
edition: loc.edition,
}
}
}
}

View file

@ -4,15 +4,15 @@ use base_db::CrateId;
use cfg::CfgOptions; use cfg::CfgOptions;
use drop_bomb::DropBomb; use drop_bomb::DropBomb;
use hir_expand::{ use hir_expand::{
attrs::RawAttrs, mod_path::ModPath, span::SpanMap, ExpandError, ExpandResult, HirFileId, attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandResult, HirFileId,
InFile, MacroCallId, InFile, MacroCallId,
}; };
use limit::Limit; use limit::Limit;
use syntax::{ast, Parse, SyntaxNode}; use syntax::{ast, Parse};
use crate::{ use crate::{
attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall, attr::Attrs, db::DefDatabase, lower::LowerCtx, path::Path, AsMacroCall, MacroId, ModuleId,
MacroId, ModuleId, UnresolvedMacro, UnresolvedMacro,
}; };
#[derive(Debug)] #[derive(Debug)]
@ -20,7 +20,7 @@ pub struct Expander {
cfg_options: CfgOptions, cfg_options: CfgOptions,
span_map: SpanMap, span_map: SpanMap,
krate: CrateId, krate: CrateId,
pub(crate) current_file_id: HirFileId, current_file_id: HirFileId,
pub(crate) module: ModuleId, pub(crate) module: ModuleId,
/// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached. /// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
recursion_depth: u32, recursion_depth: u32,
@ -29,12 +29,13 @@ pub struct Expander {
impl Expander { impl Expander {
pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander { pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
let recursion_limit = db.recursion_limit(module.krate); let recursion_limit = module.def_map(db).recursion_limit() as usize;
#[cfg(not(test))] let recursion_limit = Limit::new(if cfg!(test) {
let recursion_limit = Limit::new(recursion_limit as usize);
// Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
#[cfg(test)] std::cmp::min(32, recursion_limit)
let recursion_limit = Limit::new(std::cmp::min(32, recursion_limit as usize)); } else {
recursion_limit
});
Expander { Expander {
current_file_id, current_file_id,
module, module,
@ -56,9 +57,9 @@ impl Expander {
let mut unresolved_macro_err = None; let mut unresolved_macro_err = None;
let result = self.within_limit(db, |this| { let result = self.within_limit(db, |this| {
let macro_call = InFile::new(this.current_file_id, &macro_call); let macro_call = this.in_file(&macro_call);
match macro_call.as_call_id_with_errors(db.upcast(), this.module.krate(), |path| { match macro_call.as_call_id_with_errors(db.upcast(), this.module.krate(), |path| {
resolver(path).map(|it| macro_id_to_def_id(db, it)) resolver(path).map(|it| db.macro_def(it))
}) { }) {
Ok(call_id) => call_id, Ok(call_id) => call_id,
Err(resolve_err) => { Err(resolve_err) => {
@ -83,17 +84,6 @@ impl Expander {
self.within_limit(db, |_this| ExpandResult::ok(Some(call_id))) self.within_limit(db, |_this| ExpandResult::ok(Some(call_id)))
} }
fn enter_expand_inner(
db: &dyn DefDatabase,
call_id: MacroCallId,
error: Option<ExpandError>,
) -> ExpandResult<Option<InFile<Parse<SyntaxNode>>>> {
let macro_file = call_id.as_macro_file();
let ExpandResult { value, err } = db.parse_macro_expansion(macro_file);
ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) }
}
pub fn exit(&mut self, mut mark: Mark) { pub fn exit(&mut self, mut mark: Mark) {
self.span_map = mark.span_map; self.span_map = mark.span_map;
self.current_file_id = mark.file_id; self.current_file_id = mark.file_id;
@ -113,7 +103,7 @@ impl Expander {
LowerCtx::new(db, self.span_map.clone(), self.current_file_id) LowerCtx::new(db, self.span_map.clone(), self.current_file_id)
} }
pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> { pub(crate) fn in_file<T>(&self, value: T) -> InFile<T> {
InFile { file_id: self.current_file_id, value } InFile { file_id: self.current_file_id, value }
} }
@ -164,26 +154,34 @@ impl Expander {
return ExpandResult { value: None, err }; return ExpandResult { value: None, err };
}; };
let res = Self::enter_expand_inner(db, call_id, err); let macro_file = call_id.as_macro_file();
match res.err { let res = db.parse_macro_expansion(macro_file);
let err = err.or(res.err);
ExpandResult {
value: match err {
// If proc-macro is disabled or unresolved, we want to expand to a missing expression // If proc-macro is disabled or unresolved, we want to expand to a missing expression
// instead of an empty tree which might end up in an empty block. // instead of an empty tree which might end up in an empty block.
Some(ExpandError::UnresolvedProcMacro(_)) => res.map(|_| None), Some(ExpandError::UnresolvedProcMacro(_)) => None,
_ => res.map(|value| { _ => (|| {
value.and_then(|InFile { file_id, value }| { let parse = res.value.0.cast::<T>()?;
let parse = value.cast::<T>()?;
self.recursion_depth += 1; self.recursion_depth += 1;
let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id)); let old_span_map = std::mem::replace(
let old_file_id = std::mem::replace(&mut self.current_file_id, file_id); &mut self.span_map,
SpanMap::ExpansionSpanMap(res.value.1),
);
let old_file_id =
std::mem::replace(&mut self.current_file_id, macro_file.into());
let mark = Mark { let mark = Mark {
file_id: old_file_id, file_id: old_file_id,
span_map: old_span_map, span_map: old_span_map,
bomb: DropBomb::new("expansion mark dropped"), bomb: DropBomb::new("expansion mark dropped"),
}; };
Some((mark, parse)) Some((mark, parse))
}) })(),
}), },
err,
} }
} }
} }

View file

@ -585,9 +585,9 @@ fn find_local_import_locations(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use base_db::fixture::WithFixture;
use hir_expand::db::ExpandDatabase; use hir_expand::db::ExpandDatabase;
use syntax::ast::AstNode; use syntax::ast::AstNode;
use test_fixture::WithFixture;
use crate::test_db::TestDB; use crate::test_db::TestDB;

View file

@ -469,8 +469,9 @@ pub fn search_dependencies(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use base_db::{fixture::WithFixture, SourceDatabase, Upcast}; use base_db::{SourceDatabase, Upcast};
use expect_test::{expect, Expect}; use expect_test::{expect, Expect};
use test_fixture::WithFixture;
use crate::{db::DefDatabase, test_db::TestDB, ItemContainerId, Lookup}; use crate::{db::DefDatabase, test_db::TestDB, ItemContainerId, Lookup};

View file

@ -102,8 +102,10 @@ pub struct ItemScope {
// FIXME: Macro shadowing in one module is not properly handled. Non-item place macros will // FIXME: Macro shadowing in one module is not properly handled. Non-item place macros will
// be all resolved to the last one defined if shadowing happens. // be all resolved to the last one defined if shadowing happens.
legacy_macros: FxHashMap<Name, SmallVec<[MacroId; 1]>>, legacy_macros: FxHashMap<Name, SmallVec<[MacroId; 1]>>,
/// The derive macro invocations in this scope. /// The attribute macro invocations in this scope.
attr_macros: FxHashMap<AstId<ast::Item>, MacroCallId>, attr_macros: FxHashMap<AstId<ast::Item>, MacroCallId>,
/// The macro invocations in this scope.
pub macro_invocations: FxHashMap<AstId<ast::MacroCall>, MacroCallId>,
/// The derive macro invocations in this scope, keyed by the owner item over the actual derive attributes /// The derive macro invocations in this scope, keyed by the owner item over the actual derive attributes
/// paired with the derive macro invocations for the specific attribute. /// paired with the derive macro invocations for the specific attribute.
derive_macros: FxHashMap<AstId<ast::Adt>, SmallVec<[DeriveMacroInvocation; 1]>>, derive_macros: FxHashMap<AstId<ast::Adt>, SmallVec<[DeriveMacroInvocation; 1]>>,
@ -345,6 +347,10 @@ impl ItemScope {
self.attr_macros.insert(item, call); self.attr_macros.insert(item, call);
} }
pub(crate) fn add_macro_invoc(&mut self, call: AstId<ast::MacroCall>, call_id: MacroCallId) {
self.macro_invocations.insert(call, call_id);
}
pub(crate) fn attr_macro_invocs( pub(crate) fn attr_macro_invocs(
&self, &self,
) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ { ) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
@ -692,6 +698,7 @@ impl ItemScope {
use_imports_values, use_imports_values,
use_imports_types, use_imports_types,
use_imports_macros, use_imports_macros,
macro_invocations,
} = self; } = self;
types.shrink_to_fit(); types.shrink_to_fit();
values.shrink_to_fit(); values.shrink_to_fit();
@ -709,6 +716,7 @@ impl ItemScope {
derive_macros.shrink_to_fit(); derive_macros.shrink_to_fit();
extern_crate_decls.shrink_to_fit(); extern_crate_decls.shrink_to_fit();
use_decls.shrink_to_fit(); use_decls.shrink_to_fit();
macro_invocations.shrink_to_fit();
} }
} }

View file

@ -29,6 +29,9 @@
//! //!
//! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its //! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
//! surface syntax. //! surface syntax.
//!
//! Note that we cannot store [`span::Span`]s inside of this, as typing in an item invalidates its
//! encompassing span!
mod lower; mod lower;
mod pretty; mod pretty;
@ -42,7 +45,7 @@ use std::{
}; };
use ast::{AstNode, HasName, StructKind}; use ast::{AstNode, HasName, StructKind};
use base_db::{span::SyntaxContextId, CrateId}; use base_db::CrateId;
use either::Either; use either::Either;
use hir_expand::{ use hir_expand::{
ast_id_map::{AstIdNode, FileAstId}, ast_id_map::{AstIdNode, FileAstId},
@ -55,6 +58,7 @@ use la_arena::{Arena, Idx, IdxRange, RawIdx};
use profile::Count; use profile::Count;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use smallvec::SmallVec; use smallvec::SmallVec;
use span::Span;
use stdx::never; use stdx::never;
use syntax::{ast, match_ast, SyntaxKind}; use syntax::{ast, match_ast, SyntaxKind};
use triomphe::Arc; use triomphe::Arc;
@ -280,7 +284,7 @@ struct ItemTreeData {
mods: Arena<Mod>, mods: Arena<Mod>,
macro_calls: Arena<MacroCall>, macro_calls: Arena<MacroCall>,
macro_rules: Arena<MacroRules>, macro_rules: Arena<MacroRules>,
macro_defs: Arena<MacroDef>, macro_defs: Arena<Macro2>,
vis: ItemVisibilities, vis: ItemVisibilities,
} }
@ -513,7 +517,7 @@ mod_items! {
Mod in mods -> ast::Module, Mod in mods -> ast::Module,
MacroCall in macro_calls -> ast::MacroCall, MacroCall in macro_calls -> ast::MacroCall,
MacroRules in macro_rules -> ast::MacroRules, MacroRules in macro_rules -> ast::MacroRules,
MacroDef in macro_defs -> ast::MacroDef, Macro2 in macro_defs -> ast::MacroDef,
} }
macro_rules! impl_index { macro_rules! impl_index {
@ -746,7 +750,8 @@ pub struct MacroCall {
pub path: Interned<ModPath>, pub path: Interned<ModPath>,
pub ast_id: FileAstId<ast::MacroCall>, pub ast_id: FileAstId<ast::MacroCall>,
pub expand_to: ExpandTo, pub expand_to: ExpandTo,
pub call_site: SyntaxContextId, // FIXME: We need to move this out. It invalidates the item tree when typing inside the macro call.
pub call_site: Span,
} }
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
@ -758,7 +763,7 @@ pub struct MacroRules {
/// "Macros 2.0" macro definition. /// "Macros 2.0" macro definition.
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub struct MacroDef { pub struct Macro2 {
pub name: Name, pub name: Name,
pub visibility: RawVisibilityId, pub visibility: RawVisibilityId,
pub ast_id: FileAstId<ast::MacroDef>, pub ast_id: FileAstId<ast::MacroDef>,
@ -917,7 +922,7 @@ impl ModItem {
| ModItem::Impl(_) | ModItem::Impl(_)
| ModItem::Mod(_) | ModItem::Mod(_)
| ModItem::MacroRules(_) | ModItem::MacroRules(_)
| ModItem::MacroDef(_) => None, | ModItem::Macro2(_) => None,
ModItem::MacroCall(call) => Some(AssocItem::MacroCall(*call)), ModItem::MacroCall(call) => Some(AssocItem::MacroCall(*call)),
ModItem::Const(konst) => Some(AssocItem::Const(*konst)), ModItem::Const(konst) => Some(AssocItem::Const(*konst)),
ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(*alias)), ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(*alias)),
@ -943,7 +948,7 @@ impl ModItem {
ModItem::Mod(it) => tree[it.index()].ast_id().upcast(), ModItem::Mod(it) => tree[it.index()].ast_id().upcast(),
ModItem::MacroCall(it) => tree[it.index()].ast_id().upcast(), ModItem::MacroCall(it) => tree[it.index()].ast_id().upcast(),
ModItem::MacroRules(it) => tree[it.index()].ast_id().upcast(), ModItem::MacroRules(it) => tree[it.index()].ast_id().upcast(),
ModItem::MacroDef(it) => tree[it.index()].ast_id().upcast(), ModItem::Macro2(it) => tree[it.index()].ast_id().upcast(),
} }
} }
} }

View file

@ -2,7 +2,7 @@
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
use hir_expand::{ast_id_map::AstIdMap, span::SpanMapRef, HirFileId}; use hir_expand::{ast_id_map::AstIdMap, span_map::SpanMapRef, HirFileId};
use syntax::ast::{self, HasModuleItem, HasTypeBounds}; use syntax::ast::{self, HasModuleItem, HasTypeBounds};
use crate::{ use crate::{
@ -549,7 +549,7 @@ impl<'a> Ctx<'a> {
path, path,
ast_id, ast_id,
expand_to, expand_to,
call_site: span_map.span_for_range(m.syntax().text_range()).ctx, call_site: span_map.span_for_range(m.syntax().text_range()),
}; };
Some(id(self.data().macro_calls.alloc(res))) Some(id(self.data().macro_calls.alloc(res)))
} }
@ -562,13 +562,13 @@ impl<'a> Ctx<'a> {
Some(id(self.data().macro_rules.alloc(res))) Some(id(self.data().macro_rules.alloc(res)))
} }
fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<FileItemTreeId<MacroDef>> { fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<FileItemTreeId<Macro2>> {
let name = m.name().map(|it| it.as_name())?; let name = m.name().map(|it| it.as_name())?;
let ast_id = self.source_ast_id_map.ast_id(m); let ast_id = self.source_ast_id_map.ast_id(m);
let visibility = self.lower_visibility(m); let visibility = self.lower_visibility(m);
let res = MacroDef { name, ast_id, visibility }; let res = Macro2 { name, ast_id, visibility };
Some(id(self.data().macro_defs.alloc(res))) Some(id(self.data().macro_defs.alloc(res)))
} }

View file

@ -464,8 +464,8 @@ impl Printer<'_> {
let MacroRules { name, ast_id: _ } = &self.tree[it]; let MacroRules { name, ast_id: _ } = &self.tree[it];
wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db.upcast())); wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db.upcast()));
} }
ModItem::MacroDef(it) => { ModItem::Macro2(it) => {
let MacroDef { name, visibility, ast_id: _ } = &self.tree[it]; let Macro2 { name, visibility, ast_id: _ } = &self.tree[it];
self.print_visibility(*visibility); self.print_visibility(*visibility);
wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast())); wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast()));
} }

View file

@ -1,5 +1,5 @@
use base_db::fixture::WithFixture;
use expect_test::{expect, Expect}; use expect_test::{expect, Expect};
use test_fixture::WithFixture;
use crate::{db::DefDatabase, test_db::TestDB}; use crate::{db::DefDatabase, test_db::TestDB};

View file

@ -63,7 +63,7 @@ use std::{
panic::{RefUnwindSafe, UnwindSafe}, panic::{RefUnwindSafe, UnwindSafe},
}; };
use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind}; use base_db::{impl_intern_key, salsa, CrateId, Edition};
use hir_expand::{ use hir_expand::{
ast_id_map::{AstIdNode, FileAstId}, ast_id_map::{AstIdNode, FileAstId},
attrs::{Attr, AttrId, AttrInput}, attrs::{Attr, AttrId, AttrInput},
@ -72,24 +72,27 @@ use hir_expand::{
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase, db::ExpandDatabase,
eager::expand_eager_macro_input, eager::expand_eager_macro_input,
impl_intern_lookup,
name::Name, name::Name,
proc_macro::ProcMacroExpander, proc_macro::{CustomProcMacroExpander, ProcMacroKind},
AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
MacroDefId, MacroDefKind, MacroDefId, MacroDefKind,
}; };
use item_tree::ExternBlock; use item_tree::ExternBlock;
use la_arena::Idx; use la_arena::Idx;
use nameres::DefMap; use nameres::DefMap;
use span::Span;
use stdx::impl_from; use stdx::impl_from;
use syntax::{ast, AstNode}; use syntax::{ast, AstNode};
pub use hir_expand::tt; pub use hir_expand::{tt, Intern, Lookup};
use crate::{ use crate::{
builtin_type::BuiltinType, builtin_type::BuiltinType,
data::adt::VariantData, data::adt::VariantData,
db::DefDatabase,
item_tree::{ item_tree::{
Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, Macro2, MacroRules,
Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use, Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use,
}, },
}; };
@ -101,7 +104,7 @@ pub struct CrateRootModuleId {
} }
impl CrateRootModuleId { impl CrateRootModuleId {
pub fn def_map(&self, db: &dyn db::DefDatabase) -> Arc<DefMap> { pub fn def_map(&self, db: &dyn DefDatabase) -> Arc<DefMap> {
db.crate_def_map(self.krate) db.crate_def_map(self.krate)
} }
@ -163,7 +166,7 @@ pub struct ModuleId {
} }
impl ModuleId { impl ModuleId {
pub fn def_map(self, db: &dyn db::DefDatabase) -> Arc<DefMap> { pub fn def_map(self, db: &dyn DefDatabase) -> Arc<DefMap> {
match self.block { match self.block {
Some(block) => db.block_def_map(block), Some(block) => db.block_def_map(block),
None => db.crate_def_map(self.krate), None => db.crate_def_map(self.krate),
@ -174,7 +177,7 @@ impl ModuleId {
self.krate self.krate
} }
pub fn name(self, db: &dyn db::DefDatabase) -> Option<Name> { pub fn name(self, db: &dyn DefDatabase) -> Option<Name> {
let def_map = self.def_map(db); let def_map = self.def_map(db);
let parent = def_map[self.local_id].parent?; let parent = def_map[self.local_id].parent?;
def_map[parent].children.iter().find_map(|(name, module_id)| { def_map[parent].children.iter().find_map(|(name, module_id)| {
@ -186,7 +189,7 @@ impl ModuleId {
}) })
} }
pub fn containing_module(self, db: &dyn db::DefDatabase) -> Option<ModuleId> { pub fn containing_module(self, db: &dyn DefDatabase) -> Option<ModuleId> {
self.def_map(db).containing_module(self.local_id) self.def_map(db).containing_module(self.local_id)
} }
@ -263,20 +266,7 @@ impl<N: ItemTreeNode> Hash for AssocItemLoc<N> {
macro_rules! impl_intern { macro_rules! impl_intern {
($id:ident, $loc:ident, $intern:ident, $lookup:ident) => { ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
impl_intern_key!($id); impl_intern_key!($id);
impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup);
impl Intern for $loc {
type ID = $id;
fn intern(self, db: &dyn db::DefDatabase) -> $id {
db.$intern(self)
}
}
impl Lookup for $id {
type Data = $loc;
fn lookup(&self, db: &dyn db::DefDatabase) -> $loc {
db.$lookup(*self)
}
}
}; };
} }
@ -376,9 +366,10 @@ pub struct Macro2Id(salsa::InternId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Macro2Loc { pub struct Macro2Loc {
pub container: ModuleId, pub container: ModuleId,
pub id: ItemTreeId<MacroDef>, pub id: ItemTreeId<Macro2>,
pub expander: MacroExpander, pub expander: MacroExpander,
pub allow_internal_unsafe: bool, pub allow_internal_unsafe: bool,
pub edition: Edition,
} }
impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2); impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2);
@ -389,19 +380,28 @@ pub struct MacroRulesLoc {
pub container: ModuleId, pub container: ModuleId,
pub id: ItemTreeId<MacroRules>, pub id: ItemTreeId<MacroRules>,
pub expander: MacroExpander, pub expander: MacroExpander,
pub allow_internal_unsafe: bool, pub flags: MacroRulesLocFlags,
pub local_inner: bool, pub edition: Edition,
} }
impl_intern!(MacroRulesId, MacroRulesLoc, intern_macro_rules, lookup_intern_macro_rules); impl_intern!(MacroRulesId, MacroRulesLoc, intern_macro_rules, lookup_intern_macro_rules);
bitflags::bitflags! {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroRulesLocFlags: u8 {
const ALLOW_INTERNAL_UNSAFE = 1 << 0;
const LOCAL_INNER = 1 << 1;
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ProcMacroId(salsa::InternId); pub struct ProcMacroId(salsa::InternId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ProcMacroLoc { pub struct ProcMacroLoc {
pub container: CrateRootModuleId, pub container: CrateRootModuleId,
pub id: ItemTreeId<Function>, pub id: ItemTreeId<Function>,
pub expander: ProcMacroExpander, pub expander: CustomProcMacroExpander,
pub kind: ProcMacroKind, pub kind: ProcMacroKind,
pub edition: Edition,
} }
impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro); impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro);
@ -510,7 +510,7 @@ pub enum MacroId {
impl_from!(Macro2Id, MacroRulesId, ProcMacroId for MacroId); impl_from!(Macro2Id, MacroRulesId, ProcMacroId for MacroId);
impl MacroId { impl MacroId {
pub fn is_attribute(self, db: &dyn db::DefDatabase) -> bool { pub fn is_attribute(self, db: &dyn DefDatabase) -> bool {
matches!(self, MacroId::ProcMacroId(it) if it.lookup(db).kind == ProcMacroKind::Attr) matches!(self, MacroId::ProcMacroId(it) if it.lookup(db).kind == ProcMacroKind::Attr)
} }
} }
@ -722,7 +722,7 @@ impl PartialEq for InTypeConstLoc {
} }
impl InTypeConstId { impl InTypeConstId {
pub fn source(&self, db: &dyn db::DefDatabase) -> ast::ConstArg { pub fn source(&self, db: &dyn DefDatabase) -> ast::ConstArg {
let src = self.lookup(db).id; let src = self.lookup(db).id;
let file_id = src.file_id; let file_id = src.file_id;
let root = &db.parse_or_expand(file_id); let root = &db.parse_or_expand(file_id);
@ -742,7 +742,7 @@ pub enum GeneralConstId {
impl_from!(ConstId, ConstBlockId, InTypeConstId for GeneralConstId); impl_from!(ConstId, ConstBlockId, InTypeConstId for GeneralConstId);
impl GeneralConstId { impl GeneralConstId {
pub fn generic_def(self, db: &dyn db::DefDatabase) -> Option<GenericDefId> { pub fn generic_def(self, db: &dyn DefDatabase) -> Option<GenericDefId> {
match self { match self {
GeneralConstId::ConstId(it) => Some(it.into()), GeneralConstId::ConstId(it) => Some(it.into()),
GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(), GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(),
@ -750,7 +750,7 @@ impl GeneralConstId {
} }
} }
pub fn name(self, db: &dyn db::DefDatabase) -> String { pub fn name(self, db: &dyn DefDatabase) -> String {
match self { match self {
GeneralConstId::ConstId(const_id) => db GeneralConstId::ConstId(const_id) => db
.const_data(const_id) .const_data(const_id)
@ -933,7 +933,7 @@ pub enum VariantId {
impl_from!(EnumVariantId, StructId, UnionId for VariantId); impl_from!(EnumVariantId, StructId, UnionId for VariantId);
impl VariantId { impl VariantId {
pub fn variant_data(self, db: &dyn db::DefDatabase) -> Arc<VariantData> { pub fn variant_data(self, db: &dyn DefDatabase) -> Arc<VariantData> {
match self { match self {
VariantId::StructId(it) => db.struct_data(it).variant_data.clone(), VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
VariantId::UnionId(it) => db.union_data(it).variant_data.clone(), VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
@ -943,7 +943,7 @@ impl VariantId {
} }
} }
pub fn file_id(self, db: &dyn db::DefDatabase) -> HirFileId { pub fn file_id(self, db: &dyn DefDatabase) -> HirFileId {
match self { match self {
VariantId::EnumVariantId(it) => it.parent.lookup(db).id.file_id(), VariantId::EnumVariantId(it) => it.parent.lookup(db).id.file_id(),
VariantId::StructId(it) => it.lookup(db).id.file_id(), VariantId::StructId(it) => it.lookup(db).id.file_id(),
@ -960,22 +960,12 @@ impl VariantId {
} }
} }
trait Intern {
type ID;
fn intern(self, db: &dyn db::DefDatabase) -> Self::ID;
}
pub trait Lookup {
type Data;
fn lookup(&self, db: &dyn db::DefDatabase) -> Self::Data;
}
pub trait HasModule { pub trait HasModule {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId; fn module(&self, db: &dyn DefDatabase) -> ModuleId;
} }
impl HasModule for ItemContainerId { impl HasModule for ItemContainerId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn DefDatabase) -> ModuleId {
match *self { match *self {
ItemContainerId::ModuleId(it) => it, ItemContainerId::ModuleId(it) => it,
ItemContainerId::ImplId(it) => it.lookup(db).container, ItemContainerId::ImplId(it) => it.lookup(db).container,
@ -986,13 +976,13 @@ impl HasModule for ItemContainerId {
} }
impl<N: ItemTreeNode> HasModule for AssocItemLoc<N> { impl<N: ItemTreeNode> HasModule for AssocItemLoc<N> {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn DefDatabase) -> ModuleId {
self.container.module(db) self.container.module(db)
} }
} }
impl HasModule for AdtId { impl HasModule for AdtId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn DefDatabase) -> ModuleId {
match self { match self {
AdtId::StructId(it) => it.lookup(db).container, AdtId::StructId(it) => it.lookup(db).container,
AdtId::UnionId(it) => it.lookup(db).container, AdtId::UnionId(it) => it.lookup(db).container,
@ -1002,13 +992,13 @@ impl HasModule for AdtId {
} }
impl HasModule for ExternCrateId { impl HasModule for ExternCrateId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn DefDatabase) -> ModuleId {
self.lookup(db).container self.lookup(db).container
} }
} }
impl HasModule for VariantId { impl HasModule for VariantId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn DefDatabase) -> ModuleId {
match self { match self {
VariantId::EnumVariantId(it) => it.parent.lookup(db).container, VariantId::EnumVariantId(it) => it.parent.lookup(db).container,
VariantId::StructId(it) => it.lookup(db).container, VariantId::StructId(it) => it.lookup(db).container,
@ -1018,7 +1008,7 @@ impl HasModule for VariantId {
} }
impl HasModule for MacroId { impl HasModule for MacroId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn DefDatabase) -> ModuleId {
match self { match self {
MacroId::MacroRulesId(it) => it.lookup(db).container, MacroId::MacroRulesId(it) => it.lookup(db).container,
MacroId::Macro2Id(it) => it.lookup(db).container, MacroId::Macro2Id(it) => it.lookup(db).container,
@ -1028,7 +1018,7 @@ impl HasModule for MacroId {
} }
impl HasModule for TypeOwnerId { impl HasModule for TypeOwnerId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn DefDatabase) -> ModuleId {
match self { match self {
TypeOwnerId::FunctionId(it) => it.lookup(db).module(db), TypeOwnerId::FunctionId(it) => it.lookup(db).module(db),
TypeOwnerId::StaticId(it) => it.lookup(db).module(db), TypeOwnerId::StaticId(it) => it.lookup(db).module(db),
@ -1045,7 +1035,7 @@ impl HasModule for TypeOwnerId {
} }
impl HasModule for DefWithBodyId { impl HasModule for DefWithBodyId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn DefDatabase) -> ModuleId {
match self { match self {
DefWithBodyId::FunctionId(it) => it.lookup(db).module(db), DefWithBodyId::FunctionId(it) => it.lookup(db).module(db),
DefWithBodyId::StaticId(it) => it.lookup(db).module(db), DefWithBodyId::StaticId(it) => it.lookup(db).module(db),
@ -1057,7 +1047,7 @@ impl HasModule for DefWithBodyId {
} }
impl HasModule for GenericDefId { impl HasModule for GenericDefId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn DefDatabase) -> ModuleId {
match self { match self {
GenericDefId::FunctionId(it) => it.lookup(db).module(db), GenericDefId::FunctionId(it) => it.lookup(db).module(db),
GenericDefId::AdtId(it) => it.module(db), GenericDefId::AdtId(it) => it.module(db),
@ -1072,13 +1062,13 @@ impl HasModule for GenericDefId {
} }
impl HasModule for TypeAliasId { impl HasModule for TypeAliasId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn DefDatabase) -> ModuleId {
self.lookup(db).module(db) self.lookup(db).module(db)
} }
} }
impl HasModule for TraitId { impl HasModule for TraitId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn DefDatabase) -> ModuleId {
self.lookup(db).container self.lookup(db).container
} }
} }
@ -1087,7 +1077,7 @@ impl ModuleDefId {
/// Returns the module containing `self` (or `self`, if `self` is itself a module). /// Returns the module containing `self` (or `self`, if `self` is itself a module).
/// ///
/// Returns `None` if `self` refers to a primitive type. /// Returns `None` if `self` refers to a primitive type.
pub fn module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> { pub fn module(&self, db: &dyn DefDatabase) -> Option<ModuleId> {
Some(match self { Some(match self {
ModuleDefId::ModuleId(id) => *id, ModuleDefId::ModuleId(id) => *id,
ModuleDefId::FunctionId(id) => id.lookup(db).module(db), ModuleDefId::FunctionId(id) => id.lookup(db).module(db),
@ -1105,7 +1095,7 @@ impl ModuleDefId {
} }
impl AttrDefId { impl AttrDefId {
pub fn krate(&self, db: &dyn db::DefDatabase) -> CrateId { pub fn krate(&self, db: &dyn DefDatabase) -> CrateId {
match self { match self {
AttrDefId::ModuleId(it) => it.krate, AttrDefId::ModuleId(it) => it.krate,
AttrDefId::FieldId(it) => it.parent.module(db).krate, AttrDefId::FieldId(it) => it.parent.module(db).krate,
@ -1171,7 +1161,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation"))); return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
}; };
let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx; let call_site = span_map.span_for_range(self.value.syntax().text_range());
macro_call_as_call_id_with_eager( macro_call_as_call_id_with_eager(
db, db,
@ -1201,7 +1191,7 @@ impl<T: AstIdNode> AstIdWithPath<T> {
fn macro_call_as_call_id( fn macro_call_as_call_id(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>, call: &AstIdWithPath<ast::MacroCall>,
call_site: SyntaxContextId, call_site: Span,
expand_to: ExpandTo, expand_to: ExpandTo,
krate: CrateId, krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy, resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
@ -1213,7 +1203,7 @@ fn macro_call_as_call_id(
fn macro_call_as_call_id_with_eager( fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>, call: &AstIdWithPath<ast::MacroCall>,
call_site: SyntaxContextId, call_site: Span,
expand_to: ExpandTo, expand_to: ExpandTo,
krate: CrateId, krate: CrateId,
resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>, resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@ -1243,83 +1233,12 @@ fn macro_call_as_call_id_with_eager(
Ok(res) Ok(res)
} }
pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId {
match id {
MacroId::Macro2Id(it) => {
let loc = it.lookup(db);
let item_tree = loc.id.item_tree(db);
let makro = &item_tree[loc.id.value];
let in_file = |m: FileAstId<ast::MacroDef>| InFile::new(loc.id.file_id(), m.upcast());
MacroDefId {
krate: loc.container.krate,
kind: match loc.expander {
MacroExpander::Declarative => MacroDefKind::Declarative(in_file(makro.ast_id)),
MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file(makro.ast_id)),
MacroExpander::BuiltInAttr(it) => {
MacroDefKind::BuiltInAttr(it, in_file(makro.ast_id))
}
MacroExpander::BuiltInDerive(it) => {
MacroDefKind::BuiltInDerive(it, in_file(makro.ast_id))
}
MacroExpander::BuiltInEager(it) => {
MacroDefKind::BuiltInEager(it, in_file(makro.ast_id))
}
},
local_inner: false,
allow_internal_unsafe: loc.allow_internal_unsafe,
}
}
MacroId::MacroRulesId(it) => {
let loc = it.lookup(db);
let item_tree = loc.id.item_tree(db);
let makro = &item_tree[loc.id.value];
let in_file = |m: FileAstId<ast::MacroRules>| InFile::new(loc.id.file_id(), m.upcast());
MacroDefId {
krate: loc.container.krate,
kind: match loc.expander {
MacroExpander::Declarative => MacroDefKind::Declarative(in_file(makro.ast_id)),
MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file(makro.ast_id)),
MacroExpander::BuiltInAttr(it) => {
MacroDefKind::BuiltInAttr(it, in_file(makro.ast_id))
}
MacroExpander::BuiltInDerive(it) => {
MacroDefKind::BuiltInDerive(it, in_file(makro.ast_id))
}
MacroExpander::BuiltInEager(it) => {
MacroDefKind::BuiltInEager(it, in_file(makro.ast_id))
}
},
local_inner: loc.local_inner,
allow_internal_unsafe: loc.allow_internal_unsafe,
}
}
MacroId::ProcMacroId(it) => {
let loc = it.lookup(db);
let item_tree = loc.id.item_tree(db);
let makro = &item_tree[loc.id.value];
MacroDefId {
krate: loc.container.krate,
kind: MacroDefKind::ProcMacro(
loc.expander,
loc.kind,
InFile::new(loc.id.file_id(), makro.ast_id),
),
local_inner: false,
allow_internal_unsafe: false,
}
}
}
}
fn derive_macro_as_call_id( fn derive_macro_as_call_id(
db: &dyn db::DefDatabase, db: &dyn DefDatabase,
item_attr: &AstIdWithPath<ast::Adt>, item_attr: &AstIdWithPath<ast::Adt>,
derive_attr_index: AttrId, derive_attr_index: AttrId,
derive_pos: u32, derive_pos: u32,
call_site: SyntaxContextId, call_site: Span,
krate: CrateId, krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>, resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
@ -1340,7 +1259,7 @@ fn derive_macro_as_call_id(
} }
fn attr_macro_as_call_id( fn attr_macro_as_call_id(
db: &dyn db::DefDatabase, db: &dyn DefDatabase,
item_attr: &AstIdWithPath<ast::Item>, item_attr: &AstIdWithPath<ast::Item>,
macro_attr: &Attr, macro_attr: &Attr,
krate: CrateId, krate: CrateId,
@ -1349,7 +1268,7 @@ fn attr_macro_as_call_id(
let arg = match macro_attr.input.as_deref() { let arg = match macro_attr.input.as_deref() {
Some(AttrInput::TokenTree(tt)) => { Some(AttrInput::TokenTree(tt)) => {
let mut tt = tt.as_ref().clone(); let mut tt = tt.as_ref().clone();
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);
Some(tt) Some(tt)
} }
@ -1364,7 +1283,7 @@ fn attr_macro_as_call_id(
attr_args: arg.map(Arc::new), attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id, invoc_attr_index: macro_attr.id,
}, },
macro_attr.ctxt, macro_attr.span,
) )
} }

View file

@ -3,7 +3,7 @@ use std::cell::OnceCell;
use hir_expand::{ use hir_expand::{
ast_id_map::{AstIdMap, AstIdNode}, ast_id_map::{AstIdMap, AstIdNode},
span::{SpanMap, SpanMapRef}, span_map::{SpanMap, SpanMapRef},
AstId, HirFileId, InFile, AstId, HirFileId, InFile,
}; };
use syntax::ast; use syntax::ast;

View file

@ -163,31 +163,43 @@ fn main() { ""; }
fn test_assert_expand() { fn test_assert_expand() {
check( check(
r#" r#"
#[rustc_builtin_macro] //- minicore: assert
macro_rules! assert {
($cond:expr) => ({ /* compiler built-in */ });
($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ })
}
fn main() { fn main() {
assert!(true, "{} {:?}", arg1(a, b, c), arg2); assert!(true, "{} {:?}", arg1(a, b, c), arg2);
} }
"#, "#,
expect![[r##" expect![[r#"
#[rustc_builtin_macro]
macro_rules! assert {
($cond:expr) => ({ /* compiler built-in */ });
($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ })
}
fn main() { fn main() {
{ {
if !(true ) { if !(true ) {
$crate::panic!("{} {:?}", arg1(a, b, c), arg2); $crate::panic::panic_2021!("{} {:?}", arg1(a, b, c), arg2);
} }
}; };
} }
"##]], "#]],
);
}
// FIXME: This is the wrong expansion, see FIXME on `builtin_fn_macro::use_panic_2021`
#[test]
fn test_assert_expand_2015() {
check(
r#"
//- minicore: assert
//- /main.rs edition:2015
fn main() {
assert!(true, "{} {:?}", arg1(a, b, c), arg2);
}
"#,
expect![[r#"
fn main() {
{
if !(true ) {
$crate::panic::panic_2021!("{} {:?}", arg1(a, b, c), arg2);
}
};
}
"#]],
); );
} }

View file

@ -1218,8 +1218,10 @@ m! {
macro_rules! m { macro_rules! m {
($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} ) ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} )
} }
#[doc = " Single Line Doc 1"] #[doc = r" Single Line Doc 1"]
#[doc = "\n MultiLines Doc\n "] fn bar() {} #[doc = r"
MultiLines Doc
"] fn bar() {}
"##]], "##]],
); );
} }
@ -1260,8 +1262,10 @@ m! {
macro_rules! m { macro_rules! m {
($(#[$ m:meta])+) => ( $(#[$m])+ fn bar() {} ) ($(#[$ m:meta])+) => ( $(#[$m])+ fn bar() {} )
} }
#[doc = " 錦瑟無端五十弦,一弦一柱思華年。"] #[doc = r" 錦瑟無端五十弦,一弦一柱思華年。"]
#[doc = "\n 莊生曉夢迷蝴蝶,望帝春心託杜鵑。\n "] fn bar() {} #[doc = r"
"] fn bar() {}
"##]], "##]],
); );
} }
@ -1281,7 +1285,7 @@ m! {
macro_rules! m { macro_rules! m {
($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} ) ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} )
} }
#[doc = " \\ \" \'"] fn bar() {} #[doc = r#" \ " '"#] fn bar() {}
"##]], "##]],
); );
} }

View file

@ -18,7 +18,7 @@ macro_rules! m {
($($false:ident)*) => ($false); ($($false:ident)*) => ($false);
(double_dollar) => ($$); (double_dollar) => ($$);
($) => (m!($);); ($) => (m!($););
($($t:tt)*) => ($( ${ignore(t)} ${index()} )-*); ($($t:tt)*) => ($( ${ignore($t)} ${index()} )-*);
} }
m!($); m!($);
"#, "#,
@ -33,7 +33,7 @@ macro_rules! m {
($($false:ident)*) => ($false); ($($false:ident)*) => ($false);
(double_dollar) => ($$); (double_dollar) => ($$);
($) => (m!($);); ($) => (m!($););
($($t:tt)*) => ($( ${ignore(t)} ${index()} )-*); ($($t:tt)*) => ($( ${ignore($t)} ${index()} )-*);
} }
m!($); m!($);
"#]], "#]],

View file

@ -77,13 +77,13 @@ fn test_metavar_exprs() {
check( check(
r#" r#"
macro_rules! m { macro_rules! m {
( $( $t:tt )* ) => ( $( ${ignore(t)} -${index()} )-* ); ( $( $t:tt )* ) => ( $( ${ignore($t)} -${index()} )-* );
} }
const _: i32 = m!(a b c); const _: i32 = m!(a b c);
"#, "#,
expect![[r#" expect![[r#"
macro_rules! m { macro_rules! m {
( $( $t:tt )* ) => ( $( ${ignore(t)} -${index()} )-* ); ( $( $t:tt )* ) => ( $( ${ignore($t)} -${index()} )-* );
} }
const _: i32 = -0--1--2; const _: i32 = -0--1--2;
"#]], "#]],
@ -96,7 +96,7 @@ fn count_basic() {
r#" r#"
macro_rules! m { macro_rules! m {
($($t:ident),*) => { ($($t:ident),*) => {
${count(t)} ${count($t)}
} }
} }
@ -109,7 +109,7 @@ fn test() {
expect![[r#" expect![[r#"
macro_rules! m { macro_rules! m {
($($t:ident),*) => { ($($t:ident),*) => {
${count(t)} ${count($t)}
} }
} }
@ -130,9 +130,9 @@ macro_rules! foo {
($( $( $($t:ident)* ),* );*) => { ($( $( $($t:ident)* ),* );*) => {
$( $(
{ {
let depth_none = ${count(t)}; let depth_none = ${count($t)};
let depth_zero = ${count(t, 0)}; let depth_zero = ${count($t, 0)};
let depth_one = ${count(t, 1)}; let depth_one = ${count($t, 1)};
} }
)* )*
} }
@ -150,9 +150,9 @@ macro_rules! foo {
($( $( $($t:ident)* ),* );*) => { ($( $( $($t:ident)* ),* );*) => {
$( $(
{ {
let depth_none = ${count(t)}; let depth_none = ${count($t)};
let depth_zero = ${count(t, 0)}; let depth_zero = ${count($t, 0)};
let depth_one = ${count(t, 1)}; let depth_one = ${count($t, 1)};
} }
)* )*
} }
@ -160,11 +160,11 @@ macro_rules! foo {
fn bar() { fn bar() {
{ {
let depth_none = 6; let depth_none = 3;
let depth_zero = 3; let depth_zero = 3;
let depth_one = 6; let depth_one = 6;
} { } {
let depth_none = 3; let depth_none = 1;
let depth_zero = 1; let depth_zero = 1;
let depth_one = 3; let depth_one = 3;
} }
@ -178,12 +178,12 @@ fn count_depth_out_of_bounds() {
check( check(
r#" r#"
macro_rules! foo { macro_rules! foo {
($($t:ident)*) => { ${count(t, 1)} }; ($($t:ident)*) => { ${count($t, 1)} };
($( $( $l:literal )* );*) => { $(${count(l, 1)};)* } ($( $( $l:literal )* );*) => { $(${count($l, 1)};)* }
} }
macro_rules! bar { macro_rules! bar {
($($t:ident)*) => { ${count(t, 1024)} }; ($($t:ident)*) => { ${count($t, 1024)} };
($( $( $l:literal )* );*) => { $(${count(l, 8192)};)* } ($( $( $l:literal )* );*) => { $(${count($l, 8192)};)* }
} }
fn test() { fn test() {
@ -195,19 +195,21 @@ fn test() {
"#, "#,
expect![[r#" expect![[r#"
macro_rules! foo { macro_rules! foo {
($($t:ident)*) => { ${count(t, 1)} }; ($($t:ident)*) => { ${count($t, 1)} };
($( $( $l:literal )* );*) => { $(${count(l, 1)};)* } ($( $( $l:literal )* );*) => { $(${count($l, 1)};)* }
} }
macro_rules! bar { macro_rules! bar {
($($t:ident)*) => { ${count(t, 1024)} }; ($($t:ident)*) => { ${count($t, 1024)} };
($( $( $l:literal )* );*) => { $(${count(l, 8192)};)* } ($( $( $l:literal )* );*) => { $(${count($l, 8192)};)* }
} }
fn test() { fn test() {
/* error: ${count} out of bounds */; 2;
/* error: ${count} out of bounds */; 2;
/* error: ${count} out of bounds */; 1;;
/* error: ${count} out of bounds */; 2;
2;
1;;
} }
"#]], "#]],
); );
@ -218,8 +220,8 @@ fn misplaced_count() {
check( check(
r#" r#"
macro_rules! foo { macro_rules! foo {
($($t:ident)*) => { $(${count(t)})* }; ($($t:ident)*) => { $(${count($t)})* };
($l:literal) => { ${count(l)} } ($l:literal) => { ${count($l)} }
} }
fn test() { fn test() {
@ -229,13 +231,13 @@ fn test() {
"#, "#,
expect![[r#" expect![[r#"
macro_rules! foo { macro_rules! foo {
($($t:ident)*) => { $(${count(t)})* }; ($($t:ident)*) => { $(${count($t)})* };
($l:literal) => { ${count(l)} } ($l:literal) => { ${count($l)} }
} }
fn test() { fn test() {
/* error: ${count} misplaced */; 1 1 1;
/* error: ${count} misplaced */; 1;
} }
"#]], "#]],
); );
@ -246,13 +248,13 @@ fn malformed_count() {
check( check(
r#" r#"
macro_rules! too_many_args { macro_rules! too_many_args {
($($t:ident)*) => { ${count(t, 1, leftover)} } ($($t:ident)*) => { ${count($t, 1, leftover)} }
} }
macro_rules! depth_suffixed { macro_rules! depth_suffixed {
($($t:ident)*) => { ${count(t, 0usize)} } ($($t:ident)*) => { ${count($t, 0usize)} }
} }
macro_rules! depth_too_large { macro_rules! depth_too_large {
($($t:ident)*) => { ${count(t, 18446744073709551616)} } ($($t:ident)*) => { ${count($t, 18446744073709551616)} }
} }
fn test() { fn test() {
@ -263,13 +265,13 @@ fn test() {
"#, "#,
expect![[r#" expect![[r#"
macro_rules! too_many_args { macro_rules! too_many_args {
($($t:ident)*) => { ${count(t, 1, leftover)} } ($($t:ident)*) => { ${count($t, 1, leftover)} }
} }
macro_rules! depth_suffixed { macro_rules! depth_suffixed {
($($t:ident)*) => { ${count(t, 0usize)} } ($($t:ident)*) => { ${count($t, 0usize)} }
} }
macro_rules! depth_too_large { macro_rules! depth_too_large {
($($t:ident)*) => { ${count(t, 18446744073709551616)} } ($($t:ident)*) => { ${count($t, 18446744073709551616)} }
} }
fn test() { fn test() {
@ -288,7 +290,7 @@ fn count_interaction_with_empty_binding() {
r#" r#"
macro_rules! m { macro_rules! m {
($($t:ident),*) => { ($($t:ident),*) => {
${count(t, 100)} ${count($t, 100)}
} }
} }
@ -299,7 +301,7 @@ fn test() {
expect![[r#" expect![[r#"
macro_rules! m { macro_rules! m {
($($t:ident),*) => { ($($t:ident),*) => {
${count(t, 100)} ${count($t, 100)}
} }
} }

View file

@ -16,9 +16,15 @@ mod proc_macros;
use std::{iter, ops::Range, sync}; use std::{iter, ops::Range, sync};
use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase}; use base_db::SourceDatabase;
use expect_test::Expect; use expect_test::Expect;
use hir_expand::{db::ExpandDatabase, span::SpanMapRef, InFile, MacroFileId, MacroFileIdExt}; use hir_expand::{
db::ExpandDatabase,
proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
span_map::SpanMapRef,
InFile, MacroFileId, MacroFileIdExt,
};
use span::Span;
use stdx::format_to; use stdx::format_to;
use syntax::{ use syntax::{
ast::{self, edit::IndentLevel}, ast::{self, edit::IndentLevel},
@ -26,10 +32,10 @@ use syntax::{
SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT}, SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT},
SyntaxNode, T, SyntaxNode, T,
}; };
use test_fixture::WithFixture;
use crate::{ use crate::{
db::DefDatabase, db::DefDatabase,
macro_id_to_def_id,
nameres::{DefMap, MacroSubNs, ModuleSource}, nameres::{DefMap, MacroSubNs, ModuleSource},
resolver::HasResolver, resolver::HasResolver,
src::HasSource, src::HasSource,
@ -50,7 +56,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
.into(), .into(),
ProcMacro { ProcMacro {
name: "identity_when_valid".into(), name: "identity_when_valid".into(),
kind: base_db::ProcMacroKind::Attr, kind: ProcMacroKind::Attr,
expander: sync::Arc::new(IdentityWhenValidProcMacroExpander), expander: sync::Arc::new(IdentityWhenValidProcMacroExpander),
}, },
)]; )];
@ -90,7 +96,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
.as_call_id_with_errors(&db, krate, |path| { .as_call_id_with_errors(&db, krate, |path| {
resolver resolver
.resolve_path_as_macro(&db, &path, Some(MacroSubNs::Bang)) .resolve_path_as_macro(&db, &path, Some(MacroSubNs::Bang))
.map(|(it, _)| macro_id_to_def_id(&db, it)) .map(|(it, _)| db.macro_def(it))
}) })
.unwrap(); .unwrap();
let macro_call_id = res.value.unwrap(); let macro_call_id = res.value.unwrap();
@ -307,16 +313,16 @@ fn pretty_print_macro_expansion(
// compile errors. // compile errors.
#[derive(Debug)] #[derive(Debug)]
struct IdentityWhenValidProcMacroExpander; struct IdentityWhenValidProcMacroExpander;
impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander { impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
fn expand( fn expand(
&self, &self,
subtree: &Subtree, subtree: &Subtree,
_: Option<&Subtree>, _: Option<&Subtree>,
_: &base_db::Env, _: &base_db::Env,
_: SpanData, _: Span,
_: SpanData, _: Span,
_: SpanData, _: Span,
) -> Result<Subtree, base_db::ProcMacroExpansionError> { ) -> Result<Subtree, ProcMacroExpansionError> {
let (parse, _) = let (parse, _) =
::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems); ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
if parse.errors().is_empty() { if parse.errors().is_empty() {

View file

@ -59,8 +59,11 @@ mod tests;
use std::{cmp::Ord, ops::Deref}; use std::{cmp::Ord, ops::Deref};
use base_db::{CrateId, Edition, FileId, ProcMacroKind}; use base_db::{CrateId, Edition, FileId};
use hir_expand::{ast_id_map::FileAstId, name::Name, HirFileId, InFile, MacroCallId, MacroDefId}; use hir_expand::{
ast_id_map::FileAstId, name::Name, proc_macro::ProcMacroKind, HirFileId, InFile, MacroCallId,
MacroDefId,
};
use itertools::Itertools; use itertools::Itertools;
use la_arena::Arena; use la_arena::Arena;
use profile::Count; use profile::Count;
@ -97,7 +100,7 @@ pub struct DefMap {
/// contains this block. /// contains this block.
block: Option<BlockInfo>, block: Option<BlockInfo>,
/// The modules and their data declared in this crate. /// The modules and their data declared in this crate.
modules: Arena<ModuleData>, pub modules: Arena<ModuleData>,
krate: CrateId, krate: CrateId,
/// The prelude module for this crate. This either comes from an import /// The prelude module for this crate. This either comes from an import
/// marked with the `prelude_import` attribute, or (in the normal case) from /// marked with the `prelude_import` attribute, or (in the normal case) from
@ -623,8 +626,9 @@ impl DefMap {
self.diagnostics.as_slice() self.diagnostics.as_slice()
} }
pub fn recursion_limit(&self) -> Option<u32> { pub fn recursion_limit(&self) -> u32 {
self.data.recursion_limit // 128 is the default in rustc
self.data.recursion_limit.unwrap_or(128)
} }
} }

View file

@ -8,7 +8,6 @@ use crate::{
attr_macro_as_call_id, attr_macro_as_call_id,
db::DefDatabase, db::DefDatabase,
item_scope::BuiltinShadowMode, item_scope::BuiltinShadowMode,
macro_id_to_def_id,
nameres::path_resolution::ResolveMode, nameres::path_resolution::ResolveMode,
path::{ModPath, PathKind}, path::{ModPath, PathKind},
AstIdWithPath, LocalModuleId, UnresolvedMacro, AstIdWithPath, LocalModuleId, UnresolvedMacro,
@ -63,7 +62,7 @@ impl DefMap {
&ast_id, &ast_id,
attr, attr,
self.krate, self.krate,
macro_id_to_def_id(db, def), db.macro_def(def),
))) )))
} }

View file

@ -5,7 +5,7 @@
use std::{cmp::Ordering, iter, mem}; use std::{cmp::Ordering, iter, mem};
use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId}; use base_db::{CrateId, Dependency, Edition, FileId};
use cfg::{CfgExpr, CfgOptions}; use cfg::{CfgExpr, CfgOptions};
use either::Either; use either::Either;
use hir_expand::{ use hir_expand::{
@ -15,7 +15,7 @@ use hir_expand::{
builtin_derive_macro::find_builtin_derive, builtin_derive_macro::find_builtin_derive,
builtin_fn_macro::find_builtin_macro, builtin_fn_macro::find_builtin_macro,
name::{name, AsName, Name}, name::{name, AsName, Name},
proc_macro::ProcMacroExpander, proc_macro::CustomProcMacroExpander,
ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc,
MacroDefId, MacroDefKind, MacroDefId, MacroDefKind,
}; };
@ -23,6 +23,7 @@ use itertools::{izip, Itertools};
use la_arena::Idx; use la_arena::Idx;
use limit::Limit; use limit::Limit;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use span::{Span, SyntaxContextId};
use stdx::always; use stdx::always;
use syntax::{ast, SmolStr}; use syntax::{ast, SmolStr};
use triomphe::Arc; use triomphe::Arc;
@ -35,9 +36,9 @@ use crate::{
item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports}, item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports},
item_tree::{ item_tree::{
self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode, self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode,
MacroCall, MacroDef, MacroRules, Mod, ModItem, ModKind, TreeId, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId,
}, },
macro_call_as_call_id, macro_call_as_call_id_with_eager, macro_id_to_def_id, macro_call_as_call_id, macro_call_as_call_id_with_eager,
nameres::{ nameres::{
diagnostics::DefDiagnostic, diagnostics::DefDiagnostic,
mod_resolution::ModDir, mod_resolution::ModDir,
@ -53,8 +54,9 @@ use crate::{
AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId, AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId,
ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern,
ItemContainerId, LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId, ItemContainerId, LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId,
MacroRulesId, MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId,
StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId, UseLoc, ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc,
UnresolvedMacro, UseId, UseLoc,
}; };
static GLOB_RECURSION_LIMIT: Limit = Limit::new(100); static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
@ -86,16 +88,21 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
// FIXME: a hacky way to create a Name from string. // FIXME: a hacky way to create a Name from string.
let name = tt::Ident { let name = tt::Ident {
text: it.name.clone(), text: it.name.clone(),
span: tt::SpanData { span: Span {
range: syntax::TextRange::empty(syntax::TextSize::new(0)), range: syntax::TextRange::empty(syntax::TextSize::new(0)),
anchor: base_db::span::SpanAnchor { anchor: span::SpanAnchor {
file_id: FileId::BOGUS, file_id: FileId::BOGUS,
ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID, ast_id: span::ROOT_ERASED_FILE_AST_ID,
}, },
ctx: SyntaxContextId::ROOT, ctx: SyntaxContextId::ROOT,
}, },
}; };
(name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32))) (
name.as_name(),
CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId(
idx as u32,
)),
)
}) })
.collect()) .collect())
} }
@ -222,13 +229,13 @@ enum MacroDirectiveKind {
FnLike { FnLike {
ast_id: AstIdWithPath<ast::MacroCall>, ast_id: AstIdWithPath<ast::MacroCall>,
expand_to: ExpandTo, expand_to: ExpandTo,
call_site: SyntaxContextId, call_site: Span,
}, },
Derive { Derive {
ast_id: AstIdWithPath<ast::Adt>, ast_id: AstIdWithPath<ast::Adt>,
derive_attr: AttrId, derive_attr: AttrId,
derive_pos: usize, derive_pos: usize,
call_site: SyntaxContextId, call_site: Span,
}, },
Attr { Attr {
ast_id: AstIdWithPath<ast::Item>, ast_id: AstIdWithPath<ast::Item>,
@ -253,7 +260,7 @@ struct DefCollector<'a> {
/// built by the build system, and is the list of proc. macros we can actually expand. It is /// built by the build system, and is the list of proc. macros we can actually expand. It is
/// empty when proc. macro support is disabled (in which case we still do name resolution for /// empty when proc. macro support is disabled (in which case we still do name resolution for
/// them). /// them).
proc_macros: Result<Vec<(Name, ProcMacroExpander)>, Box<str>>, proc_macros: Result<Vec<(Name, CustomProcMacroExpander)>, Box<str>>,
is_proc_macro: bool, is_proc_macro: bool,
from_glob_import: PerNsGlobImports, from_glob_import: PerNsGlobImports,
/// If we fail to resolve an attribute on a `ModItem`, we fall back to ignoring the attribute. /// If we fail to resolve an attribute on a `ModItem`, we fall back to ignoring the attribute.
@ -545,6 +552,8 @@ impl DefCollector<'_> {
Edition::Edition2015 => name![rust_2015], Edition::Edition2015 => name![rust_2015],
Edition::Edition2018 => name![rust_2018], Edition::Edition2018 => name![rust_2018],
Edition::Edition2021 => name![rust_2021], Edition::Edition2021 => name![rust_2021],
// FIXME: update this when rust_2024 exists
Edition::Edition2024 => name![rust_2021],
}; };
let path_kind = match self.def_map.data.edition { let path_kind = match self.def_map.data.edition {
@ -603,18 +612,21 @@ impl DefCollector<'_> {
let (expander, kind) = let (expander, kind) =
match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) { match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) {
Ok(Some(&(_, expander))) => (expander, kind), Ok(Some(&(_, expander))) => (expander, kind),
_ => (ProcMacroExpander::dummy(), kind), _ => (CustomProcMacroExpander::dummy(), kind),
}; };
let proc_macro_id = let proc_macro_id = ProcMacroLoc {
ProcMacroLoc { container: self.def_map.crate_root(), id, expander, kind } container: self.def_map.crate_root(),
id,
expander,
kind,
edition: self.def_map.data.edition,
}
.intern(self.db); .intern(self.db);
self.define_proc_macro(def.name.clone(), proc_macro_id); self.define_proc_macro(def.name.clone(), proc_macro_id);
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
if let ProcMacroKind::CustomDerive { helpers } = def.kind { if let ProcMacroKind::CustomDerive { helpers } = def.kind {
crate_data crate_data.exported_derives.insert(self.db.macro_def(proc_macro_id.into()), helpers);
.exported_derives
.insert(macro_id_to_def_id(self.db, proc_macro_id.into()), helpers);
} }
crate_data.fn_proc_macro_mapping.insert(fn_id, proc_macro_id); crate_data.fn_proc_macro_mapping.insert(fn_id, proc_macro_id);
} }
@ -1125,10 +1137,7 @@ impl DefCollector<'_> {
BuiltinShadowMode::Module, BuiltinShadowMode::Module,
Some(subns), Some(subns),
); );
resolved_res resolved_res.resolved_def.take_macros().map(|it| (it, self.db.macro_def(it)))
.resolved_def
.take_macros()
.map(|it| (it, macro_id_to_def_id(self.db, it)))
}; };
let resolver_def_id = |path| resolver(path).map(|(_, it)| it); let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
@ -1143,6 +1152,9 @@ impl DefCollector<'_> {
resolver_def_id, resolver_def_id,
); );
if let Ok(Some(call_id)) = call_id { if let Ok(Some(call_id)) = call_id {
self.def_map.modules[directive.module_id]
.scope
.add_macro_invoc(ast_id.ast_id, call_id);
push_resolved(directive, call_id); push_resolved(directive, call_id);
res = ReachedFixedPoint::No; res = ReachedFixedPoint::No;
@ -1299,14 +1311,13 @@ impl DefCollector<'_> {
// Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute. // Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute.
let call_id = let call_id =
attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def); attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def);
let loc: MacroCallLoc = self.db.lookup_intern_macro_call(call_id);
// If proc attribute macro expansion is disabled, skip expanding it here // If proc attribute macro expansion is disabled, skip expanding it here
if !self.db.expand_proc_attr_macros() { if !self.db.expand_proc_attr_macros() {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
directive.module_id, directive.module_id,
loc.kind, self.db.lookup_intern_macro_call(call_id).kind,
loc.def.krate, def.krate,
)); ));
return recollect_without(self); return recollect_without(self);
} }
@ -1314,14 +1325,14 @@ impl DefCollector<'_> {
// Skip #[test]/#[bench] expansion, which would merely result in more memory usage // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
// due to duplicating functions into macro expansions // due to duplicating functions into macro expansions
if matches!( if matches!(
loc.def.kind, def.kind,
MacroDefKind::BuiltInAttr(expander, _) MacroDefKind::BuiltInAttr(expander, _)
if expander.is_test() || expander.is_bench() if expander.is_test() || expander.is_bench()
) { ) {
return recollect_without(self); return recollect_without(self);
} }
if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind { if let MacroDefKind::ProcMacro(exp, ..) = def.kind {
if exp.is_dummy() { if exp.is_dummy() {
// If there's no expander for the proc macro (e.g. // If there's no expander for the proc macro (e.g.
// because proc macros are disabled, or building the // because proc macros are disabled, or building the
@ -1329,8 +1340,8 @@ impl DefCollector<'_> {
// expansion like we would if it was disabled // expansion like we would if it was disabled
self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
directive.module_id, directive.module_id,
loc.kind, self.db.lookup_intern_macro_call(call_id).kind,
loc.def.krate, def.krate,
)); ));
return recollect_without(self); return recollect_without(self);
@ -1436,10 +1447,7 @@ impl DefCollector<'_> {
BuiltinShadowMode::Module, BuiltinShadowMode::Module,
Some(MacroSubNs::Bang), Some(MacroSubNs::Bang),
); );
resolved_res resolved_res.resolved_def.take_macros().map(|it| self.db.macro_def(it))
.resolved_def
.take_macros()
.map(|it| macro_id_to_def_id(self.db, it))
}, },
); );
if let Err(UnresolvedMacro { path }) = macro_call_as_call_id { if let Err(UnresolvedMacro { path }) = macro_call_as_call_id {
@ -1645,7 +1653,7 @@ impl ModCollector<'_, '_> {
), ),
ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container), ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container),
ModItem::MacroRules(id) => self.collect_macro_rules(id, module), ModItem::MacroRules(id) => self.collect_macro_rules(id, module),
ModItem::MacroDef(id) => self.collect_macro_def(id, module), ModItem::Macro2(id) => self.collect_macro_def(id, module),
ModItem::Impl(imp) => { ModItem::Impl(imp) => {
let impl_id = let impl_id =
ImplLoc { container: module, id: ItemTreeId::new(self.tree_id, imp) } ImplLoc { container: module, id: ItemTreeId::new(self.tree_id, imp) }
@ -2090,11 +2098,11 @@ impl ModCollector<'_, '_> {
// FIXME: a hacky way to create a Name from string. // FIXME: a hacky way to create a Name from string.
name = tt::Ident { name = tt::Ident {
text: it.clone(), text: it.clone(),
span: tt::SpanData { span: Span {
range: syntax::TextRange::empty(syntax::TextSize::new(0)), range: syntax::TextRange::empty(syntax::TextSize::new(0)),
anchor: base_db::span::SpanAnchor { anchor: span::SpanAnchor {
file_id: FileId::BOGUS, file_id: FileId::BOGUS,
ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID, ast_id: span::ROOT_ERASED_FILE_AST_ID,
}, },
ctx: SyntaxContextId::ROOT, ctx: SyntaxContextId::ROOT,
}, },
@ -2136,12 +2144,16 @@ impl ModCollector<'_, '_> {
}; };
let allow_internal_unsafe = attrs.by_key("allow_internal_unsafe").exists(); let allow_internal_unsafe = attrs.by_key("allow_internal_unsafe").exists();
let mut flags = MacroRulesLocFlags::empty();
flags.set(MacroRulesLocFlags::LOCAL_INNER, local_inner);
flags.set(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE, allow_internal_unsafe);
let macro_id = MacroRulesLoc { let macro_id = MacroRulesLoc {
container: module, container: module,
id: ItemTreeId::new(self.tree_id, id), id: ItemTreeId::new(self.tree_id, id),
local_inner, flags,
allow_internal_unsafe,
expander, expander,
edition: self.def_collector.def_map.data.edition,
} }
.intern(self.def_collector.db); .intern(self.def_collector.db);
self.def_collector.define_macro_rules( self.def_collector.define_macro_rules(
@ -2152,7 +2164,7 @@ impl ModCollector<'_, '_> {
); );
} }
fn collect_macro_def(&mut self, id: FileItemTreeId<MacroDef>, module: ModuleId) { fn collect_macro_def(&mut self, id: FileItemTreeId<Macro2>, module: ModuleId) {
let krate = self.def_collector.def_map.krate; let krate = self.def_collector.def_map.krate;
let mac = &self.item_tree[id]; let mac = &self.item_tree[id];
let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast()); let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast());
@ -2207,6 +2219,7 @@ impl ModCollector<'_, '_> {
id: ItemTreeId::new(self.tree_id, id), id: ItemTreeId::new(self.tree_id, id),
expander, expander,
allow_internal_unsafe, allow_internal_unsafe,
edition: self.def_collector.def_map.data.edition,
} }
.intern(self.def_collector.db); .intern(self.def_collector.db);
self.def_collector.define_macro_def( self.def_collector.define_macro_def(
@ -2220,7 +2233,7 @@ impl ModCollector<'_, '_> {
Arc::get_mut(&mut self.def_collector.def_map.data) Arc::get_mut(&mut self.def_collector.def_map.data)
.unwrap() .unwrap()
.exported_derives .exported_derives
.insert(macro_id_to_def_id(self.def_collector.db, macro_id.into()), helpers); .insert(self.def_collector.db.macro_def(macro_id.into()), helpers);
} }
} }
} }
@ -2259,7 +2272,7 @@ impl ModCollector<'_, '_> {
Some(MacroSubNs::Bang), Some(MacroSubNs::Bang),
) )
}) })
.map(|it| macro_id_to_def_id(self.def_collector.db, it)) .map(|it| self.def_collector.db.macro_def(it))
}) })
}, },
|path| { |path| {
@ -2271,7 +2284,7 @@ impl ModCollector<'_, '_> {
BuiltinShadowMode::Module, BuiltinShadowMode::Module,
Some(MacroSubNs::Bang), Some(MacroSubNs::Bang),
); );
resolved_res.resolved_def.take_macros().map(|it| macro_id_to_def_id(db, it)) resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it))
}, },
) { ) {
// FIXME: if there were errors, this mightve been in the eager expansion from an // FIXME: if there were errors, this mightve been in the eager expansion from an
@ -2279,10 +2292,13 @@ impl ModCollector<'_, '_> {
if res.err.is_none() { if res.err.is_none() {
// Legacy macros need to be expanded immediately, so that any macros they produce // Legacy macros need to be expanded immediately, so that any macros they produce
// are in scope. // are in scope.
if let Some(val) = res.value { if let Some(call_id) = res.value {
self.def_collector.def_map.modules[self.module_id]
.scope
.add_macro_invoc(ast_id.ast_id, call_id);
self.def_collector.collect_macro_expansion( self.def_collector.collect_macro_expansion(
self.module_id, self.module_id,
val, call_id,
self.macro_depth + 1, self.macro_depth + 1,
container, container,
); );
@ -2296,7 +2312,7 @@ impl ModCollector<'_, '_> {
self.def_collector.unresolved_macros.push(MacroDirective { self.def_collector.unresolved_macros.push(MacroDirective {
module_id: self.module_id, module_id: self.module_id,
depth: self.macro_depth + 1, depth: self.macro_depth + 1,
kind: MacroDirectiveKind::FnLike { ast_id, expand_to: expand_to, call_site }, kind: MacroDirectiveKind::FnLike { ast_id, expand_to, call_site },
container, container,
}); });
} }
@ -2363,8 +2379,10 @@ impl ModCollector<'_, '_> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use base_db::SourceDatabase;
use test_fixture::WithFixture;
use crate::{db::DefDatabase, test_db::TestDB}; use crate::{db::DefDatabase, test_db::TestDB};
use base_db::{fixture::WithFixture, SourceDatabase};
use super::*; use super::*;

View file

@ -19,11 +19,13 @@ pub enum ProcMacroKind {
} }
impl ProcMacroKind { impl ProcMacroKind {
pub(super) fn to_basedb_kind(&self) -> base_db::ProcMacroKind { pub(super) fn to_basedb_kind(&self) -> hir_expand::proc_macro::ProcMacroKind {
match self { match self {
ProcMacroKind::CustomDerive { .. } => base_db::ProcMacroKind::CustomDerive, ProcMacroKind::CustomDerive { .. } => {
ProcMacroKind::FnLike => base_db::ProcMacroKind::FuncLike, hir_expand::proc_macro::ProcMacroKind::CustomDerive
ProcMacroKind::Attr => base_db::ProcMacroKind::Attr, }
ProcMacroKind::FnLike => hir_expand::proc_macro::ProcMacroKind::FuncLike,
ProcMacroKind::Attr => hir_expand::proc_macro::ProcMacroKind::Attr,
} }
} }
} }

View file

@ -4,8 +4,9 @@ mod macros;
mod mod_resolution; mod mod_resolution;
mod primitives; mod primitives;
use base_db::{fixture::WithFixture, SourceDatabase}; use base_db::SourceDatabase;
use expect_test::{expect, Expect}; use expect_test::{expect, Expect};
use test_fixture::WithFixture;
use triomphe::Arc; use triomphe::Arc;
use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB}; use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB};

View file

@ -1,11 +1,8 @@
use base_db::{SourceDatabase, SourceDatabaseExt}; use base_db::{SourceDatabase, SourceDatabaseExt};
use test_fixture::WithFixture;
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId};
db::DefDatabase,
nameres::tests::{TestDB, WithFixture},
AdtId, ModuleDefId,
};
fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) { fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) {
let (mut db, pos) = TestDB::with_position(ra_fixture_initial); let (mut db, pos) = TestDB::with_position(ra_fixture_initial);

View file

@ -1,6 +1,12 @@
use super::*; use expect_test::expect;
use test_fixture::WithFixture;
use itertools::Itertools; use itertools::Itertools;
use crate::nameres::tests::check;
use super::*;
#[test] #[test]
fn macro_rules_are_globally_visible() { fn macro_rules_are_globally_visible() {
check( check(

View file

@ -2,7 +2,10 @@
use std::{fmt, hash::BuildHasherDefault}; use std::{fmt, hash::BuildHasherDefault};
use base_db::CrateId; use base_db::CrateId;
use hir_expand::name::{name, Name}; use hir_expand::{
name::{name, Name},
MacroDefId,
};
use indexmap::IndexMap; use indexmap::IndexMap;
use intern::Interned; use intern::Interned;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
@ -406,6 +409,15 @@ impl Resolver {
.take_macros_import() .take_macros_import()
} }
pub fn resolve_path_as_macro_def(
&self,
db: &dyn DefDatabase,
path: &ModPath,
expected_macro_kind: Option<MacroSubNs>,
) -> Option<MacroDefId> {
self.resolve_path_as_macro(db, path, expected_macro_kind).map(|(it, _)| db.macro_def(it))
}
/// Returns a set of names available in the current scope. /// Returns a set of names available in the current scope.
/// ///
/// Note that this is a somewhat fuzzy concept -- internally, the compiler /// Note that this is a somewhat fuzzy concept -- internally, the compiler

View file

@ -2,7 +2,7 @@
use std::iter; use std::iter;
use hir_expand::{span::SpanMapRef, InFile}; use hir_expand::{span_map::SpanMapRef, InFile};
use la_arena::ArenaMap; use la_arena::ArenaMap;
use syntax::ast; use syntax::ast;
use triomphe::Arc; use triomphe::Arc;

View file

@ -15,7 +15,7 @@ doctest = false
cov-mark = "2.0.0-pre.1" cov-mark = "2.0.0-pre.1"
tracing.workspace = true tracing.workspace = true
either.workspace = true either.workspace = true
rustc-hash = "1.1.0" rustc-hash.workspace = true
la-arena.workspace = true la-arena.workspace = true
itertools.workspace = true itertools.workspace = true
hashbrown.workspace = true hashbrown.workspace = true
@ -32,6 +32,10 @@ profile.workspace = true
tt.workspace = true tt.workspace = true
mbe.workspace = true mbe.workspace = true
limit.workspace = true limit.workspace = true
span.workspace = true
[dev-dependencies] [dev-dependencies]
expect-test = "1.4.0" expect-test = "1.4.0"
[lints]
workspace = true

View file

@ -5,6 +5,8 @@
//! item as an ID. That way, id's don't change unless the set of items itself //! item as an ID. That way, id's don't change unless the set of items itself
//! changes. //! changes.
// FIXME: Consider moving this into the span crate
use std::{ use std::{
any::type_name, any::type_name,
fmt, fmt,
@ -17,9 +19,9 @@ use profile::Count;
use rustc_hash::FxHasher; use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
use crate::db; use crate::db::ExpandDatabase;
pub use base_db::span::ErasedFileAstId; pub use span::ErasedFileAstId;
/// `AstId` points to an AST node in any file. /// `AstId` points to an AST node in any file.
/// ///
@ -27,13 +29,13 @@ pub use base_db::span::ErasedFileAstId;
pub type AstId<N> = crate::InFile<FileAstId<N>>; pub type AstId<N> = crate::InFile<FileAstId<N>>;
impl<N: AstIdNode> AstId<N> { impl<N: AstIdNode> AstId<N> {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N { pub fn to_node(&self, db: &dyn ExpandDatabase) -> N {
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
} }
pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile<N> { pub fn to_in_file_node(&self, db: &dyn ExpandDatabase) -> crate::InFile<N> {
crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))) crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
} }
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> { pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> AstPtr<N> {
db.ast_id_map(self.file_id).get(self.value) db.ast_id_map(self.file_id).get(self.value)
} }
} }
@ -41,7 +43,7 @@ impl<N: AstIdNode> AstId<N> {
pub type ErasedAstId = crate::InFile<ErasedFileAstId>; pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
impl ErasedAstId { impl ErasedAstId {
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr { pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> SyntaxNodePtr {
db.ast_id_map(self.file_id).get_erased(self.value) db.ast_id_map(self.file_id).get_erased(self.value)
} }
} }
@ -197,6 +199,19 @@ impl AstIdMap {
FileAstId { raw, covariant: PhantomData } FileAstId { raw, covariant: PhantomData }
} }
pub fn ast_id_for_ptr<N: AstIdNode>(&self, ptr: AstPtr<N>) -> FileAstId<N> {
let ptr = ptr.syntax_node_ptr();
let hash = hash_ptr(&ptr);
match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
Some((&raw, &())) => FileAstId { raw, covariant: PhantomData },
None => panic!(
"Can't find {:?} in AstIdMap:\n{:?}",
ptr,
self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
),
}
}
pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> { pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap() AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
} }

View file

@ -1,19 +1,20 @@
//! A higher level attributes based on TokenTree, with also some shortcuts. //! A higher level attributes based on TokenTree, with also some shortcuts.
use std::{fmt, ops}; use std::{fmt, ops};
use base_db::{span::SyntaxContextId, CrateId}; use base_db::CrateId;
use cfg::CfgExpr; use cfg::CfgExpr;
use either::Either; use either::Either;
use intern::Interned; use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use span::Span;
use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode}; use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
db::ExpandDatabase, db::ExpandDatabase,
mod_path::ModPath, mod_path::ModPath,
span::SpanMapRef, span_map::SpanMapRef,
tt::{self, Subtree}, tt::{self, Subtree},
InFile, InFile,
}; };
@ -52,7 +53,7 @@ impl RawAttrs {
id, id,
input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
path: Interned::new(ModPath::from(crate::name!(doc))), path: Interned::new(ModPath::from(crate::name!(doc))),
ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx, span: span_map.span_for_range(comment.syntax().text_range()),
}), }),
}); });
let entries: Arc<[Attr]> = Arc::from_iter(entries); let entries: Arc<[Attr]> = Arc::from_iter(entries);
@ -119,7 +120,7 @@ impl RawAttrs {
let attrs = let attrs =
parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| { parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
let tree = Subtree { let tree = Subtree {
delimiter: tt::Delimiter::dummy_invisible(), delimiter: tt::Delimiter::invisible_spanned(attr.first()?.first_span()),
token_trees: attr.to_vec(), token_trees: attr.to_vec(),
}; };
Attr::from_tt(db, &tree, index.with_cfg_attr(idx)) Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
@ -176,7 +177,7 @@ pub struct Attr {
pub id: AttrId, pub id: AttrId,
pub path: Interned<ModPath>, pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>, pub input: Option<Interned<AttrInput>>,
pub ctxt: SyntaxContextId, pub span: Span,
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -205,6 +206,7 @@ impl Attr {
id: AttrId, id: AttrId,
) -> Option<Attr> { ) -> Option<Attr> {
let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?); let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
let span = span_map.span_for_range(ast.syntax().text_range());
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
let value = match lit.kind() { let value = match lit.kind() {
ast::LiteralKind::String(string) => string.value()?.into(), ast::LiteralKind::String(string) => string.value()?.into(),
@ -212,12 +214,12 @@ impl Attr {
}; };
Some(Interned::new(AttrInput::Literal(value))) Some(Interned::new(AttrInput::Literal(value)))
} else if let Some(tt) = ast.token_tree() { } else if let Some(tt) = ast.token_tree() {
let tree = syntax_node_to_token_tree(tt.syntax(), span_map); let tree = syntax_node_to_token_tree(tt.syntax(), span_map, span);
Some(Interned::new(AttrInput::TokenTree(Box::new(tree)))) Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
} else { } else {
None None
}; };
Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx }) Some(Attr { id, path, input, span })
} }
fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> { fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
@ -265,7 +267,7 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>( pub fn parse_path_comma_token_tree<'a>(
&'a self, &'a self,
db: &'a dyn ExpandDatabase, db: &'a dyn ExpandDatabase,
) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> { ) -> Option<impl Iterator<Item = (ModPath, Span)> + 'a> {
let args = self.token_tree_value()?; let args = self.token_tree_value()?;
if args.delimiter.kind != DelimiterKind::Parenthesis { if args.delimiter.kind != DelimiterKind::Parenthesis {
@ -281,7 +283,7 @@ impl Attr {
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
// here or maybe just parse a mod path from a token tree directly // here or maybe just parse a mod path from a token tree directly
let subtree = tt::Subtree { let subtree = tt::Subtree {
delimiter: tt::Delimiter::dummy_invisible(), delimiter: tt::Delimiter::invisible_spanned(tts.first()?.first_span()),
token_trees: tts.to_vec(), token_trees: tts.to_vec(),
}; };
let (parse, span_map) = let (parse, span_map) =
@ -293,7 +295,7 @@ impl Attr {
return None; return None;
} }
let path = meta.path()?; let path = meta.path()?;
let call_site = span_map.span_at(path.syntax().text_range().start()).ctx; let call_site = span_map.span_at(path.syntax().text_range().start());
Some(( Some((
ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?, ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
call_site, call_site,

View file

@ -1,12 +1,7 @@
//! Builtin attributes. //! Builtin attributes.
use span::{MacroCallId, Span};
use base_db::{ use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind};
span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use syntax::{TextRange, TextSize};
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin { macro_rules! register_builtin {
($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => { ($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => {
@ -106,7 +101,12 @@ fn derive_attr_expand(
MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => { MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
attr_args attr_args
} }
_ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)), _ => {
return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan {
open: loc.call_site,
close: loc.call_site,
}))
}
}; };
pseudo_derive_attr_expansion(tt, derives, loc.call_site) pseudo_derive_attr_expansion(tt, derives, loc.call_site)
} }
@ -114,20 +114,13 @@ fn derive_attr_expand(
pub fn pseudo_derive_attr_expansion( pub fn pseudo_derive_attr_expansion(
tt: &tt::Subtree, tt: &tt::Subtree,
args: &tt::Subtree, args: &tt::Subtree,
call_site: SyntaxContextId, call_site: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let mk_leaf = |char| { let mk_leaf = |char| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char, char,
spacing: tt::Spacing::Alone, spacing: tt::Spacing::Alone,
span: tt::SpanData { span: call_site,
range: TextRange::empty(TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: call_site,
},
})) }))
}; };

View file

@ -1,20 +1,21 @@
//! Builtin derives. //! Builtin derives.
use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin}; use base_db::{CrateOrigin, LangCrateOrigin};
use itertools::izip; use itertools::izip;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use span::{MacroCallId, Span};
use stdx::never; use stdx::never;
use tracing::debug; use tracing::debug;
use crate::{ use crate::{
hygiene::span_with_def_site_ctxt, hygiene::span_with_def_site_ctxt,
name::{AsName, Name}, name::{AsName, Name},
span::SpanMapRef, span_map::SpanMapRef,
tt, tt,
}; };
use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}; use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId}; use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult};
macro_rules! register_builtin { macro_rules! register_builtin {
( $($trait:ident => $expand:ident),* ) => { ( $($trait:ident => $expand:ident),* ) => {
@ -35,7 +36,7 @@ macro_rules! register_builtin {
$( BuiltinDeriveExpander::$trait => $expand, )* $( BuiltinDeriveExpander::$trait => $expand, )*
}; };
let span = db.lookup_intern_macro_call(id).span(db); let span = db.lookup_intern_macro_call(id).call_site;
let span = span_with_def_site_ctxt(db, span, id); let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, span, tt, token_map) expander(db, id, span, tt, token_map)
} }
@ -73,16 +74,16 @@ enum VariantShape {
Unit, Unit,
} }
fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator<Item = tt::Ident> { fn tuple_field_iterator(span: Span, n: usize) -> impl Iterator<Item = tt::Ident> {
(0..n).map(move |it| tt::Ident::new(format!("f{it}"), span)) (0..n).map(move |it| tt::Ident::new(format!("f{it}"), span))
} }
impl VariantShape { impl VariantShape {
fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree { fn as_pattern(&self, path: tt::Subtree, span: Span) -> tt::Subtree {
self.as_pattern_map(path, span, |it| quote!(span => #it)) self.as_pattern_map(path, span, |it| quote!(span => #it))
} }
fn field_names(&self, span: SpanData) -> Vec<tt::Ident> { fn field_names(&self, span: Span) -> Vec<tt::Ident> {
match self { match self {
VariantShape::Struct(s) => s.clone(), VariantShape::Struct(s) => s.clone(),
VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(), VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(),
@ -93,7 +94,7 @@ impl VariantShape {
fn as_pattern_map( fn as_pattern_map(
&self, &self,
path: tt::Subtree, path: tt::Subtree,
span: SpanData, span: Span,
field_map: impl Fn(&tt::Ident) -> tt::Subtree, field_map: impl Fn(&tt::Ident) -> tt::Subtree,
) -> tt::Subtree { ) -> tt::Subtree {
match self { match self {
@ -143,11 +144,11 @@ enum AdtShape {
} }
impl AdtShape { impl AdtShape {
fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec<tt::Subtree> { fn as_pattern(&self, span: Span, name: &tt::Ident) -> Vec<tt::Subtree> {
self.as_pattern_map(name, |it| quote!(span =>#it), span) self.as_pattern_map(name, |it| quote!(span =>#it), span)
} }
fn field_names(&self, span: SpanData) -> Vec<Vec<tt::Ident>> { fn field_names(&self, span: Span) -> Vec<Vec<tt::Ident>> {
match self { match self {
AdtShape::Struct(s) => { AdtShape::Struct(s) => {
vec![s.field_names(span)] vec![s.field_names(span)]
@ -166,7 +167,7 @@ impl AdtShape {
&self, &self,
name: &tt::Ident, name: &tt::Ident,
field_map: impl Fn(&tt::Ident) -> tt::Subtree, field_map: impl Fn(&tt::Ident) -> tt::Subtree,
span: SpanData, span: Span,
) -> Vec<tt::Subtree> { ) -> Vec<tt::Subtree> {
match self { match self {
AdtShape::Struct(s) => { AdtShape::Struct(s) => {
@ -199,7 +200,7 @@ struct BasicAdtInfo {
fn parse_adt( fn parse_adt(
tm: SpanMapRef<'_>, tm: SpanMapRef<'_>,
adt: &ast::Adt, adt: &ast::Adt,
call_site: SpanData, call_site: Span,
) -> Result<BasicAdtInfo, ExpandError> { ) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match adt { let (name, generic_param_list, shape) = match adt {
ast::Adt::Struct(it) => ( ast::Adt::Struct(it) => (
@ -245,7 +246,7 @@ fn parse_adt(
match this { match this {
Some(it) => { Some(it) => {
param_type_set.insert(it.as_name()); param_type_set.insert(it.as_name());
mbe::syntax_node_to_token_tree(it.syntax(), tm) mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)
} }
None => { None => {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site }) tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
@ -253,15 +254,15 @@ fn parse_adt(
} }
}; };
let bounds = match &param { let bounds = match &param {
ast::TypeOrConstParam::Type(it) => { ast::TypeOrConstParam::Type(it) => it
it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm)) .type_bound_list()
} .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)),
ast::TypeOrConstParam::Const(_) => None, ast::TypeOrConstParam::Const(_) => None,
}; };
let ty = if let ast::TypeOrConstParam::Const(param) = param { let ty = if let ast::TypeOrConstParam::Const(param) = param {
let ty = param let ty = param
.ty() .ty()
.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm)) .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm, call_site))
.unwrap_or_else(|| { .unwrap_or_else(|| {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site }) tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
}); });
@ -297,7 +298,7 @@ fn parse_adt(
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name(); let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p) param_type_set.contains(&name).then_some(p)
}) })
.map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm)) .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site))
.collect(); .collect();
let name_token = name_to_token(tm, name)?; let name_token = name_to_token(tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types }) Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
@ -349,7 +350,7 @@ fn name_to_token(
/// therefore does not get bound by the derived trait. /// therefore does not get bound by the derived trait.
fn expand_simple_derive( fn expand_simple_derive(
// FIXME: use // FIXME: use
invoc_span: SpanData, invoc_span: Span,
tt: &ast::Adt, tt: &ast::Adt,
tm: SpanMapRef<'_>, tm: SpanMapRef<'_>,
trait_path: tt::Subtree, trait_path: tt::Subtree,
@ -397,7 +398,7 @@ fn expand_simple_derive(
ExpandResult::ok(expanded) ExpandResult::ok(expanded)
} }
fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree { fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: Span) -> tt::TokenTree {
// FIXME: make hygiene works for builtin derive macro // FIXME: make hygiene works for builtin derive macro
// such that $crate can be used here. // such that $crate can be used here.
let cg = db.crate_graph(); let cg = db.crate_graph();
@ -416,7 +417,7 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData)
fn copy_expand( fn copy_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData, span: Span,
tt: &ast::Adt, tt: &ast::Adt,
tm: SpanMapRef<'_>, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
@ -427,7 +428,7 @@ fn copy_expand(
fn clone_expand( fn clone_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData, span: Span,
tt: &ast::Adt, tt: &ast::Adt,
tm: SpanMapRef<'_>, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
@ -470,13 +471,13 @@ fn clone_expand(
} }
/// This function exists since `quote! {span => => }` doesn't work. /// This function exists since `quote! {span => => }` doesn't work.
fn fat_arrow(span: SpanData) -> tt::Subtree { fn fat_arrow(span: Span) -> tt::Subtree {
let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span }; let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span };
quote! {span => #eq> } quote! {span => #eq> }
} }
/// This function exists since `quote! {span => && }` doesn't work. /// This function exists since `quote! {span => && }` doesn't work.
fn and_and(span: SpanData) -> tt::Subtree { fn and_and(span: Span) -> tt::Subtree {
let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span }; let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span };
quote! {span => #and& } quote! {span => #and& }
} }
@ -484,7 +485,7 @@ fn and_and(span: SpanData) -> tt::Subtree {
fn default_expand( fn default_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData, span: Span,
tt: &ast::Adt, tt: &ast::Adt,
tm: SpanMapRef<'_>, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
@ -529,7 +530,7 @@ fn default_expand(
fn debug_expand( fn debug_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData, span: Span,
tt: &ast::Adt, tt: &ast::Adt,
tm: SpanMapRef<'_>, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
@ -607,7 +608,7 @@ fn debug_expand(
fn hash_expand( fn hash_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData, span: Span,
tt: &ast::Adt, tt: &ast::Adt,
tm: SpanMapRef<'_>, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
@ -660,7 +661,7 @@ fn hash_expand(
fn eq_expand( fn eq_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData, span: Span,
tt: &ast::Adt, tt: &ast::Adt,
tm: SpanMapRef<'_>, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
@ -671,7 +672,7 @@ fn eq_expand(
fn partial_eq_expand( fn partial_eq_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData, span: Span,
tt: &ast::Adt, tt: &ast::Adt,
tm: SpanMapRef<'_>, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
@ -725,7 +726,7 @@ fn partial_eq_expand(
fn self_and_other_patterns( fn self_and_other_patterns(
adt: &BasicAdtInfo, adt: &BasicAdtInfo,
name: &tt::Ident, name: &tt::Ident,
span: SpanData, span: Span,
) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) { ) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
let self_patterns = adt.shape.as_pattern_map( let self_patterns = adt.shape.as_pattern_map(
name, name,
@ -749,7 +750,7 @@ fn self_and_other_patterns(
fn ord_expand( fn ord_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData, span: Span,
tt: &ast::Adt, tt: &ast::Adt,
tm: SpanMapRef<'_>, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
@ -760,7 +761,7 @@ fn ord_expand(
left: tt::Subtree, left: tt::Subtree,
right: tt::Subtree, right: tt::Subtree,
rest: tt::Subtree, rest: tt::Subtree,
span: SpanData, span: Span,
) -> tt::Subtree { ) -> tt::Subtree {
let fat_arrow1 = fat_arrow(span); let fat_arrow1 = fat_arrow(span);
let fat_arrow2 = fat_arrow(span); let fat_arrow2 = fat_arrow(span);
@ -813,7 +814,7 @@ fn ord_expand(
fn partial_ord_expand( fn partial_ord_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData, span: Span,
tt: &ast::Adt, tt: &ast::Adt,
tm: SpanMapRef<'_>, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
@ -824,7 +825,7 @@ fn partial_ord_expand(
left: tt::Subtree, left: tt::Subtree,
right: tt::Subtree, right: tt::Subtree,
rest: tt::Subtree, rest: tt::Subtree,
span: SpanData, span: Span,
) -> tt::Subtree { ) -> tt::Subtree {
let fat_arrow1 = fat_arrow(span); let fat_arrow1 = fat_arrow(span);
let fat_arrow2 = fat_arrow(span); let fat_arrow2 = fat_arrow(span);

View file

@ -1,13 +1,11 @@
//! Builtin macro //! Builtin macro
use base_db::{ use base_db::{AnchoredPath, Edition, FileId};
span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
AnchoredPath, Edition, FileId,
};
use cfg::CfgExpr; use cfg::CfgExpr;
use either::Either; use either::Either;
use itertools::Itertools; use itertools::Itertools;
use mbe::{parse_exprs_with_sep, parse_to_token_tree}; use mbe::{parse_exprs_with_sep, parse_to_token_tree};
use span::{Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use syntax::{ use syntax::{
ast::{self, AstToken}, ast::{self, AstToken},
SmolStr, SmolStr,
@ -15,10 +13,11 @@ use syntax::{
use crate::{ use crate::{
db::ExpandDatabase, db::ExpandDatabase,
hygiene::span_with_def_site_ctxt, hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
name, quote, name::{self, known},
quote,
tt::{self, DelimSpan}, tt::{self, DelimSpan},
ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc, ExpandError, ExpandResult, HirFileIdExt, MacroCallId,
}; };
macro_rules! register_builtin { macro_rules! register_builtin {
@ -44,7 +43,7 @@ macro_rules! register_builtin {
$( BuiltinFnLikeExpander::$kind => $expand, )* $( BuiltinFnLikeExpander::$kind => $expand, )*
}; };
let span = db.lookup_intern_macro_call(id).span(db); let span = db.lookup_intern_macro_call(id).call_site;
let span = span_with_def_site_ctxt(db, span, id); let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, tt, span) expander(db, id, tt, span)
} }
@ -61,7 +60,7 @@ macro_rules! register_builtin {
$( EagerExpander::$e_kind => $e_expand, )* $( EagerExpander::$e_kind => $e_expand, )*
}; };
let span = db.lookup_intern_macro_call(id).span(db); let span = db.lookup_intern_macro_call(id).call_site;
let span = span_with_def_site_ctxt(db, span, id); let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, tt, span) expander(db, id, tt, span)
} }
@ -109,6 +108,7 @@ register_builtin! {
(format_args, FormatArgs) => format_args_expand, (format_args, FormatArgs) => format_args_expand,
(const_format_args, ConstFormatArgs) => format_args_expand, (const_format_args, ConstFormatArgs) => format_args_expand,
(format_args_nl, FormatArgsNl) => format_args_nl_expand, (format_args_nl, FormatArgsNl) => format_args_nl_expand,
(quote, Quote) => quote_expand,
EAGER: EAGER:
(compile_error, CompileError) => compile_error_expand, (compile_error, CompileError) => compile_error_expand,
@ -122,7 +122,7 @@ register_builtin! {
(option_env, OptionEnv) => option_env_expand (option_env, OptionEnv) => option_env_expand
} }
fn mk_pound(span: SpanData) -> tt::Subtree { fn mk_pound(span: Span) -> tt::Subtree {
crate::quote::IntoTt::to_subtree( crate::quote::IntoTt::to_subtree(
vec![crate::tt::Leaf::Punct(crate::tt::Punct { vec![crate::tt::Leaf::Punct(crate::tt::Punct {
char: '#', char: '#',
@ -138,7 +138,7 @@ fn module_path_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
// Just return a dummy result. // Just return a dummy result.
ExpandResult::ok(quote! {span => ExpandResult::ok(quote! {span =>
@ -150,13 +150,13 @@ fn line_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
// dummy implementation for type-checking purposes // dummy implementation for type-checking purposes
// Note that `line!` and `column!` will never be implemented properly, as they are by definition // Note that `line!` and `column!` will never be implemented properly, as they are by definition
// not incremental // not incremental
ExpandResult::ok(tt::Subtree { ExpandResult::ok(tt::Subtree {
delimiter: tt::Delimiter::dummy_invisible(), delimiter: tt::Delimiter::invisible_spanned(span),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: "0u32".into(), text: "0u32".into(),
span, span,
@ -168,7 +168,7 @@ fn log_syntax_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(quote! {span =>}) ExpandResult::ok(quote! {span =>})
} }
@ -177,7 +177,7 @@ fn trace_macros_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(quote! {span =>}) ExpandResult::ok(quote! {span =>})
} }
@ -186,7 +186,7 @@ fn stringify_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let pretty = ::tt::pretty(&tt.token_trees); let pretty = ::tt::pretty(&tt.token_trees);
@ -198,32 +198,38 @@ fn stringify_expand(
} }
fn assert_expand( fn assert_expand(
_db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
_id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let args = parse_exprs_with_sep(tt, ','); let call_site_span = span_with_call_site_ctxt(db, span, id);
let args = parse_exprs_with_sep(tt, ',', call_site_span);
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let expanded = match &*args { let expanded = match &*args {
[cond, panic_args @ ..] => { [cond, panic_args @ ..] => {
let comma = tt::Subtree { let comma = tt::Subtree {
delimiter: tt::Delimiter::dummy_invisible(), delimiter: tt::Delimiter::invisible_spanned(call_site_span),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',', char: ',',
spacing: tt::Spacing::Alone, spacing: tt::Spacing::Alone,
span, span: call_site_span,
}))], }))],
}; };
let cond = cond.clone(); let cond = cond.clone();
let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma); let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
quote! {span =>{ let mac = if use_panic_2021(db, span) {
quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) }
} else {
quote! {call_site_span => #dollar_crate::panic!(##panic_args) }
};
quote! {call_site_span =>{
if !(#cond) { if !(#cond) {
#dollar_crate::panic!(##panic_args); #mac;
} }
}} }}
} }
[] => quote! {span =>{}}, [] => quote! {call_site_span =>{}},
}; };
ExpandResult::ok(expanded) ExpandResult::ok(expanded)
@ -233,7 +239,7 @@ fn file_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
// FIXME: RA purposefully lacks knowledge of absolute file names // FIXME: RA purposefully lacks knowledge of absolute file names
// so just return "". // so just return "".
@ -250,7 +256,7 @@ fn format_args_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "", span) format_args_expand_general(db, id, tt, "", span)
} }
@ -259,7 +265,7 @@ fn format_args_nl_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "\\n", span) format_args_expand_general(db, id, tt, "\\n", span)
} }
@ -270,7 +276,7 @@ fn format_args_expand_general(
tt: &tt::Subtree, tt: &tt::Subtree,
// FIXME: Make use of this so that mir interpretation works properly // FIXME: Make use of this so that mir interpretation works properly
_end_string: &str, _end_string: &str,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let pound = mk_pound(span); let pound = mk_pound(span);
let mut tt = tt.clone(); let mut tt = tt.clone();
@ -284,7 +290,7 @@ fn asm_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
// We expand all assembly snippets to `format_args!` invocations to get format syntax // We expand all assembly snippets to `format_args!` invocations to get format syntax
// highlighting for them. // highlighting for them.
@ -314,7 +320,7 @@ fn global_asm_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
// Expand to nothing (at item-level) // Expand to nothing (at item-level)
ExpandResult::ok(quote! {span =>}) ExpandResult::ok(quote! {span =>})
@ -324,7 +330,7 @@ fn cfg_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id); let loc = db.lookup_intern_macro_call(id);
let expr = CfgExpr::parse(tt); let expr = CfgExpr::parse(tt);
@ -337,19 +343,25 @@ fn panic_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let call_site_span = span_with_call_site_ctxt(db, span, id);
let mac =
if use_panic_2021(db, call_site_span) { known::panic_2021 } else { known::panic_2015 };
// Expand to a macro call `$crate::panic::panic_{edition}` // Expand to a macro call `$crate::panic::panic_{edition}`
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { let mut call = quote!(call_site_span =>#dollar_crate::panic::#mac!);
quote!(span =>#dollar_crate::panic::panic_2021!)
} else {
quote!(span =>#dollar_crate::panic::panic_2015!)
};
// Pass the original arguments // Pass the original arguments
call.token_trees.push(tt::TokenTree::Subtree(tt.clone())); let mut subtree = tt.clone();
subtree.delimiter = tt::Delimiter {
open: call_site_span,
close: call_site_span,
kind: tt::DelimiterKind::Parenthesis,
};
call.token_trees.push(tt::TokenTree::Subtree(subtree));
ExpandResult::ok(call) ExpandResult::ok(call)
} }
@ -357,22 +369,52 @@ fn unreachable_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
// Expand to a macro call `$crate::panic::unreachable_{edition}`
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { let call_site_span = span_with_call_site_ctxt(db, span, id);
quote!(span =>#dollar_crate::panic::unreachable_2021!)
let mac = if use_panic_2021(db, call_site_span) {
known::unreachable_2021
} else { } else {
quote!(span =>#dollar_crate::panic::unreachable_2015!) known::unreachable_2015
}; };
// Expand to a macro call `$crate::panic::panic_{edition}`
let mut call = quote!(call_site_span =>#dollar_crate::panic::#mac!);
// Pass the original arguments // Pass the original arguments
call.token_trees.push(tt::TokenTree::Subtree(tt.clone())); let mut subtree = tt.clone();
subtree.delimiter = tt::Delimiter {
open: call_site_span,
close: call_site_span,
kind: tt::DelimiterKind::Parenthesis,
};
call.token_trees.push(tt::TokenTree::Subtree(subtree));
ExpandResult::ok(call) ExpandResult::ok(call)
} }
fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool {
// To determine the edition, we check the first span up the expansion
// stack that does not have #[allow_internal_unstable(edition_panic)].
// (To avoid using the edition of e.g. the assert!() or debug_assert!() definition.)
loop {
let Some(expn) = db.lookup_intern_syntax_context(span.ctx).outer_expn else {
break false;
};
let expn = db.lookup_intern_macro_call(expn);
// FIXME: Record allow_internal_unstable in the macro def (not been done yet because it
// would consume quite a bit extra memory for all call locs...)
// if let Some(features) = expn.def.allow_internal_unstable {
// if features.iter().any(|&f| f == sym::edition_panic) {
// span = expn.call_site;
// continue;
// }
// }
break expn.def.edition >= Edition::Edition2021;
}
}
fn unquote_str(lit: &tt::Literal) -> Option<String> { fn unquote_str(lit: &tt::Literal) -> Option<String> {
let lit = ast::make::tokens::literal(&lit.to_string()); let lit = ast::make::tokens::literal(&lit.to_string());
let token = ast::String::cast(lit)?; let token = ast::String::cast(lit)?;
@ -395,7 +437,7 @@ fn compile_error_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees { let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
@ -412,7 +454,7 @@ fn concat_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_arg_id: MacroCallId, _arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let mut err = None; let mut err = None;
let mut text = String::new(); let mut text = String::new();
@ -459,7 +501,7 @@ fn concat_bytes_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_arg_id: MacroCallId, _arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let mut bytes = Vec::new(); let mut bytes = Vec::new();
let mut err = None; let mut err = None;
@ -543,7 +585,7 @@ fn concat_idents_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_arg_id: MacroCallId, _arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let mut err = None; let mut err = None;
let mut ident = String::new(); let mut ident = String::new();
@ -596,7 +638,7 @@ fn include_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
arg_id: MacroCallId, arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let file_id = match include_input_to_file_id(db, arg_id, tt) { let file_id = match include_input_to_file_id(db, arg_id, tt) {
Ok(it) => it, Ok(it) => it,
@ -629,11 +671,11 @@ fn include_bytes_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_arg_id: MacroCallId, _arg_id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
// FIXME: actually read the file here if the user asked for macro expansion // FIXME: actually read the file here if the user asked for macro expansion
let res = tt::Subtree { let res = tt::Subtree {
delimiter: tt::Delimiter::dummy_invisible(), delimiter: tt::Delimiter::invisible_spanned(span),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: r#"b"""#.into(), text: r#"b"""#.into(),
span, span,
@ -646,7 +688,7 @@ fn include_str_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
arg_id: MacroCallId, arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let path = match parse_string(tt) { let path = match parse_string(tt) {
Ok(it) => it, Ok(it) => it,
@ -681,7 +723,7 @@ fn env_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
arg_id: MacroCallId, arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) { let key = match parse_string(tt) {
Ok(it) => it, Ok(it) => it,
@ -713,7 +755,7 @@ fn option_env_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
arg_id: MacroCallId, arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) { let key = match parse_string(tt) {
Ok(it) => it, Ok(it) => it,
@ -729,3 +771,15 @@ fn option_env_expand(
ExpandResult::ok(expanded) ExpandResult::ok(expanded)
} }
fn quote_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
_tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> {
ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
ExpandError::other("quote! is not implemented"),
)
}

View file

@ -0,0 +1,42 @@
//! Defines a unit of change that can applied to the database to get the next
//! state. Changes are transactional.
use base_db::{salsa::Durability, CrateGraph, FileChange, SourceDatabaseExt, SourceRoot};
use span::FileId;
use triomphe::Arc;
use crate::{db::ExpandDatabase, proc_macro::ProcMacros};
#[derive(Debug, Default)]
pub struct Change {
pub source_change: FileChange,
pub proc_macros: Option<ProcMacros>,
}
impl Change {
pub fn new() -> Self {
Self::default()
}
pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) {
self.source_change.apply(db);
if let Some(proc_macros) = self.proc_macros {
db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
}
}
pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) {
self.source_change.change_file(file_id, new_text)
}
pub fn set_crate_graph(&mut self, graph: CrateGraph) {
self.source_change.set_crate_graph(graph)
}
pub fn set_proc_macros(&mut self, proc_macros: ProcMacros) {
self.proc_macros = Some(proc_macros);
}
pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
self.source_change.set_roots(roots)
}
}

View file

@ -1,14 +1,16 @@
//! Defines database & queries for macro expansion. //! Defines database & queries for macro expansion.
use std::sync::OnceLock;
use base_db::{ use base_db::{
salsa::{self, debug::DebugQueryTable}, salsa::{self, debug::DebugQueryTable},
span::SyntaxContextId, CrateId, Edition, FileId, SourceDatabase, VersionReq,
CrateId, Edition, FileId, SourceDatabase,
}; };
use either::Either; use either::Either;
use limit::Limit; use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult}; use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use span::{Span, SyntaxContextId};
use syntax::{ use syntax::{
ast::{self, HasAttrs}, ast::{self, HasAttrs},
AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
@ -21,11 +23,16 @@ use crate::{
builtin_attr_macro::pseudo_derive_attr_expansion, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander, builtin_fn_macro::EagerExpander,
fixup::{self, reverse_fixups, SyntaxFixupUndoInfo}, fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
hygiene::{apply_mark, SyntaxContextData, Transparency}, hygiene::{
span::{RealSpanMap, SpanMap, SpanMapRef}, apply_mark, span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, SyntaxContextData, Transparency,
ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId, },
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander, proc_macro::ProcMacros,
span_map::{RealSpanMap, SpanMap, SpanMapRef},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
CustomProcMacroExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap,
HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
MacroFileId,
}; };
/// Total limit on the number of tokens produced by any macro invocation. /// Total limit on the number of tokens produced by any macro invocation.
@ -39,10 +46,13 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
/// Old-style `macro_rules` or the new macros 2.0 /// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander { pub struct DeclarativeMacroExpander {
pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>, pub mac: mbe::DeclarativeMacro<span::Span>,
pub transparency: Transparency, pub transparency: Transparency,
} }
// FIXME: Remove this once we drop support for 1.76
static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();
impl DeclarativeMacroExpander { impl DeclarativeMacroExpander {
pub fn expand( pub fn expand(
&self, &self,
@ -50,25 +60,61 @@ impl DeclarativeMacroExpander {
tt: tt::Subtree, tt: tt::Subtree,
call_id: MacroCallId, call_id: MacroCallId,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(call_id);
let toolchain = &db.crate_graph()[loc.def.krate].toolchain;
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
pre: base_db::Prerelease::EMPTY,
build: base_db::BuildMetadata::EMPTY,
major: version.major,
minor: version.minor,
patch: version.patch,
},
)
});
match self.mac.err() { match self.mac.err() {
Some(e) => ExpandResult::new( Some(e) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan::DUMMY), tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
ExpandError::other(format!("invalid macro definition: {e}")), ExpandError::other(format!("invalid macro definition: {e}")),
), ),
None => self None => self
.mac .mac
.expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency)) .expand(
&tt,
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
new_meta_vars,
loc.call_site,
)
.map_err(Into::into), .map_err(Into::into),
} }
} }
pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> { pub fn expand_unhygienic(
&self,
db: &dyn ExpandDatabase,
tt: tt::Subtree,
krate: CrateId,
call_site: Span,
) -> ExpandResult<tt::Subtree> {
let toolchain = &db.crate_graph()[krate].toolchain;
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
pre: base_db::Prerelease::EMPTY,
build: base_db::BuildMetadata::EMPTY,
major: version.major,
minor: version.minor,
patch: version.patch,
},
)
});
match self.mac.err() { match self.mac.err() {
Some(e) => ExpandResult::new( Some(e) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan::DUMMY), tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other(format!("invalid macro definition: {e}")), ExpandError::other(format!("invalid macro definition: {e}")),
), ),
None => self.mac.expand(&tt, |_| ()).map_err(Into::into), None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
} }
} }
} }
@ -86,11 +132,15 @@ pub enum TokenExpander {
/// `derive(Copy)` and such. /// `derive(Copy)` and such.
BuiltInDerive(BuiltinDeriveExpander), BuiltInDerive(BuiltinDeriveExpander),
/// The thing we love the most here in rust-analyzer -- procedural macros. /// The thing we love the most here in rust-analyzer -- procedural macros.
ProcMacro(ProcMacroExpander), ProcMacro(CustomProcMacroExpander),
} }
#[salsa::query_group(ExpandDatabaseStorage)] #[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase { pub trait ExpandDatabase: SourceDatabase {
/// The proc macros.
#[salsa::input]
fn proc_macros(&self) -> Arc<ProcMacros>;
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>; fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
/// Main public API -- parses a hir file, not caring whether it's a real /// Main public API -- parses a hir file, not caring whether it's a real
@ -164,7 +214,20 @@ pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
} }
pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> { pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
Arc::new(RealSpanMap::from_file(db, file_id)) use syntax::ast::HasModuleItem;
let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(file_id.into());
let tree = db.parse(file_id).tree();
pairs.extend(
tree.items()
.map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
);
Arc::new(RealSpanMap::from_file(
file_id,
pairs.into_boxed_slice(),
tree.syntax().text_range().end(),
))
} }
/// This expands the given macro call, but with different arguments. This is /// This expands the given macro call, but with different arguments. This is
@ -184,12 +247,13 @@ pub fn expand_speculative(
// Build the subtree and token mapping for the speculative args // Build the subtree and token mapping for the speculative args
let (mut tt, undo_info) = match loc.kind { let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => { MacroCallKind::FnLike { .. } => (
(mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE) mbe::syntax_node_to_token_tree(speculative_args, span_map, loc.call_site),
} SyntaxFixupUndoInfo::NONE,
),
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
let censor = censor_for_macro_input(&loc, speculative_args); let censor = censor_for_macro_input(&loc, speculative_args);
let mut fixups = fixup::fixup_syntax(span_map, speculative_args); let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site);
fixups.append.retain(|it, _| match it { fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Node(it) => !censor.contains(it), syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true, syntax::NodeOrToken::Token(_) => true,
@ -201,6 +265,7 @@ pub fn expand_speculative(
span_map, span_map,
fixups.append, fixups.append,
fixups.remove, fixups.remove,
loc.call_site,
), ),
fixups.undo_info, fixups.undo_info,
) )
@ -222,8 +287,9 @@ pub fn expand_speculative(
}?; }?;
match attr.token_tree() { match attr.token_tree() {
Some(token_tree) => { Some(token_tree) => {
let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map); let mut tree =
tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE; syntax_node_to_token_tree(token_tree.syntax(), span_map, loc.call_site);
tree.delimiter = tt::Delimiter::invisible_spanned(loc.call_site);
Some(tree) Some(tree)
} }
@ -237,17 +303,16 @@ pub fn expand_speculative(
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead. // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind { let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => { MacroDefKind::ProcMacro(expander, ..) => {
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; tt.delimiter = tt::Delimiter::invisible_spanned(loc.call_site);
let call_site = loc.span(db);
expander.expand( expander.expand(
db, db,
loc.def.krate, loc.def.krate,
loc.krate, loc.krate,
&tt, &tt,
attr_arg.as_ref(), attr_arg.as_ref(),
call_site, span_with_def_site_ctxt(db, loc.def.span, actual_macro_call),
call_site, span_with_call_site_ctxt(db, loc.def.span, actual_macro_call),
call_site, span_with_mixed_site_ctxt(db, loc.def.span, actual_macro_call),
) )
} }
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
@ -258,9 +323,12 @@ pub fn expand_speculative(
let adt = ast::Adt::cast(speculative_args.clone()).unwrap(); let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
expander.expand(db, actual_macro_call, &adt, span_map) expander.expand(db, actual_macro_call, &adt, span_map)
} }
MacroDefKind::Declarative(it) => { MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand_unhygienic(
db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt) db,
} tt,
loc.def.krate,
loc.call_site,
),
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into), MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInEager(it, _) => { MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into) it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@ -410,12 +478,13 @@ fn macro_arg(
MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(), MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
}; };
let (mut tt, undo_info) = match loc.kind { let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => { MacroCallKind::FnLike { .. } => (
(mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE) mbe::syntax_node_to_token_tree(&syntax, map.as_ref(), loc.call_site),
} SyntaxFixupUndoInfo::NONE,
),
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
let censor = censor_for_macro_input(&loc, &syntax); let censor = censor_for_macro_input(&loc, &syntax);
let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax); let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site);
fixups.append.retain(|it, _| match it { fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Node(it) => !censor.contains(it), syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true, syntax::NodeOrToken::Token(_) => true,
@ -427,6 +496,7 @@ fn macro_arg(
map.as_ref(), map.as_ref(),
fixups.append.clone(), fixups.append.clone(),
fixups.remove.clone(), fixups.remove.clone(),
loc.call_site,
); );
reverse_fixups(&mut tt, &fixups.undo_info); reverse_fixups(&mut tt, &fixups.undo_info);
} }
@ -436,6 +506,7 @@ fn macro_arg(
map, map,
fixups.append, fixups.append,
fixups.remove, fixups.remove,
loc.call_site,
), ),
fixups.undo_info, fixups.undo_info,
) )
@ -444,7 +515,7 @@ fn macro_arg(
if loc.def.is_proc_macro() { if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included // proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; tt.delimiter.kind = tt::DelimiterKind::Invisible;
} }
if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
@ -506,7 +577,8 @@ fn decl_macro_expander(
def_crate: CrateId, def_crate: CrateId,
id: AstId<ast::Macro>, id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> { ) -> Arc<DeclarativeMacroExpander> {
let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021; let crate_data = &db.crate_graph()[def_crate];
let is_2021 = crate_data.edition >= Edition::Edition2021;
let (root, map) = parse_with_map(db, id.file_id); let (root, map) = parse_with_map(db, id.file_id);
let root = root.syntax_node(); let root = root.syntax_node();
@ -530,13 +602,29 @@ fn decl_macro_expander(
_ => None, _ => None,
} }
}; };
let toolchain = crate_data.toolchain.as_ref();
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
pre: base_db::Prerelease::EMPTY,
build: base_db::BuildMetadata::EMPTY,
major: version.major,
minor: version.minor,
patch: version.patch,
},
)
});
let (mac, transparency) = match id.to_ptr(db).to_node(&root) { let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
ast::Macro::MacroRules(macro_rules) => ( ast::Macro::MacroRules(macro_rules) => (
match macro_rules.token_tree() { match macro_rules.token_tree() {
Some(arg) => { Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref()); let tt = mbe::syntax_node_to_token_tree(
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); arg.syntax(),
map.as_ref(),
map.span_for_range(macro_rules.macro_rules_token().unwrap().text_range()),
);
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars);
mac mac
} }
None => mbe::DeclarativeMacro::from_err( None => mbe::DeclarativeMacro::from_err(
@ -549,8 +637,12 @@ fn decl_macro_expander(
ast::Macro::MacroDef(macro_def) => ( ast::Macro::MacroDef(macro_def) => (
match macro_def.body() { match macro_def.body() {
Some(arg) => { Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref()); let tt = mbe::syntax_node_to_token_tree(
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021); arg.syntax(),
map.as_ref(),
map.span_for_range(macro_def.macro_token().unwrap().text_range()),
);
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars);
mac mac
} }
None => mbe::DeclarativeMacro::from_err( None => mbe::DeclarativeMacro::from_err(
@ -601,7 +693,7 @@ fn macro_expand(
let Some((macro_arg, undo_info)) = value else { let Some((macro_arg, undo_info)) = value else {
return ExpandResult { return ExpandResult {
value: Arc::new(tt::Subtree { value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::DUMMY_INVISIBLE, delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
token_trees: Vec::new(), token_trees: Vec::new(),
}), }),
// FIXME: We should make sure to enforce an invariant that invalid macro // FIXME: We should make sure to enforce an invariant that invalid macro
@ -660,7 +752,7 @@ fn macro_expand(
// Skip checking token tree limit for include! macro call // Skip checking token tree limit for include! macro call
if !loc.def.is_include() { if !loc.def.is_include() {
// Set a hard limit for the expanded tt // Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) { if let Err(value) = check_tt_count(&tt, loc.call_site) {
return value; return value;
} }
} }
@ -673,7 +765,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
let Some((macro_arg, undo_info)) = db.macro_arg(id).value else { let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
return ExpandResult { return ExpandResult {
value: Arc::new(tt::Subtree { value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::DUMMY_INVISIBLE, delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
token_trees: Vec::new(), token_trees: Vec::new(),
}), }),
// FIXME: We should make sure to enforce an invariant that invalid macro // FIXME: We should make sure to enforce an invariant that invalid macro
@ -692,22 +784,19 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
_ => None, _ => None,
}; };
let call_site = loc.span(db);
let ExpandResult { value: mut tt, err } = expander.expand( let ExpandResult { value: mut tt, err } = expander.expand(
db, db,
loc.def.krate, loc.def.krate,
loc.krate, loc.krate,
&macro_arg, &macro_arg,
attr_arg, attr_arg,
// FIXME span_with_def_site_ctxt(db, loc.def.span, id),
call_site, span_with_call_site_ctxt(db, loc.def.span, id),
call_site, span_with_mixed_site_ctxt(db, loc.def.span, id),
// FIXME
call_site,
); );
// Set a hard limit for the expanded tt // Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) { if let Err(value) = check_tt_count(&tt, loc.call_site) {
return value; return value;
} }
@ -730,12 +819,12 @@ fn token_tree_to_syntax_node(
mbe::token_tree_to_syntax_node(tt, entry_point) mbe::token_tree_to_syntax_node(tt, entry_point)
} }
fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> { fn check_tt_count(tt: &tt::Subtree, call_site: Span) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
let count = tt.count(); let count = tt.count();
if TOKEN_LIMIT.check(count).is_err() { if TOKEN_LIMIT.check(count).is_err() {
Err(ExpandResult { Err(ExpandResult {
value: Arc::new(tt::Subtree { value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::DUMMY_INVISIBLE, delimiter: tt::Delimiter::invisible_spanned(call_site),
token_trees: vec![], token_trees: vec![],
}), }),
err: Some(ExpandError::other(format!( err: Some(ExpandError::other(format!(

View file

@ -18,7 +18,8 @@
//! //!
//! //!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros> //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::{span::SyntaxContextId, CrateId}; use base_db::CrateId;
use span::Span;
use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent}; use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use triomphe::Arc; use triomphe::Arc;
@ -26,9 +27,9 @@ use crate::{
ast::{self, AstNode}, ast::{self, AstNode},
db::ExpandDatabase, db::ExpandDatabase,
mod_path::ModPath, mod_path::ModPath,
span::SpanMapRef, span_map::SpanMapRef,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
}; };
pub fn expand_eager_macro_input( pub fn expand_eager_macro_input(
@ -36,7 +37,7 @@ pub fn expand_eager_macro_input(
krate: CrateId, krate: CrateId,
macro_call: InFile<ast::MacroCall>, macro_call: InFile<ast::MacroCall>,
def: MacroDefId, def: MacroDefId,
call_site: SyntaxContextId, call_site: Span,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> { ) -> ExpandResult<Option<MacroCallId>> {
let ast_map = db.ast_id_map(macro_call.file_id); let ast_map = db.ast_id_map(macro_call.file_id);
@ -48,13 +49,14 @@ pub fn expand_eager_macro_input(
// When `lazy_expand` is called, its *parent* file must already exist. // When `lazy_expand` is called, its *parent* file must already exist.
// Here we store an eager macro id for the argument expanded subtree // Here we store an eager macro id for the argument expanded subtree
// for that purpose. // for that purpose.
let arg_id = db.intern_macro_call(MacroCallLoc { let arg_id = MacroCallLoc {
def, def,
krate, krate,
eager: None, eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr }, kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
call_site, call_site,
}); }
.intern(db);
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file()); db.parse_macro_expansion(arg_id.as_macro_file());
@ -81,9 +83,9 @@ pub fn expand_eager_macro_input(
return ExpandResult { value: None, err }; return ExpandResult { value: None, err };
}; };
let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map); let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, call_site);
subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE; subtree.delimiter.kind = crate::tt::DelimiterKind::Invisible;
let loc = MacroCallLoc { let loc = MacroCallLoc {
def, def,
@ -93,7 +95,7 @@ pub fn expand_eager_macro_input(
call_site, call_site,
}; };
ExpandResult { value: Some(db.intern_macro_call(loc)), err } ExpandResult { value: Some(loc.intern(db)), err }
} }
fn lazy_expand( fn lazy_expand(
@ -101,7 +103,7 @@ fn lazy_expand(
def: &MacroDefId, def: &MacroDefId,
macro_call: InFile<ast::MacroCall>, macro_call: InFile<ast::MacroCall>,
krate: CrateId, krate: CrateId,
call_site: SyntaxContextId, call_site: Span,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> { ) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value); let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
@ -121,7 +123,7 @@ fn eager_macro_recur(
mut offset: TextSize, mut offset: TextSize,
curr: InFile<SyntaxNode>, curr: InFile<SyntaxNode>,
krate: CrateId, krate: CrateId,
call_site: SyntaxContextId, call_site: Span,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<(SyntaxNode, TextSize)>> { ) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update(); let original = curr.value.clone_for_update();

View file

@ -1,11 +1,8 @@
//! Things to wrap other things in file ids. //! Things to wrap other things in file ids.
use std::iter; use std::iter;
use base_db::{
span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId},
FileId, FileRange,
};
use either::Either; use either::Either;
use span::{FileId, FileRange, HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId};
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize}; use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};
use crate::{db, ExpansionInfo, MacroFileIdExt}; use crate::{db, ExpansionInfo, MacroFileIdExt};
@ -345,7 +342,7 @@ impl InFile<TextRange> {
} }
impl<N: AstNode> InFile<N> { impl<N: AstNode> InFile<N> {
pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> { pub fn original_ast_node_rooted(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files, // This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() { let file_id = match self.file_id.repr() {

View file

@ -1,23 +1,19 @@
//! To make attribute macros work reliably when typing, we need to take care to //! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them. //! fix up syntax errors in the code we're passing to them.
use base_db::{
span::{ErasedFileAstId, SpanAnchor, SpanData},
FileId,
};
use la_arena::RawIdx;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec; use smallvec::SmallVec;
use span::{ErasedFileAstId, Span, SpanAnchor, SpanData, FIXUP_ERASED_FILE_AST_ID_MARKER};
use stdx::never; use stdx::never;
use syntax::{ use syntax::{
ast::{self, AstNode, HasLoopBody}, ast::{self, AstNode, HasLoopBody},
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize, match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
}; };
use triomphe::Arc; use triomphe::Arc;
use tt::{Spacing, Span}; use tt::Spacing;
use crate::{ use crate::{
span::SpanMapRef, span_map::SpanMapRef,
tt::{Ident, Leaf, Punct, Subtree}, tt::{Ident, Leaf, Punct, Subtree},
}; };
@ -42,28 +38,30 @@ impl SyntaxFixupUndoInfo {
pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None }; pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
} }
// censoring -> just don't convert the node // We mark spans with `FIXUP_DUMMY_AST_ID` to indicate that they are fake.
// replacement -> censor + append const FIXUP_DUMMY_AST_ID: ErasedFileAstId = FIXUP_ERASED_FILE_AST_ID_MARKER;
// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
// to remove later
const FIXUP_DUMMY_FILE: FileId = FileId::from_raw(FileId::MAX_FILE_ID);
const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::from_u32(!0));
const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0)); const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
// If the fake span has this range end, that means that the range start is an index into the
// `original` list in `SyntaxFixupUndoInfo`.
const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0); const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);
pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups { pub(crate) fn fixup_syntax(
span_map: SpanMapRef<'_>,
node: &SyntaxNode,
call_site: Span,
) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default(); let mut append = FxHashMap::<SyntaxElement, _>::default();
let mut remove = FxHashSet::<SyntaxNode>::default(); let mut remove = FxHashSet::<SyntaxNode>::default();
let mut preorder = node.preorder(); let mut preorder = node.preorder();
let mut original = Vec::new(); let mut original = Vec::new();
let dummy_range = FIXUP_DUMMY_RANGE; let dummy_range = FIXUP_DUMMY_RANGE;
// we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as let fake_span = |range| {
// the index into the replacement vec but only if the end points to !0 let span = span_map.span_for_range(range);
let dummy_anchor = SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID }; SpanData {
let fake_span = |range| SpanData {
range: dummy_range, range: dummy_range,
anchor: dummy_anchor, anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
ctx: span_map.span_for_range(range).ctx, ctx: span.ctx,
}
}; };
while let Some(event) = preorder.next() { while let Some(event) = preorder.next() {
let syntax::WalkEvent::Enter(node) = event else { continue }; let syntax::WalkEvent::Enter(node) = event else { continue };
@ -72,15 +70,16 @@ pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> Synta
if can_handle_error(&node) && has_error_to_handle(&node) { if can_handle_error(&node) && has_error_to_handle(&node) {
remove.insert(node.clone().into()); remove.insert(node.clone().into());
// the node contains an error node, we have to completely replace it by something valid // the node contains an error node, we have to completely replace it by something valid
let original_tree = mbe::syntax_node_to_token_tree(&node, span_map); let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site);
let idx = original.len() as u32; let idx = original.len() as u32;
original.push(original_tree); original.push(original_tree);
let span = span_map.span_for_range(node_range);
let replacement = Leaf::Ident(Ident { let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(), text: "__ra_fixup".into(),
span: SpanData { span: SpanData {
range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END), range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
anchor: dummy_anchor, anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
ctx: span_map.span_for_range(node_range).ctx, ctx: span.ctx,
}, },
}); });
append.insert(node.clone().into(), vec![replacement]); append.insert(node.clone().into(), vec![replacement]);
@ -301,9 +300,10 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) { pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
let Some(undo_info) = undo_info.original.as_deref() else { return }; let Some(undo_info) = undo_info.original.as_deref() else { return };
let undo_info = &**undo_info; let undo_info = &**undo_info;
#[allow(deprecated)]
if never!( if never!(
tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
|| tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
) { ) {
tt.delimiter.close = SpanData::DUMMY; tt.delimiter.close = SpanData::DUMMY;
tt.delimiter.open = SpanData::DUMMY; tt.delimiter.open = SpanData::DUMMY;
@ -319,7 +319,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
.filter(|tt| match tt { .filter(|tt| match tt {
tt::TokenTree::Leaf(leaf) => { tt::TokenTree::Leaf(leaf) => {
let span = leaf.span(); let span = leaf.span();
let is_real_leaf = span.anchor.file_id != FIXUP_DUMMY_FILE; let is_real_leaf = span.anchor.ast_id != FIXUP_DUMMY_AST_ID;
let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END; let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END;
is_real_leaf || is_replaced_node is_real_leaf || is_replaced_node
} }
@ -327,8 +327,8 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
}) })
.flat_map(|tt| match tt { .flat_map(|tt| match tt {
tt::TokenTree::Subtree(mut tt) => { tt::TokenTree::Subtree(mut tt) => {
if tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE if tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
|| tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
{ {
// Even though fixup never creates subtrees with fixup spans, the old proc-macro server // Even though fixup never creates subtrees with fixup spans, the old proc-macro server
// might copy them if the proc-macro asks for it, so we need to filter those out // might copy them if the proc-macro asks for it, so we need to filter those out
@ -339,7 +339,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
SmallVec::from_const([tt.into()]) SmallVec::from_const([tt.into()])
} }
tt::TokenTree::Leaf(leaf) => { tt::TokenTree::Leaf(leaf) => {
if leaf.span().anchor.file_id == FIXUP_DUMMY_FILE { if leaf.span().anchor.ast_id == FIXUP_DUMMY_AST_ID {
// we have a fake node here, we need to replace it again with the original // we have a fake node here, we need to replace it again with the original
let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone(); let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
if original.delimiter.kind == tt::DelimiterKind::Invisible { if original.delimiter.kind == tt::DelimiterKind::Invisible {
@ -360,11 +360,12 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
mod tests { mod tests {
use base_db::FileId; use base_db::FileId;
use expect_test::{expect, Expect}; use expect_test::{expect, Expect};
use syntax::TextRange;
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
fixup::reverse_fixups, fixup::reverse_fixups,
span::{RealSpanMap, SpanMap}, span_map::{RealSpanMap, SpanMap},
tt, tt,
}; };
@ -397,12 +398,17 @@ mod tests {
fn check(ra_fixture: &str, mut expect: Expect) { fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture); let parsed = syntax::SourceFile::parse(ra_fixture);
let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node()); let fixups = super::fixup_syntax(
span_map.as_ref(),
&parsed.syntax_node(),
span_map.span_for_range(TextRange::empty(0.into())),
);
let mut tt = mbe::syntax_node_to_token_tree_modified( let mut tt = mbe::syntax_node_to_token_tree_modified(
&parsed.syntax_node(), &parsed.syntax_node(),
span_map.as_ref(), span_map.as_ref(),
fixups.append, fixups.append,
fixups.remove, fixups.remove,
span_map.span_for_range(TextRange::empty(0.into())),
); );
let actual = format!("{tt}\n"); let actual = format!("{tt}\n");
@ -422,8 +428,11 @@ mod tests {
// the fixed-up + reversed version should be equivalent to the original input // the fixed-up + reversed version should be equivalent to the original input
// modulo token IDs and `Punct`s' spacing. // modulo token IDs and `Punct`s' spacing.
let original_as_tt = let original_as_tt = mbe::syntax_node_to_token_tree(
mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref()); &parsed.syntax_node(),
span_map.as_ref(),
span_map.span_for_range(TextRange::empty(0.into())),
);
assert!( assert!(
check_subtree_eq(&tt, &original_as_tt), check_subtree_eq(&tt, &original_as_tt),
"different token tree:\n{tt:?}\n\n{original_as_tt:?}" "different token tree:\n{tt:?}\n\n{original_as_tt:?}"

View file

@ -2,9 +2,12 @@
//! //!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`. //! this moment, this is horribly incomplete and handles only `$crate`.
// FIXME: Consider moving this into the span crate.
use std::iter; use std::iter;
use base_db::span::{MacroCallId, SpanData, SyntaxContextId}; use span::{MacroCallId, Span, SyntaxContextId};
use crate::db::ExpandDatabase; use crate::db::ExpandDatabase;
@ -78,37 +81,29 @@ pub enum Transparency {
Opaque, Opaque,
} }
pub fn span_with_def_site_ctxt( pub fn span_with_def_site_ctxt(db: &dyn ExpandDatabase, span: Span, expn_id: MacroCallId) -> Span {
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque) span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque)
} }
pub fn span_with_call_site_ctxt( pub fn span_with_call_site_ctxt(db: &dyn ExpandDatabase, span: Span, expn_id: MacroCallId) -> Span {
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent) span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent)
} }
pub fn span_with_mixed_site_ctxt( pub fn span_with_mixed_site_ctxt(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
span: SpanData, span: Span,
expn_id: MacroCallId, expn_id: MacroCallId,
) -> SpanData { ) -> Span {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent) span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent)
} }
fn span_with_ctxt_from_mark( fn span_with_ctxt_from_mark(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
span: SpanData, span: Span,
expn_id: MacroCallId, expn_id: MacroCallId,
transparency: Transparency, transparency: Transparency,
) -> SpanData { ) -> Span {
SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span } Span { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
} }
pub(super) fn apply_mark( pub(super) fn apply_mark(
@ -121,7 +116,7 @@ pub(super) fn apply_mark(
return apply_mark_internal(db, ctxt, Some(call_id), transparency); return apply_mark_internal(db, ctxt, Some(call_id), transparency);
} }
let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site; let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site.ctx;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db) call_site_ctxt.normalize_to_macros_2_0(db)
} else { } else {
@ -154,15 +149,16 @@ fn apply_mark_internal(
transparency: Transparency, transparency: Transparency,
) -> SyntaxContextId { ) -> SyntaxContextId {
let syntax_context_data = db.lookup_intern_syntax_context(ctxt); let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
let mut opaque = syntax_context_data.opaque; let mut opaque = handle_self_ref(ctxt, syntax_context_data.opaque);
let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent; let mut opaque_and_semitransparent =
handle_self_ref(ctxt, syntax_context_data.opaque_and_semitransparent);
if transparency >= Transparency::Opaque { if transparency >= Transparency::Opaque {
let parent = opaque; let parent = opaque;
// Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with
// salsa when interning, so we use a sentinel value that effectively means the current
// syntax context.
let new_opaque = SyntaxContextId::SELF_REF; let new_opaque = SyntaxContextId::SELF_REF;
// But we can't just grab the to be allocated ID either as that would not deduplicate
// things!
// So we need a new salsa store type here ...
opaque = db.intern_syntax_context(SyntaxContextData { opaque = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id, outer_expn: call_id,
outer_transparency: transparency, outer_transparency: transparency,
@ -174,6 +170,9 @@ fn apply_mark_internal(
if transparency >= Transparency::SemiTransparent { if transparency >= Transparency::SemiTransparent {
let parent = opaque_and_semitransparent; let parent = opaque_and_semitransparent;
// Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with
// salsa when interning, so we use a sentinel value that effectively means the current
// syntax context.
let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF; let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData { opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id, outer_expn: call_id,

View file

@ -6,20 +6,21 @@
#![warn(rust_2018_idioms, unused_lifetimes)] #![warn(rust_2018_idioms, unused_lifetimes)]
pub mod db;
pub mod ast_id_map; pub mod ast_id_map;
pub mod name; pub mod attrs;
pub mod hygiene;
pub mod builtin_attr_macro; pub mod builtin_attr_macro;
pub mod builtin_derive_macro; pub mod builtin_derive_macro;
pub mod builtin_fn_macro; pub mod builtin_fn_macro;
pub mod db;
pub mod eager;
pub mod files;
pub mod change;
pub mod hygiene;
pub mod mod_path;
pub mod name;
pub mod proc_macro; pub mod proc_macro;
pub mod quote; pub mod quote;
pub mod eager; pub mod span_map;
pub mod mod_path;
pub mod attrs;
pub mod span;
pub mod files;
mod fixup; mod fixup;
use attrs::collect_attrs; use attrs::collect_attrs;
@ -27,11 +28,9 @@ use triomphe::Arc;
use std::{fmt, hash::Hash}; use std::{fmt, hash::Hash};
use base_db::{ use base_db::{CrateId, Edition, FileId};
span::{HirFileIdRepr, SpanData, SyntaxContextId},
CrateId, FileId, FileRange, ProcMacroKind,
};
use either::Either; use either::Either;
use span::{FileRange, HirFileIdRepr, Span, SyntaxContextId};
use syntax::{ use syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
SyntaxNode, SyntaxToken, TextRange, TextSize, SyntaxNode, SyntaxToken, TextRange, TextSize,
@ -42,35 +41,86 @@ use crate::{
builtin_attr_macro::BuiltinAttrExpander, builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander, builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::TokenExpander, db::{ExpandDatabase, TokenExpander},
fixup::SyntaxFixupUndoInfo, fixup::SyntaxFixupUndoInfo,
hygiene::SyntaxContextData,
mod_path::ModPath, mod_path::ModPath,
proc_macro::ProcMacroExpander, proc_macro::{CustomProcMacroExpander, ProcMacroKind},
span::{ExpansionSpanMap, SpanMap}, span_map::{ExpansionSpanMap, SpanMap},
}; };
pub use crate::ast_id_map::{AstId, ErasedAstId, ErasedFileAstId}; pub use crate::ast_id_map::{AstId, ErasedAstId, ErasedFileAstId};
pub use crate::files::{InFile, InMacroFile, InRealFile}; pub use crate::files::{InFile, InMacroFile, InRealFile};
pub use base_db::span::{HirFileId, MacroCallId, MacroFileId};
pub use mbe::ValueResult; pub use mbe::ValueResult;
pub use span::{HirFileId, MacroCallId, MacroFileId};
pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::SpanData>; pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::Span>;
pub mod tt { pub mod tt {
pub use base_db::span::SpanData; pub use span::Span;
pub use tt::{DelimiterKind, Spacing, Span, SpanAnchor}; pub use tt::{DelimiterKind, Spacing};
pub type Delimiter = ::tt::Delimiter<SpanData>; pub type Delimiter = ::tt::Delimiter<Span>;
pub type DelimSpan = ::tt::DelimSpan<SpanData>; pub type DelimSpan = ::tt::DelimSpan<Span>;
pub type Subtree = ::tt::Subtree<SpanData>; pub type Subtree = ::tt::Subtree<Span>;
pub type Leaf = ::tt::Leaf<SpanData>; pub type Leaf = ::tt::Leaf<Span>;
pub type Literal = ::tt::Literal<SpanData>; pub type Literal = ::tt::Literal<Span>;
pub type Punct = ::tt::Punct<SpanData>; pub type Punct = ::tt::Punct<Span>;
pub type Ident = ::tt::Ident<SpanData>; pub type Ident = ::tt::Ident<Span>;
pub type TokenTree = ::tt::TokenTree<SpanData>; pub type TokenTree = ::tt::TokenTree<Span>;
} }
#[macro_export]
macro_rules! impl_intern_lookup {
($db:ident, $id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
impl $crate::Intern for $loc {
type Database<'db> = dyn $db + 'db;
type ID = $id;
fn intern<'db>(self, db: &Self::Database<'db>) -> $id {
db.$intern(self)
}
}
impl $crate::Lookup for $id {
type Database<'db> = dyn $db + 'db;
type Data = $loc;
fn lookup<'db>(&self, db: &Self::Database<'db>) -> $loc {
db.$lookup(*self)
}
}
};
}
// ideally these would be defined in base-db, but the orphan rule doesn't let us
pub trait Intern {
type Database<'db>: ?Sized;
type ID;
fn intern<'db>(self, db: &Self::Database<'db>) -> Self::ID;
}
pub trait Lookup {
type Database<'db>: ?Sized;
type Data;
fn lookup<'db>(&self, db: &Self::Database<'db>) -> Self::Data;
}
impl_intern_lookup!(
ExpandDatabase,
MacroCallId,
MacroCallLoc,
intern_macro_call,
lookup_intern_macro_call
);
impl_intern_lookup!(
ExpandDatabase,
SyntaxContextId,
SyntaxContextData,
intern_syntax_context,
lookup_intern_syntax_context
);
pub type ExpandResult<T> = ValueResult<T, ExpandError>; pub type ExpandResult<T> = ValueResult<T, ExpandError>;
#[derive(Debug, PartialEq, Eq, Clone, Hash)] #[derive(Debug, PartialEq, Eq, Clone, Hash)]
@ -117,18 +167,20 @@ pub struct MacroCallLoc {
pub krate: CrateId, pub krate: CrateId,
/// Some if this is a macro call for an eager macro. Note that this is `None` /// Some if this is a macro call for an eager macro. Note that this is `None`
/// for the eager input macro file. /// for the eager input macro file.
// FIXME: This seems bad to save in an interned structure
eager: Option<Arc<EagerCallInfo>>, eager: Option<Arc<EagerCallInfo>>,
pub kind: MacroCallKind, pub kind: MacroCallKind,
pub call_site: SyntaxContextId, pub call_site: Span,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroDefId { pub struct MacroDefId {
pub krate: CrateId, pub krate: CrateId,
pub edition: Edition,
pub kind: MacroDefKind, pub kind: MacroDefKind,
pub local_inner: bool, pub local_inner: bool,
pub allow_internal_unsafe: bool, pub allow_internal_unsafe: bool,
// pub def_site: SyntaxContextId, pub span: Span,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -138,7 +190,7 @@ pub enum MacroDefKind {
BuiltInAttr(BuiltinAttrExpander, AstId<ast::Macro>), BuiltInAttr(BuiltinAttrExpander, AstId<ast::Macro>),
BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>), BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>),
BuiltInEager(EagerExpander, AstId<ast::Macro>), BuiltInEager(EagerExpander, AstId<ast::Macro>),
ProcMacro(ProcMacroExpander, ProcMacroKind, AstId<ast::Fn>), ProcMacro(CustomProcMacroExpander, ProcMacroKind, AstId<ast::Fn>),
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -179,40 +231,39 @@ pub enum MacroCallKind {
pub trait HirFileIdExt { pub trait HirFileIdExt {
/// Returns the original file of this macro call hierarchy. /// Returns the original file of this macro call hierarchy.
fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId; fn original_file(self, db: &dyn ExpandDatabase) -> FileId;
/// Returns the original file of this macro call hierarchy while going into the included file if /// Returns the original file of this macro call hierarchy while going into the included file if
/// one of the calls comes from an `include!``. /// one of the calls comes from an `include!``.
fn original_file_respecting_includes(self, db: &dyn db::ExpandDatabase) -> FileId; fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> FileId;
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in. /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>>; fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
/// Return expansion information if it is a macro-expansion file /// Return expansion information if it is a macro-expansion file
fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo>; fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo>;
fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase) fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>;
-> Option<InFile<ast::Attr>>;
} }
impl HirFileIdExt for HirFileId { impl HirFileIdExt for HirFileId {
fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId { fn original_file(self, db: &dyn ExpandDatabase) -> FileId {
let mut file_id = self; let mut file_id = self;
loop { loop {
match file_id.repr() { match file_id.repr() {
HirFileIdRepr::FileId(id) => break id, HirFileIdRepr::FileId(id) => break id,
HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => { HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id(); file_id = macro_call_id.lookup(db).kind.file_id();
} }
} }
} }
} }
fn original_file_respecting_includes(mut self, db: &dyn db::ExpandDatabase) -> FileId { fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> FileId {
loop { loop {
match self.repr() { match self.repr() {
base_db::span::HirFileIdRepr::FileId(id) => break id, HirFileIdRepr::FileId(id) => break id,
base_db::span::HirFileIdRepr::MacroFile(file) => { HirFileIdRepr::MacroFile(file) => {
let loc = db.lookup_intern_macro_call(file.macro_call_id); let loc = db.lookup_intern_macro_call(file.macro_call_id);
if loc.def.is_include() { if loc.def.is_include() {
if let Some(eager) = &loc.eager { if let Some(eager) = &loc.eager {
@ -231,7 +282,7 @@ impl HirFileIdExt for HirFileId {
} }
} }
fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>> { fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db); let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
loop { loop {
match call.file_id.repr() { match call.file_id.repr() {
@ -246,14 +297,11 @@ impl HirFileIdExt for HirFileId {
} }
/// Return expansion information if it is a macro-expansion file /// Return expansion information if it is a macro-expansion file
fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> { fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo> {
Some(ExpansionInfo::new(db, self.macro_file()?)) Some(ExpansionInfo::new(db, self.macro_file()?))
} }
fn as_builtin_derive_attr_node( fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> {
&self,
db: &dyn db::ExpandDatabase,
) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?; let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let attr = match loc.def.kind { let attr = match loc.def.kind {
@ -265,32 +313,32 @@ impl HirFileIdExt for HirFileId {
} }
pub trait MacroFileIdExt { pub trait MacroFileIdExt {
fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32; fn expansion_level(self, db: &dyn ExpandDatabase) -> u32;
/// If this is a macro call, returns the syntax node of the call. /// If this is a macro call, returns the syntax node of the call.
fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode>; fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode>;
fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo; fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo;
fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool; fn is_builtin_derive(&self, db: &dyn ExpandDatabase) -> bool;
fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool; fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool;
/// Return whether this file is an include macro /// Return whether this file is an include macro
fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool; fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool;
fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool; fn is_eager(&self, db: &dyn ExpandDatabase) -> bool;
/// Return whether this file is an attr macro /// Return whether this file is an attr macro
fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool; fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool;
/// Return whether this file is the pseudo expansion of the derive attribute. /// Return whether this file is the pseudo expansion of the derive attribute.
/// See [`crate::builtin_attr_macro::derive_attr_expand`]. /// See [`crate::builtin_attr_macro::derive_attr_expand`].
fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool; fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool;
} }
impl MacroFileIdExt for MacroFileId { impl MacroFileIdExt for MacroFileId {
fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> { fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
db.lookup_intern_macro_call(self.macro_call_id).to_node(db) db.lookup_intern_macro_call(self.macro_call_id).to_node(db)
} }
fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 { fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 {
let mut level = 0; let mut level = 0;
let mut macro_file = self; let mut macro_file = self;
loop { loop {
@ -305,39 +353,39 @@ impl MacroFileIdExt for MacroFileId {
} }
/// Return expansion information if it is a macro-expansion file /// Return expansion information if it is a macro-expansion file
fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo { fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo {
ExpansionInfo::new(db, self) ExpansionInfo::new(db, self)
} }
fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool {
matches!( matches!(
db.lookup_intern_macro_call(self.macro_call_id).def.kind, db.lookup_intern_macro_call(self.macro_call_id).def.kind,
MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
) )
} }
fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { fn is_builtin_derive(&self, db: &dyn ExpandDatabase) -> bool {
matches!( matches!(
db.lookup_intern_macro_call(self.macro_call_id).def.kind, db.lookup_intern_macro_call(self.macro_call_id).def.kind,
MacroDefKind::BuiltInDerive(..) MacroDefKind::BuiltInDerive(..)
) )
} }
fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_include() db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
} }
fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool { fn is_eager(&self, db: &dyn ExpandDatabase) -> bool {
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
} }
fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool {
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
matches!(loc.kind, MacroCallKind::Attr { .. }) matches!(loc.kind, MacroCallKind::Attr { .. })
} }
fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
loc.def.is_attribute_derive() loc.def.is_attribute_derive()
} }
@ -346,15 +394,15 @@ impl MacroFileIdExt for MacroFileId {
impl MacroDefId { impl MacroDefId {
pub fn as_lazy_macro( pub fn as_lazy_macro(
self, self,
db: &dyn db::ExpandDatabase, db: &dyn ExpandDatabase,
krate: CrateId, krate: CrateId,
kind: MacroCallKind, kind: MacroCallKind,
call_site: SyntaxContextId, call_site: Span,
) -> MacroCallId { ) -> MacroCallId {
db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site }) MacroCallLoc { def: self, krate, eager: None, kind, call_site }.intern(db)
} }
pub fn definition_range(&self, db: &dyn db::ExpandDatabase) -> InFile<TextRange> { pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> {
match self.kind { match self.kind {
MacroDefKind::Declarative(id) MacroDefKind::Declarative(id)
| MacroDefKind::BuiltIn(_, id) | MacroDefKind::BuiltIn(_, id)
@ -419,19 +467,7 @@ impl MacroDefId {
} }
impl MacroCallLoc { impl MacroCallLoc {
pub fn span(&self, db: &dyn db::ExpandDatabase) -> SpanData { pub fn to_node(&self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
let ast_id = self.kind.erased_ast_id();
let file_id = self.kind.file_id();
let range = db.ast_id_map(file_id).get_erased(ast_id).text_range();
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => db.real_span_map(file_id).span_for_range(range),
HirFileIdRepr::MacroFile(m) => {
db.parse_macro_expansion(m).value.1.span_at(range.start())
}
}
}
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> {
match self.kind { match self.kind {
MacroCallKind::FnLike { ast_id, .. } => { MacroCallKind::FnLike { ast_id, .. } => {
ast_id.with_value(ast_id.to_node(db).syntax().clone()) ast_id.with_value(ast_id.to_node(db).syntax().clone())
@ -498,7 +534,7 @@ impl MacroCallKind {
} }
} }
fn erased_ast_id(&self) -> ErasedFileAstId { pub fn erased_ast_id(&self) -> ErasedFileAstId {
match *self { match *self {
MacroCallKind::FnLike { ast_id: InFile { value, .. }, .. } => value.erase(), MacroCallKind::FnLike { ast_id: InFile { value, .. }, .. } => value.erase(),
MacroCallKind::Derive { ast_id: InFile { value, .. }, .. } => value.erase(), MacroCallKind::Derive { ast_id: InFile { value, .. }, .. } => value.erase(),
@ -509,7 +545,7 @@ impl MacroCallKind {
/// Returns the original file range that best describes the location of this macro call. /// Returns the original file range that best describes the location of this macro call.
/// ///
/// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives. /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
pub fn original_call_range_with_body(self, db: &dyn db::ExpandDatabase) -> FileRange { pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self; let mut kind = self;
let file_id = loop { let file_id = loop {
match kind.file_id().repr() { match kind.file_id().repr() {
@ -534,7 +570,7 @@ impl MacroCallKind {
/// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
/// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
/// get only the specific derive that is being referred to. /// get only the specific derive that is being referred to.
pub fn original_call_range(self, db: &dyn db::ExpandDatabase) -> FileRange { pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self; let mut kind = self;
let file_id = loop { let file_id = loop {
match kind.file_id().repr() { match kind.file_id().repr() {
@ -573,7 +609,7 @@ impl MacroCallKind {
FileRange { range, file_id } FileRange { range, file_id }
} }
fn arg(&self, db: &dyn db::ExpandDatabase) -> InFile<Option<SyntaxNode>> { fn arg(&self, db: &dyn ExpandDatabase) -> InFile<Option<SyntaxNode>> {
match self { match self {
MacroCallKind::FnLike { ast_id, .. } => { MacroCallKind::FnLike { ast_id, .. } => {
ast_id.to_in_file_node(db).map(|it| Some(it.token_tree()?.syntax().clone())) ast_id.to_in_file_node(db).map(|it| Some(it.token_tree()?.syntax().clone()))
@ -617,7 +653,7 @@ impl ExpansionInfo {
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call. /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
pub fn map_range_down<'a>( pub fn map_range_down<'a>(
&'a self, &'a self,
span: SpanData, span: Span,
) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + 'a>> { ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + 'a>> {
let tokens = self let tokens = self
.exp_map .exp_map
@ -630,7 +666,7 @@ impl ExpansionInfo {
/// Looks up the span at the given offset. /// Looks up the span at the given offset.
pub fn span_for_offset( pub fn span_for_offset(
&self, &self,
db: &dyn db::ExpandDatabase, db: &dyn ExpandDatabase,
offset: TextSize, offset: TextSize,
) -> (FileRange, SyntaxContextId) { ) -> (FileRange, SyntaxContextId) {
debug_assert!(self.expanded.value.text_range().contains(offset)); debug_assert!(self.expanded.value.text_range().contains(offset));
@ -646,12 +682,12 @@ impl ExpansionInfo {
/// Maps up the text range out of the expansion hierarchy back into the original file its from. /// Maps up the text range out of the expansion hierarchy back into the original file its from.
pub fn map_node_range_up( pub fn map_node_range_up(
&self, &self,
db: &dyn db::ExpandDatabase, db: &dyn ExpandDatabase,
range: TextRange, range: TextRange,
) -> Option<(FileRange, SyntaxContextId)> { ) -> Option<(FileRange, SyntaxContextId)> {
debug_assert!(self.expanded.value.text_range().contains_range(range)); debug_assert!(self.expanded.value.text_range().contains_range(range));
let mut spans = self.exp_map.spans_for_range(range); let mut spans = self.exp_map.spans_for_range(range);
let SpanData { range, anchor, ctx } = spans.next()?; let Span { range, anchor, ctx } = spans.next()?;
let mut start = range.start(); let mut start = range.start();
let mut end = range.end(); let mut end = range.end();
@ -676,7 +712,7 @@ impl ExpansionInfo {
/// Maps up the text range out of the expansion into is macro call. /// Maps up the text range out of the expansion into is macro call.
pub fn map_range_up_once( pub fn map_range_up_once(
&self, &self,
db: &dyn db::ExpandDatabase, db: &dyn ExpandDatabase,
token: TextRange, token: TextRange,
) -> InFile<smallvec::SmallVec<[TextRange; 1]>> { ) -> InFile<smallvec::SmallVec<[TextRange; 1]>> {
debug_assert!(self.expanded.value.text_range().contains_range(token)); debug_assert!(self.expanded.value.text_range().contains_range(token));
@ -705,7 +741,7 @@ impl ExpansionInfo {
} }
} }
pub fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo { pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let arg_tt = loc.kind.arg(db); let arg_tt = loc.kind.arg(db);
@ -718,7 +754,7 @@ impl ExpansionInfo {
let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
( (
Arc::new(tt::Subtree { Arc::new(tt::Subtree {
delimiter: tt::Delimiter::DUMMY_INVISIBLE, delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
token_trees: Vec::new(), token_trees: Vec::new(),
}), }),
SyntaxFixupUndoInfo::NONE, SyntaxFixupUndoInfo::NONE,

View file

@ -9,10 +9,11 @@ use crate::{
db::ExpandDatabase, db::ExpandDatabase,
hygiene::{marks_rev, SyntaxContextExt, Transparency}, hygiene::{marks_rev, SyntaxContextExt, Transparency},
name::{known, AsName, Name}, name::{known, AsName, Name},
span::SpanMapRef, span_map::SpanMapRef,
}; };
use base_db::{span::SyntaxContextId, CrateId}; use base_db::CrateId;
use smallvec::SmallVec; use smallvec::SmallVec;
use span::SyntaxContextId;
use syntax::{ast, AstNode}; use syntax::{ast, AstNode};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]

View file

@ -318,6 +318,10 @@ pub mod known {
new_lower_hex, new_lower_hex,
new_upper_hex, new_upper_hex,
from_usize, from_usize,
panic_2015,
panic_2021,
unreachable_2015,
unreachable_2021,
// Components of known path (type name) // Components of known path (type name)
Iterator, Iterator,
IntoIterator, IntoIterator,
@ -384,6 +388,7 @@ pub mod known {
log_syntax, log_syntax,
module_path, module_path,
option_env, option_env,
quote,
std_panic, std_panic,
stringify, stringify,
trace_macros, trace_macros,

View file

@ -1,18 +1,64 @@
//! Proc Macro Expander stub //! Proc Macro Expander stub
use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; use core::fmt;
use std::{panic::RefUnwindSafe, sync};
use base_db::{CrateId, Env};
use rustc_hash::FxHashMap;
use span::Span;
use stdx::never; use stdx::never;
use syntax::SmolStr;
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32);
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
pub enum ProcMacroKind {
CustomDerive,
FuncLike,
Attr,
}
pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
fn expand(
&self,
subtree: &tt::Subtree,
attrs: Option<&tt::Subtree>,
env: &Env,
def_site: Span,
call_site: Span,
mixed_site: Span,
) -> Result<tt::Subtree, ProcMacroExpansionError>;
}
#[derive(Debug)]
pub enum ProcMacroExpansionError {
Panic(String),
/// Things like "proc macro server was killed by OOM".
System(String),
}
pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, String>;
pub type ProcMacros = FxHashMap<CrateId, ProcMacroLoadResult>;
#[derive(Debug, Clone)]
pub struct ProcMacro {
pub name: SmolStr,
pub kind: ProcMacroKind,
pub expander: sync::Arc<dyn ProcMacroExpander>,
}
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct ProcMacroExpander { pub struct CustomProcMacroExpander {
proc_macro_id: ProcMacroId, proc_macro_id: ProcMacroId,
} }
const DUMMY_ID: u32 = !0; const DUMMY_ID: u32 = !0;
impl ProcMacroExpander { impl CustomProcMacroExpander {
pub fn new(proc_macro_id: ProcMacroId) -> Self { pub fn new(proc_macro_id: ProcMacroId) -> Self {
assert_ne!(proc_macro_id.0, DUMMY_ID); assert_ne!(proc_macro_id.0, DUMMY_ID);
Self { proc_macro_id } Self { proc_macro_id }
@ -33,9 +79,9 @@ impl ProcMacroExpander {
calling_crate: CrateId, calling_crate: CrateId,
tt: &tt::Subtree, tt: &tt::Subtree,
attr_arg: Option<&tt::Subtree>, attr_arg: Option<&tt::Subtree>,
def_site: SpanData, def_site: Span,
call_site: SpanData, call_site: Span,
mixed_site: SpanData, mixed_site: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
match self.proc_macro_id { match self.proc_macro_id {
ProcMacroId(DUMMY_ID) => ExpandResult::new( ProcMacroId(DUMMY_ID) => ExpandResult::new(

View file

@ -1,6 +1,8 @@
//! A simplified version of quote-crate like quasi quote macro //! A simplified version of quote-crate like quasi quote macro
use base_db::span::SpanData; use span::Span;
use crate::name::Name;
// A helper macro quote macro // A helper macro quote macro
// FIXME: // FIXME:
@ -130,12 +132,12 @@ macro_rules! quote {
} }
pub(crate) trait IntoTt { pub(crate) trait IntoTt {
fn to_subtree(self, span: SpanData) -> crate::tt::Subtree; fn to_subtree(self, span: Span) -> crate::tt::Subtree;
fn to_tokens(self) -> Vec<crate::tt::TokenTree>; fn to_tokens(self) -> Vec<crate::tt::TokenTree>;
} }
impl IntoTt for Vec<crate::tt::TokenTree> { impl IntoTt for Vec<crate::tt::TokenTree> {
fn to_subtree(self, span: SpanData) -> crate::tt::Subtree { fn to_subtree(self, span: Span) -> crate::tt::Subtree {
crate::tt::Subtree { crate::tt::Subtree {
delimiter: crate::tt::Delimiter::invisible_spanned(span), delimiter: crate::tt::Delimiter::invisible_spanned(span),
token_trees: self, token_trees: self,
@ -148,7 +150,7 @@ impl IntoTt for Vec<crate::tt::TokenTree> {
} }
impl IntoTt for crate::tt::Subtree { impl IntoTt for crate::tt::Subtree {
fn to_subtree(self, _: SpanData) -> crate::tt::Subtree { fn to_subtree(self, _: Span) -> crate::tt::Subtree {
self self
} }
@ -158,39 +160,39 @@ impl IntoTt for crate::tt::Subtree {
} }
pub(crate) trait ToTokenTree { pub(crate) trait ToTokenTree {
fn to_token(self, span: SpanData) -> crate::tt::TokenTree; fn to_token(self, span: Span) -> crate::tt::TokenTree;
} }
impl ToTokenTree for crate::tt::TokenTree { impl ToTokenTree for crate::tt::TokenTree {
fn to_token(self, _: SpanData) -> crate::tt::TokenTree { fn to_token(self, _: Span) -> crate::tt::TokenTree {
self self
} }
} }
impl ToTokenTree for &crate::tt::TokenTree { impl ToTokenTree for &crate::tt::TokenTree {
fn to_token(self, _: SpanData) -> crate::tt::TokenTree { fn to_token(self, _: Span) -> crate::tt::TokenTree {
self.clone() self.clone()
} }
} }
impl ToTokenTree for crate::tt::Subtree { impl ToTokenTree for crate::tt::Subtree {
fn to_token(self, _: SpanData) -> crate::tt::TokenTree { fn to_token(self, _: Span) -> crate::tt::TokenTree {
self.into() self.into()
} }
} }
macro_rules! impl_to_to_tokentrees { macro_rules! impl_to_to_tokentrees {
($($span:ident: $ty:ty => $this:ident $im:block);*) => { ($($span:ident: $ty:ty => $this:ident $im:block;)*) => {
$( $(
impl ToTokenTree for $ty { impl ToTokenTree for $ty {
fn to_token($this, $span: SpanData) -> crate::tt::TokenTree { fn to_token($this, $span: Span) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.into(); let leaf: crate::tt::Leaf = $im.into();
leaf.into() leaf.into()
} }
} }
impl ToTokenTree for &$ty { impl ToTokenTree for &$ty {
fn to_token($this, $span: SpanData) -> crate::tt::TokenTree { fn to_token($this, $span: Span) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.clone().into(); let leaf: crate::tt::Leaf = $im.clone().into();
leaf.into() leaf.into()
} }
@ -209,20 +211,19 @@ impl_to_to_tokentrees! {
_span: crate::tt::Ident => self { self }; _span: crate::tt::Ident => self { self };
_span: crate::tt::Punct => self { self }; _span: crate::tt::Punct => self { self };
span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}; span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}};
span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}} span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}};
span: Name => self { crate::tt::Ident{text: self.to_smol_str(), span}};
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::tt; use crate::tt;
use base_db::{ use base_db::FileId;
span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use expect_test::expect; use expect_test::expect;
use span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use syntax::{TextRange, TextSize}; use syntax::{TextRange, TextSize};
const DUMMY: tt::SpanData = tt::SpanData { const DUMMY: tt::Span = tt::Span {
range: TextRange::empty(TextSize::new(0)), range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID }, anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::ROOT, ctx: SyntaxContextId::ROOT,

View file

@ -1,124 +0,0 @@
//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well
//! as associating spans with text ranges in a particular file.
use base_db::{
span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize};
use triomphe::Arc;
use crate::db::ExpandDatabase;
pub type ExpansionSpanMap = mbe::SpanMap<SpanData>;
/// Spanmap for a macro file or a real file
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SpanMap {
/// Spanmap for a macro file
ExpansionSpanMap(Arc<ExpansionSpanMap>),
/// Spanmap for a real file
RealSpanMap(Arc<RealSpanMap>),
}
#[derive(Copy, Clone)]
pub enum SpanMapRef<'a> {
/// Spanmap for a macro file
ExpansionSpanMap(&'a ExpansionSpanMap),
/// Spanmap for a real file
RealSpanMap(&'a RealSpanMap),
}
impl mbe::SpanMapper<SpanData> for SpanMap {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl mbe::SpanMapper<SpanData> for SpanMapRef<'_> {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl mbe::SpanMapper<SpanData> for RealSpanMap {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl SpanMap {
pub fn span_for_range(&self, range: TextRange) -> SpanData {
match self {
Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
Self::RealSpanMap(span_map) => span_map.span_for_range(range),
}
}
pub fn as_ref(&self) -> SpanMapRef<'_> {
match self {
Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map),
Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
}
}
}
impl SpanMapRef<'_> {
pub fn span_for_range(self, range: TextRange) -> SpanData {
match self {
Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
Self::RealSpanMap(span_map) => span_map.span_for_range(range),
}
}
}
#[derive(PartialEq, Eq, Hash, Debug)]
pub struct RealSpanMap {
file_id: FileId,
/// Invariant: Sorted vec over TextSize
// FIXME: SortedVec<(TextSize, ErasedFileAstId)>?
pairs: Box<[(TextSize, ErasedFileAstId)]>,
end: TextSize,
}
impl RealSpanMap {
/// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id).
pub fn absolute(file_id: FileId) -> Self {
RealSpanMap {
file_id,
pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]),
end: TextSize::new(!0),
}
}
pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self {
let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(file_id.into());
let tree = db.parse(file_id).tree();
pairs
.extend(tree.items().map(|item| {
(item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())
}));
RealSpanMap {
file_id,
pairs: pairs.into_boxed_slice(),
end: tree.syntax().text_range().end(),
}
}
pub fn span_for_range(&self, range: TextRange) -> SpanData {
assert!(
range.end() <= self.end,
"range {range:?} goes beyond the end of the file {:?}",
self.end
);
let start = range.start();
let idx = self
.pairs
.binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less))
.unwrap_err();
let (offset, ast_id) = self.pairs[idx - 1];
SpanData {
range: range - offset,
anchor: SpanAnchor { file_id: self.file_id, ast_id },
ctx: SyntaxContextId::ROOT,
}
}
}

View file

@ -0,0 +1,65 @@
//! Span maps for real files and macro expansions.
use span::Span;
use syntax::TextRange;
use triomphe::Arc;
pub use span::RealSpanMap;
pub type ExpansionSpanMap = span::SpanMap<Span>;
/// Spanmap for a macro file or a real file
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SpanMap {
/// Spanmap for a macro file
ExpansionSpanMap(Arc<ExpansionSpanMap>),
/// Spanmap for a real file
RealSpanMap(Arc<RealSpanMap>),
}
#[derive(Copy, Clone)]
pub enum SpanMapRef<'a> {
/// Spanmap for a macro file
ExpansionSpanMap(&'a ExpansionSpanMap),
/// Spanmap for a real file
RealSpanMap(&'a RealSpanMap),
}
impl mbe::SpanMapper<Span> for SpanMap {
fn span_for(&self, range: TextRange) -> Span {
self.span_for_range(range)
}
}
impl mbe::SpanMapper<Span> for SpanMapRef<'_> {
fn span_for(&self, range: TextRange) -> Span {
self.span_for_range(range)
}
}
impl SpanMap {
pub fn span_for_range(&self, range: TextRange) -> Span {
match self {
// FIXME: Is it correct for us to only take the span at the start? This feels somewhat
// wrong. The context will be right, but the range could be considered wrong. See
// https://github.com/rust-lang/rust/issues/23480, we probably want to fetch the span at
// the start and end, then merge them like rustc does in `Span::to
Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
Self::RealSpanMap(span_map) => span_map.span_for_range(range),
}
}
pub fn as_ref(&self) -> SpanMapRef<'_> {
match self {
Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map),
Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
}
}
}
impl SpanMapRef<'_> {
pub fn span_for_range(self, range: TextRange) -> Span {
match self {
Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
Self::RealSpanMap(span_map) => span_map.span_for_range(range),
}
}
}

View file

@ -14,14 +14,14 @@ doctest = false
[dependencies] [dependencies]
cov-mark = "2.0.0-pre.1" cov-mark = "2.0.0-pre.1"
itertools.workspace = true itertools.workspace = true
arrayvec = "0.7.2" arrayvec.workspace = true
bitflags.workspace = true bitflags.workspace = true
smallvec.workspace = true smallvec.workspace = true
ena = "0.14.0" ena = "0.14.0"
either.workspace = true either.workspace = true
oorandom = "11.1.3" oorandom = "11.1.3"
tracing.workspace = true tracing.workspace = true
rustc-hash = "1.1.0" rustc-hash.workspace = true
scoped-tls = "1.0.0" scoped-tls = "1.0.0"
chalk-solve = { version = "0.95.0", default-features = false } chalk-solve = { version = "0.95.0", default-features = false }
chalk-ir = "0.95.0" chalk-ir = "0.95.0"
@ -54,6 +54,10 @@ project-model = { path = "../project-model" }
# local deps # local deps
test-utils.workspace = true test-utils.workspace = true
test-fixture.workspace = true
[features] [features]
in-rust-tree = ["rustc-dependencies/in-rust-tree"] in-rust-tree = ["rustc-dependencies/in-rust-tree"]
[lints]
workspace = true

View file

@ -1,6 +1,7 @@
use base_db::{fixture::WithFixture, FileId}; use base_db::FileId;
use chalk_ir::Substitution; use chalk_ir::Substitution;
use hir_def::db::DefDatabase; use hir_def::db::DefDatabase;
use test_fixture::WithFixture;
use test_utils::skip_slow_tests; use test_utils::skip_slow_tests;
use crate::{ use crate::{

View file

@ -217,6 +217,10 @@ pub enum InferenceDiagnostic {
name: Name, name: Name,
/// Contains the type the field resolves to /// Contains the type the field resolves to
field_with_same_name: Option<Ty>, field_with_same_name: Option<Ty>,
assoc_func_with_same_name: Option<AssocItemId>,
},
UnresolvedAssocItem {
id: ExprOrPatId,
}, },
// FIXME: This should be emitted in body lowering // FIXME: This should be emitted in body lowering
BreakOutsideOfLoop { BreakOutsideOfLoop {
@ -1200,6 +1204,12 @@ impl<'a> InferenceContext<'a> {
path: &ModPath, path: &ModPath,
) -> (Ty, Option<VariantId>) { ) -> (Ty, Option<VariantId>) {
let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0); let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0);
let ty = match ty.kind(Interner) {
TyKind::Alias(AliasTy::Projection(proj_ty)) => {
self.db.normalize_projection(proj_ty.clone(), self.table.trait_env.clone())
}
_ => ty,
};
match remaining { match remaining {
None => { None => {
let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id { let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id {

View file

@ -1575,11 +1575,30 @@ impl InferenceContext<'_> {
} }
None => None, None => None,
}; };
let assoc_func_with_same_name = method_resolution::iterate_method_candidates(
&canonicalized_receiver.value,
self.db,
self.table.trait_env.clone(),
self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()),
Some(method_name),
method_resolution::LookupMode::Path,
|_ty, item, visible| {
if visible {
Some(item)
} else {
None
}
},
);
self.result.diagnostics.push(InferenceDiagnostic::UnresolvedMethodCall { self.result.diagnostics.push(InferenceDiagnostic::UnresolvedMethodCall {
expr: tgt_expr, expr: tgt_expr,
receiver: receiver_ty.clone(), receiver: receiver_ty.clone(),
name: method_name.clone(), name: method_name.clone(),
field_with_same_name: field_with_same_name_exists, field_with_same_name: field_with_same_name_exists,
assoc_func_with_same_name,
}); });
( (
receiver_ty, receiver_ty,

View file

@ -340,6 +340,9 @@ impl InferenceContext<'_> {
}, },
); );
let res = res.or(not_visible); let res = res.or(not_visible);
if res.is_none() {
self.push_diagnostic(InferenceDiagnostic::UnresolvedAssocItem { id });
}
let (item, visible) = res?; let (item, visible) = res?;
let (def, container) = match item { let (def, container) = match item {

View file

@ -1,9 +1,9 @@
use std::collections::HashMap; use std::collections::HashMap;
use base_db::fixture::WithFixture;
use chalk_ir::{AdtId, TyKind}; use chalk_ir::{AdtId, TyKind};
use either::Either; use either::Either;
use hir_def::db::DefDatabase; use hir_def::db::DefDatabase;
use test_fixture::WithFixture;
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{

View file

@ -1,6 +1,7 @@
use base_db::{fixture::WithFixture, FileId}; use base_db::FileId;
use hir_def::db::DefDatabase; use hir_def::db::DefDatabase;
use syntax::{TextRange, TextSize}; use syntax::{TextRange, TextSize};
use test_fixture::WithFixture;
use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution}; use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution};

View file

@ -12,7 +12,7 @@ mod diagnostics;
use std::{collections::HashMap, env}; use std::{collections::HashMap, env};
use base_db::{fixture::WithFixture, FileRange, SourceDatabaseExt}; use base_db::{FileRange, SourceDatabaseExt};
use expect_test::Expect; use expect_test::Expect;
use hir_def::{ use hir_def::{
body::{Body, BodySourceMap, SyntheticSyntax}, body::{Body, BodySourceMap, SyntheticSyntax},
@ -30,6 +30,7 @@ use syntax::{
ast::{self, AstNode, HasName}, ast::{self, AstNode, HasName},
SyntaxNode, SyntaxNode,
}; };
use test_fixture::WithFixture;
use tracing_subscriber::{layer::SubscriberExt, Registry}; use tracing_subscriber::{layer::SubscriberExt, Registry};
use tracing_tree::HierarchicalLayer; use tracing_tree::HierarchicalLayer;
use triomphe::Arc; use triomphe::Arc;

View file

@ -1,4 +1,5 @@
use base_db::{fixture::WithFixture, SourceDatabaseExt}; use base_db::SourceDatabaseExt;
use test_fixture::WithFixture;
use triomphe::Arc; use triomphe::Arc;
use crate::{db::HirDatabase, test_db::TestDB}; use crate::{db::HirDatabase, test_db::TestDB};

View file

@ -1154,6 +1154,40 @@ fn main() {
); );
} }
#[test]
fn generic_alias_with_qualified_path() {
check_types(
r#"
type Wrap<T> = T;
struct S;
trait Schematic {
type Props;
}
impl Schematic for S {
type Props = X;
}
enum X {
A { cool: u32, stuff: u32 },
B,
}
fn main() {
let wrapped = Wrap::<<S as Schematic>::Props>::A {
cool: 100,
stuff: 100,
};
if let Wrap::<<S as Schematic>::Props>::A { cool, ..} = &wrapped {}
//^^^^ &u32
}
"#,
);
}
#[test] #[test]
fn type_mismatch_pat_const_reference() { fn type_mismatch_pat_const_reference() {
check_no_mismatches( check_no_mismatches(

View file

@ -12,9 +12,9 @@ rust-version.workspace = true
doctest = false doctest = false
[dependencies] [dependencies]
rustc-hash = "1.1.0" rustc-hash.workspace = true
either.workspace = true either.workspace = true
arrayvec = "0.7.2" arrayvec.workspace = true
itertools.workspace = true itertools.workspace = true
smallvec.workspace = true smallvec.workspace = true
triomphe.workspace = true triomphe.workspace = true
@ -33,3 +33,6 @@ tt.workspace = true
[features] [features]
in-rust-tree = [] in-rust-tree = []
[lints]
workspace = true

View file

@ -11,7 +11,7 @@ use hir_def::{
}; };
use hir_expand::{ use hir_expand::{
name::Name, name::Name,
span::{RealSpanMap, SpanMapRef}, span_map::{RealSpanMap, SpanMapRef},
}; };
use hir_ty::db::HirDatabase; use hir_ty::db::HirDatabase;
use syntax::{ast, AstNode}; use syntax::{ast, AstNode};

View file

@ -24,6 +24,6 @@ pub use hir_def::db::{
pub use hir_expand::db::{ pub use hir_expand::db::{
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery, ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacrosQuery, RealSpanMapQuery,
}; };
pub use hir_ty::db::*; pub use hir_ty::db::*;

View file

@ -8,7 +8,7 @@ pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
use base_db::CrateId; use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions}; use cfg::{CfgExpr, CfgOptions};
use either::Either; use either::Either;
use hir_def::path::ModPath; use hir_def::{path::ModPath, AssocItemId};
use hir_expand::{name::Name, HirFileId, InFile}; use hir_expand::{name::Name, HirFileId, InFile};
use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange}; use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
@ -62,6 +62,7 @@ diagnostics![
UndeclaredLabel, UndeclaredLabel,
UnimplementedBuiltinMacro, UnimplementedBuiltinMacro,
UnreachableLabel, UnreachableLabel,
UnresolvedAssocItem,
UnresolvedExternCrate, UnresolvedExternCrate,
UnresolvedField, UnresolvedField,
UnresolvedImport, UnresolvedImport,
@ -215,6 +216,12 @@ pub struct UnresolvedMethodCall {
pub receiver: Type, pub receiver: Type,
pub name: Name, pub name: Name,
pub field_with_same_name: Option<Type>, pub field_with_same_name: Option<Type>,
pub assoc_func_with_same_name: Option<AssocItemId>,
}
#[derive(Debug)]
pub struct UnresolvedAssocItem {
pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, Either<ast::Pat, ast::SelfParam>>>>,
} }
#[derive(Debug)] #[derive(Debug)]

View file

@ -37,7 +37,7 @@ mod display;
use std::{iter, mem::discriminant, ops::ControlFlow}; use std::{iter, mem::discriminant, ops::ControlFlow};
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind}; use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId};
use either::Either; use either::Either;
use hir_def::{ use hir_def::{
body::{BodyDiagnostic, SyntheticSyntax}, body::{BodyDiagnostic, SyntheticSyntax},
@ -47,7 +47,6 @@ use hir_def::{
item_tree::ItemTreeNode, item_tree::ItemTreeNode,
lang_item::LangItemTarget, lang_item::LangItemTarget,
layout::{self, ReprOptions, TargetDataLayout}, layout::{self, ReprOptions, TargetDataLayout},
macro_id_to_def_id,
nameres::{self, diagnostics::DefDiagnostic}, nameres::{self, diagnostics::DefDiagnostic},
path::ImportAlias, path::ImportAlias,
per_ns::PerNs, per_ns::PerNs,
@ -59,7 +58,7 @@ use hir_def::{
Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId,
TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
}; };
use hir_expand::{attrs::collect_attrs, name::name, MacroCallKind}; use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, MacroCallKind};
use hir_ty::{ use hir_ty::{
all_super_traits, autoderef, check_orphan_rules, all_super_traits, autoderef, check_orphan_rules,
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt}, consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
@ -125,8 +124,10 @@ pub use {
}, },
hir_expand::{ hir_expand::{
attrs::{Attr, AttrId}, attrs::{Attr, AttrId},
change::Change,
hygiene::{marks_rev, SyntaxContextExt}, hygiene::{marks_rev, SyntaxContextExt},
name::{known, Name}, name::{known, Name},
proc_macro::ProcMacros,
tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId, tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
MacroFileIdExt, MacroFileIdExt,
}, },
@ -146,7 +147,7 @@ use {
hir_def::path::Path, hir_def::path::Path,
hir_expand::{ hir_expand::{
name::AsName, name::AsName,
span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef}, span_map::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
}, },
}; };
@ -808,7 +809,7 @@ impl Module {
} }
fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, m: Macro) { fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, m: Macro) {
let id = macro_id_to_def_id(db.upcast(), m.id); let id = db.macro_def(m.id);
if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) { if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) {
if let Some(e) = expander.mac.err() { if let Some(e) = expander.mac.err() {
let Some(ast) = id.ast_id().left() else { let Some(ast) = id.ast_id().left() else {
@ -1679,6 +1680,7 @@ impl DefWithBody {
receiver, receiver,
name, name,
field_with_same_name, field_with_same_name,
assoc_func_with_same_name,
} => { } => {
let expr = expr_syntax(*expr); let expr = expr_syntax(*expr);
@ -1690,10 +1692,18 @@ impl DefWithBody {
field_with_same_name: field_with_same_name field_with_same_name: field_with_same_name
.clone() .clone()
.map(|ty| Type::new(db, DefWithBodyId::from(self), ty)), .map(|ty| Type::new(db, DefWithBodyId::from(self), ty)),
assoc_func_with_same_name: assoc_func_with_same_name.clone(),
} }
.into(), .into(),
) )
} }
&hir_ty::InferenceDiagnostic::UnresolvedAssocItem { id } => {
let expr_or_pat = match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
};
acc.push(UnresolvedAssocItem { expr_or_pat }.into())
}
&hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { &hir_ty::InferenceDiagnostic::BreakOutsideOfLoop {
expr, expr,
is_break, is_break,
@ -2784,9 +2794,13 @@ impl AsAssocItem for DefWithBody {
} }
} }
fn as_assoc_item<ID, DEF, CTOR, AST>(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option<AssocItem> fn as_assoc_item<'db, ID, DEF, CTOR, AST>(
db: &(dyn HirDatabase + 'db),
ctor: CTOR,
id: ID,
) -> Option<AssocItem>
where where
ID: Lookup<Data = AssocItemLoc<AST>>, ID: Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<AST>>,
DEF: From<ID>, DEF: From<ID>,
CTOR: FnOnce(DEF) -> AssocItem, CTOR: FnOnce(DEF) -> AssocItem,
AST: ItemTreeNode, AST: ItemTreeNode,
@ -3520,7 +3534,7 @@ impl Impl {
let src = self.source(db)?; let src = self.source(db)?;
let macro_file = src.file_id.macro_file()?; let macro_file = src.file_id.macro_file()?;
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc = macro_file.macro_call_id.lookup(db.upcast());
let (derive_attr, derive_index) = match loc.kind { let (derive_attr, derive_index) = match loc.kind {
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => { MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
let module_id = self.id.lookup(db.upcast()).container; let module_id = self.id.lookup(db.upcast()).container;
@ -4652,6 +4666,9 @@ impl Callable {
pub fn return_type(&self) -> Type { pub fn return_type(&self) -> Type {
self.ty.derived(self.sig.ret().clone()) self.ty.derived(self.sig.ret().clone())
} }
pub fn sig(&self) -> &CallableSig {
&self.sig
}
} }
fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option<ast::ClosureExpr> { fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option<ast::ClosureExpr> {

View file

@ -13,7 +13,6 @@ use either::Either;
use hir_def::{ use hir_def::{
hir::Expr, hir::Expr,
lower::LowerCtx, lower::LowerCtx,
macro_id_to_def_id,
nameres::MacroSubNs, nameres::MacroSubNs,
resolver::{self, HasResolver, Resolver, TypeNs}, resolver::{self, HasResolver, Resolver, TypeNs},
type_ref::Mutability, type_ref::Mutability,
@ -40,8 +39,8 @@ use crate::{
source_analyzer::{resolve_hir_path, SourceAnalyzer}, source_analyzer::{resolve_hir_path, SourceAnalyzer},
Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate,
DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local,
Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, ToolModule, Trait, Type, Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Struct, ToolModule, Trait,
TypeAlias, TypeParam, VariantDef, Type, TypeAlias, TypeParam, VariantDef,
}; };
pub enum DescendPreference { pub enum DescendPreference {
@ -229,6 +228,14 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> { pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
self.imp.to_module_def(file) self.imp.to_module_def(file)
} }
pub fn to_struct_def(&self, s: &ast::Struct) -> Option<Struct> {
self.imp.to_def(s).map(Struct::from)
}
pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
self.imp.to_def(i).map(Impl::from)
}
} }
impl<'db> SemanticsImpl<'db> { impl<'db> SemanticsImpl<'db> {
@ -341,9 +348,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = InFile::new(file_id, actual_macro_call); let macro_call = InFile::new(file_id, actual_macro_call);
let krate = resolver.krate(); let krate = resolver.krate();
let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
resolver resolver.resolve_path_as_macro_def(self.db.upcast(), &path, Some(MacroSubNs::Bang))
.resolve_path_as_macro(self.db.upcast(), &path, Some(MacroSubNs::Bang))
.map(|(it, _)| macro_id_to_def_id(self.db.upcast(), it))
})?; })?;
hir_expand::db::expand_speculative( hir_expand::db::expand_speculative(
self.db.upcast(), self.db.upcast(),
@ -512,8 +517,7 @@ impl<'db> SemanticsImpl<'db> {
} }
/// Descend the token into its macro call if it is part of one, returning the tokens in the /// Descend the token into its macro call if it is part of one, returning the tokens in the
/// expansion that it is associated with. If `offset` points into the token's range, it will /// expansion that it is associated with.
/// be considered for the mapping in case of inline format args.
pub fn descend_into_macros( pub fn descend_into_macros(
&self, &self,
mode: DescendPreference, mode: DescendPreference,
@ -674,7 +678,7 @@ impl<'db> SemanticsImpl<'db> {
_ => 0, _ => 0,
}; };
// FIXME: here, the attribute's text range is used to strip away all // FIXME: here, the attribute's text range is used to strip away all
// entries from the start of the attribute "list" up the the invoking // entries from the start of the attribute "list" up the invoking
// attribute. But in // attribute. But in
// ``` // ```
// mod foo { // mod foo {
@ -850,7 +854,7 @@ impl<'db> SemanticsImpl<'db> {
/// Attempts to map the node out of macro expanded files. /// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input. /// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> { pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map( self.wrap_node_infile(node).original_ast_node_rooted(self.db.upcast()).map(
|InRealFile { file_id, value }| { |InRealFile { file_id, value }| {
self.cache(find_root(value.syntax()), file_id.into()); self.cache(find_root(value.syntax()), file_id.into());
value value

View file

@ -16,7 +16,6 @@ use hir_def::{
hir::{BindingId, ExprId, Pat, PatId}, hir::{BindingId, ExprId, Pat, PatId},
lang_item::LangItem, lang_item::LangItem,
lower::LowerCtx, lower::LowerCtx,
macro_id_to_def_id,
nameres::MacroSubNs, nameres::MacroSubNs,
path::{ModPath, Path, PathKind}, path::{ModPath, Path, PathKind},
resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
@ -771,9 +770,7 @@ impl SourceAnalyzer {
) -> Option<MacroFileId> { ) -> Option<MacroFileId> {
let krate = self.resolver.krate(); let krate = self.resolver.krate();
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
self.resolver self.resolver.resolve_path_as_macro_def(db.upcast(), &path, Some(MacroSubNs::Bang))
.resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang))
.map(|(it, _)| macro_id_to_def_id(db.upcast(), it))
})?; })?;
// why the 64? // why the 64?
Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64) Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
@ -1163,9 +1160,40 @@ fn resolve_hir_path_qualifier(
resolver: &Resolver, resolver: &Resolver,
path: &Path, path: &Path,
) -> Option<PathResolution> { ) -> Option<PathResolution> {
resolver (|| {
.resolve_path_in_type_ns_fully(db.upcast(), &path) let (ty, unresolved) = match path.type_anchor() {
.map(|ty| match ty { Some(type_ref) => {
let (_, res) =
TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner())
.lower_ty_ext(type_ref);
res.map(|ty_ns| (ty_ns, path.segments().first()))
}
None => {
let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?;
match remaining_idx {
Some(remaining_idx) => {
if remaining_idx + 1 == path.segments().len() {
Some((ty, path.segments().last()))
} else {
None
}
}
None => Some((ty, None)),
}
}
}?;
// If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type
// within the trait's associated types.
if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
if let Some(type_alias_id) =
db.trait_data(trait_id).associated_type_by_name(unresolved.name)
{
return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
}
}
let res = match ty {
TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()), TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => { TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
@ -1176,7 +1204,24 @@ fn resolve_hir_path_qualifier(
TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()), TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()), TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()),
};
match unresolved {
Some(unresolved) => resolver
.generic_def()
.and_then(|def| {
hir_ty::associated_type_shorthand_candidates(
db,
def,
res.in_type_ns()?,
|name, id| (name == unresolved.name).then_some(id),
)
}) })
.map(TypeAlias::from)
.map(Into::into)
.map(PathResolution::Def),
None => Some(res),
}
})()
.or_else(|| { .or_else(|| {
resolver resolver
.resolve_module_path_in_items(db.upcast(), path.mod_path()?) .resolve_module_path_in_items(db.upcast(), path.mod_path()?)

View file

@ -2,13 +2,14 @@
use base_db::FileRange; use base_db::FileRange;
use hir_def::{ use hir_def::{
db::DefDatabase,
item_scope::ItemInNs, item_scope::ItemInNs,
src::{HasChildSource, HasSource}, src::{HasChildSource, HasSource},
AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId, AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId,
TraitId, TraitId,
}; };
use hir_expand::{HirFileId, InFile}; use hir_expand::{HirFileId, InFile};
use hir_ty::db::HirDatabase; use hir_ty::{db::HirDatabase, display::HirDisplay};
use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr}; use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr};
use crate::{Module, ModuleDef, Semantics}; use crate::{Module, ModuleDef, Semantics};
@ -230,9 +231,12 @@ impl<'a> SymbolCollector<'a> {
fn collect_from_impl(&mut self, impl_id: ImplId) { fn collect_from_impl(&mut self, impl_id: ImplId) {
let impl_data = self.db.impl_data(impl_id); let impl_data = self.db.impl_data(impl_id);
let impl_name = Some(SmolStr::new(impl_data.self_ty.display(self.db).to_string()));
self.with_container_name(impl_name, |s| {
for &assoc_item_id in &impl_data.items { for &assoc_item_id in &impl_data.items {
self.push_assoc_item(assoc_item_id) s.push_assoc_item(assoc_item_id)
} }
})
} }
fn collect_from_trait(&mut self, trait_id: TraitId) { fn collect_from_trait(&mut self, trait_id: TraitId) {
@ -274,9 +278,9 @@ impl<'a> SymbolCollector<'a> {
} }
} }
fn push_decl<L>(&mut self, id: L, is_assoc: bool) fn push_decl<'db, L>(&mut self, id: L, is_assoc: bool)
where where
L: Lookup + Into<ModuleDefId>, L: Lookup<Database<'db> = dyn DefDatabase + 'db> + Into<ModuleDefId>,
<L as Lookup>::Data: HasSource, <L as Lookup>::Data: HasSource,
<<L as Lookup>::Data as HasSource>::Value: HasName, <<L as Lookup>::Data as HasSource>::Value: HasName,
{ {

View file

@ -31,7 +31,11 @@ expect-test = "1.4.0"
# local deps # local deps
test-utils.workspace = true test-utils.workspace = true
test-fixture.workspace = true
sourcegen.workspace = true sourcegen.workspace = true
[features] [features]
in-rust-tree = [] in-rust-tree = []
[lints]
workspace = true

View file

@ -281,11 +281,8 @@ mod tests {
use super::*; use super::*;
use hir::Semantics; use hir::Semantics;
use ide_db::{ use ide_db::{assists::AssistResolveStrategy, base_db::FileRange, RootDatabase};
assists::AssistResolveStrategy, use test_fixture::WithFixture;
base_db::{fixture::WithFixture, FileRange},
RootDatabase,
};
use crate::tests::{ use crate::tests::{
check_assist, check_assist_by_label, check_assist_not_applicable, check_assist_target, check_assist, check_assist_by_label, check_assist_not_applicable, check_assist_target,

View file

@ -16,11 +16,14 @@ use syntax::{
edit_in_place::{AttrsOwnerEdit, Indent}, edit_in_place::{AttrsOwnerEdit, Indent},
make, HasName, make, HasName,
}, },
ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T,
}; };
use text_edit::TextRange; use text_edit::TextRange;
use crate::assist_context::{AssistContext, Assists}; use crate::{
assist_context::{AssistContext, Assists},
utils,
};
// Assist: bool_to_enum // Assist: bool_to_enum
// //
@ -73,7 +76,7 @@ pub(crate) fn bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
let usages = definition.usages(&ctx.sema).all(); let usages = definition.usages(&ctx.sema).all();
add_enum_def(edit, ctx, &usages, target_node, &target_module); add_enum_def(edit, ctx, &usages, target_node, &target_module);
replace_usages(edit, ctx, &usages, definition, &target_module); replace_usages(edit, ctx, usages, definition, &target_module);
}, },
) )
} }
@ -169,8 +172,8 @@ fn replace_bool_expr(edit: &mut SourceChangeBuilder, expr: ast::Expr) {
/// Converts an expression of type `bool` to one of the new enum type. /// Converts an expression of type `bool` to one of the new enum type.
fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr { fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr {
let true_expr = make::expr_path(make::path_from_text("Bool::True")).clone_for_update(); let true_expr = make::expr_path(make::path_from_text("Bool::True"));
let false_expr = make::expr_path(make::path_from_text("Bool::False")).clone_for_update(); let false_expr = make::expr_path(make::path_from_text("Bool::False"));
if let ast::Expr::Literal(literal) = &expr { if let ast::Expr::Literal(literal) = &expr {
match literal.kind() { match literal.kind() {
@ -184,7 +187,6 @@ fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr {
make::tail_only_block_expr(true_expr), make::tail_only_block_expr(true_expr),
Some(ast::ElseBranch::Block(make::tail_only_block_expr(false_expr))), Some(ast::ElseBranch::Block(make::tail_only_block_expr(false_expr))),
) )
.clone_for_update()
} }
} }
@ -192,21 +194,19 @@ fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr {
fn replace_usages( fn replace_usages(
edit: &mut SourceChangeBuilder, edit: &mut SourceChangeBuilder,
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
usages: &UsageSearchResult, usages: UsageSearchResult,
target_definition: Definition, target_definition: Definition,
target_module: &hir::Module, target_module: &hir::Module,
) { ) {
for (file_id, references) in usages.iter() { for (file_id, references) in usages {
edit.edit_file(*file_id); edit.edit_file(file_id);
let refs_with_imports = let refs_with_imports = augment_references_with_imports(ctx, references, target_module);
augment_references_with_imports(edit, ctx, references, target_module);
refs_with_imports.into_iter().rev().for_each( refs_with_imports.into_iter().rev().for_each(
|FileReferenceWithImport { range, old_name, new_name, import_data }| { |FileReferenceWithImport { range, name, import_data }| {
// replace the usages in patterns and expressions // replace the usages in patterns and expressions
if let Some(ident_pat) = old_name.syntax().ancestors().find_map(ast::IdentPat::cast) if let Some(ident_pat) = name.syntax().ancestors().find_map(ast::IdentPat::cast) {
{
cov_mark::hit!(replaces_record_pat_shorthand); cov_mark::hit!(replaces_record_pat_shorthand);
let definition = ctx.sema.to_def(&ident_pat).map(Definition::Local); let definition = ctx.sema.to_def(&ident_pat).map(Definition::Local);
@ -214,36 +214,35 @@ fn replace_usages(
replace_usages( replace_usages(
edit, edit,
ctx, ctx,
&def.usages(&ctx.sema).all(), def.usages(&ctx.sema).all(),
target_definition, target_definition,
target_module, target_module,
) )
} }
} else if let Some(initializer) = find_assignment_usage(&new_name) { } else if let Some(initializer) = find_assignment_usage(&name) {
cov_mark::hit!(replaces_assignment); cov_mark::hit!(replaces_assignment);
replace_bool_expr(edit, initializer); replace_bool_expr(edit, initializer);
} else if let Some((prefix_expr, inner_expr)) = find_negated_usage(&new_name) { } else if let Some((prefix_expr, inner_expr)) = find_negated_usage(&name) {
cov_mark::hit!(replaces_negation); cov_mark::hit!(replaces_negation);
edit.replace( edit.replace(
prefix_expr.syntax().text_range(), prefix_expr.syntax().text_range(),
format!("{} == Bool::False", inner_expr), format!("{} == Bool::False", inner_expr),
); );
} else if let Some((record_field, initializer)) = old_name } else if let Some((record_field, initializer)) = name
.as_name_ref() .as_name_ref()
.and_then(ast::RecordExprField::for_field_name) .and_then(ast::RecordExprField::for_field_name)
.and_then(|record_field| ctx.sema.resolve_record_field(&record_field)) .and_then(|record_field| ctx.sema.resolve_record_field(&record_field))
.and_then(|(got_field, _, _)| { .and_then(|(got_field, _, _)| {
find_record_expr_usage(&new_name, got_field, target_definition) find_record_expr_usage(&name, got_field, target_definition)
}) })
{ {
cov_mark::hit!(replaces_record_expr); cov_mark::hit!(replaces_record_expr);
let record_field = edit.make_mut(record_field);
let enum_expr = bool_expr_to_enum_expr(initializer); let enum_expr = bool_expr_to_enum_expr(initializer);
record_field.replace_expr(enum_expr); utils::replace_record_field_expr(ctx, edit, record_field, enum_expr);
} else if let Some(pat) = find_record_pat_field_usage(&old_name) { } else if let Some(pat) = find_record_pat_field_usage(&name) {
match pat { match pat {
ast::Pat::IdentPat(ident_pat) => { ast::Pat::IdentPat(ident_pat) => {
cov_mark::hit!(replaces_record_pat); cov_mark::hit!(replaces_record_pat);
@ -253,7 +252,7 @@ fn replace_usages(
replace_usages( replace_usages(
edit, edit,
ctx, ctx,
&def.usages(&ctx.sema).all(), def.usages(&ctx.sema).all(),
target_definition, target_definition,
target_module, target_module,
) )
@ -270,40 +269,44 @@ fn replace_usages(
} }
_ => (), _ => (),
} }
} else if let Some((ty_annotation, initializer)) = find_assoc_const_usage(&new_name) } else if let Some((ty_annotation, initializer)) = find_assoc_const_usage(&name) {
{
edit.replace(ty_annotation.syntax().text_range(), "Bool"); edit.replace(ty_annotation.syntax().text_range(), "Bool");
replace_bool_expr(edit, initializer); replace_bool_expr(edit, initializer);
} else if let Some(receiver) = find_method_call_expr_usage(&new_name) { } else if let Some(receiver) = find_method_call_expr_usage(&name) {
edit.replace( edit.replace(
receiver.syntax().text_range(), receiver.syntax().text_range(),
format!("({} == Bool::True)", receiver), format!("({} == Bool::True)", receiver),
); );
} else if new_name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() { } else if name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() {
// for any other usage in an expression, replace it with a check that it is the true variant // for any other usage in an expression, replace it with a check that it is the true variant
if let Some((record_field, expr)) = new_name if let Some((record_field, expr)) =
.as_name_ref() name.as_name_ref().and_then(ast::RecordExprField::for_field_name).and_then(
.and_then(ast::RecordExprField::for_field_name) |record_field| record_field.expr().map(|expr| (record_field, expr)),
.and_then(|record_field| { )
record_field.expr().map(|expr| (record_field, expr))
})
{ {
record_field.replace_expr( utils::replace_record_field_expr(
ctx,
edit,
record_field,
make::expr_bin_op( make::expr_bin_op(
expr, expr,
ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated: false }), ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated: false }),
make::expr_path(make::path_from_text("Bool::True")), make::expr_path(make::path_from_text("Bool::True")),
) ),
.clone_for_update(),
); );
} else { } else {
edit.replace(range, format!("{} == Bool::True", new_name.text())); edit.replace(range, format!("{} == Bool::True", name.text()));
} }
} }
// add imports across modules where needed // add imports across modules where needed
if let Some((import_scope, path)) = import_data { if let Some((import_scope, path)) = import_data {
insert_use(&import_scope, path, &ctx.config.insert_use); let scope = match import_scope.clone() {
ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
};
insert_use(&scope, path, &ctx.config.insert_use);
} }
}, },
) )
@ -312,37 +315,31 @@ fn replace_usages(
struct FileReferenceWithImport { struct FileReferenceWithImport {
range: TextRange, range: TextRange,
old_name: ast::NameLike, name: ast::NameLike,
new_name: ast::NameLike,
import_data: Option<(ImportScope, ast::Path)>, import_data: Option<(ImportScope, ast::Path)>,
} }
fn augment_references_with_imports( fn augment_references_with_imports(
edit: &mut SourceChangeBuilder,
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
references: &[FileReference], references: Vec<FileReference>,
target_module: &hir::Module, target_module: &hir::Module,
) -> Vec<FileReferenceWithImport> { ) -> Vec<FileReferenceWithImport> {
let mut visited_modules = FxHashSet::default(); let mut visited_modules = FxHashSet::default();
references references
.iter() .into_iter()
.filter_map(|FileReference { range, name, .. }| { .filter_map(|FileReference { range, name, .. }| {
let name = name.clone().into_name_like()?; let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module())) ctx.sema.scope(name.syntax()).map(|scope| (range, name, scope.module()))
}) })
.map(|(range, name, ref_module)| { .map(|(range, name, ref_module)| {
let old_name = name.clone();
let new_name = edit.make_mut(name.clone());
// if the referenced module is not the same as the target one and has not been seen before, add an import // if the referenced module is not the same as the target one and has not been seen before, add an import
let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module
&& !visited_modules.contains(&ref_module) && !visited_modules.contains(&ref_module)
{ {
visited_modules.insert(ref_module); visited_modules.insert(ref_module);
let import_scope = let import_scope = ImportScope::find_insert_use_container(name.syntax(), &ctx.sema);
ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema);
let path = ref_module let path = ref_module
.find_use_path_prefixed( .find_use_path_prefixed(
ctx.sema.db, ctx.sema.db,
@ -360,7 +357,7 @@ fn augment_references_with_imports(
None None
}; };
FileReferenceWithImport { range, old_name, new_name, import_data } FileReferenceWithImport { range, name, import_data }
}) })
.collect() .collect()
} }
@ -405,14 +402,13 @@ fn find_record_expr_usage(
let record_field = ast::RecordExprField::for_field_name(name_ref)?; let record_field = ast::RecordExprField::for_field_name(name_ref)?;
let initializer = record_field.expr()?; let initializer = record_field.expr()?;
if let Definition::Field(expected_field) = target_definition { match target_definition {
if got_field != expected_field { Definition::Field(expected_field) if got_field == expected_field => {
return None;
}
}
Some((record_field, initializer)) Some((record_field, initializer))
} }
_ => None,
}
}
fn find_record_pat_field_usage(name: &ast::NameLike) -> Option<ast::Pat> { fn find_record_pat_field_usage(name: &ast::NameLike) -> Option<ast::Pat> {
let record_pat_field = name.syntax().parent().and_then(ast::RecordPatField::cast)?; let record_pat_field = name.syntax().parent().and_then(ast::RecordPatField::cast)?;
@ -466,12 +462,9 @@ fn add_enum_def(
let indent = IndentLevel::from_node(&insert_before); let indent = IndentLevel::from_node(&insert_before);
enum_def.reindent_to(indent); enum_def.reindent_to(indent);
ted::insert_all( edit.insert(
ted::Position::before(&edit.make_syntax_mut(insert_before)), insert_before.text_range().start(),
vec![ format!("{}\n\n{indent}", enum_def.syntax().text()),
enum_def.syntax().clone().into(),
make::tokens::whitespace(&format!("\n\n{indent}")).into(),
],
); );
} }
@ -800,6 +793,78 @@ fn main() {
) )
} }
#[test]
fn local_var_init_struct_usage() {
check_assist(
bool_to_enum,
r#"
struct Foo {
foo: bool,
}
fn main() {
let $0foo = true;
let s = Foo { foo };
}
"#,
r#"
struct Foo {
foo: bool,
}
#[derive(PartialEq, Eq)]
enum Bool { True, False }
fn main() {
let foo = Bool::True;
let s = Foo { foo: foo == Bool::True };
}
"#,
)
}
#[test]
fn local_var_init_struct_usage_in_macro() {
check_assist(
bool_to_enum,
r#"
struct Struct {
boolean: bool,
}
macro_rules! identity {
($body:expr) => {
$body
}
}
fn new() -> Struct {
let $0boolean = true;
identity![Struct { boolean }]
}
"#,
r#"
struct Struct {
boolean: bool,
}
macro_rules! identity {
($body:expr) => {
$body
}
}
#[derive(PartialEq, Eq)]
enum Bool { True, False }
fn new() -> Struct {
let boolean = Bool::True;
identity![Struct { boolean: boolean == Bool::True }]
}
"#,
)
}
#[test] #[test]
fn field_struct_basic() { fn field_struct_basic() {
cov_mark::check!(replaces_record_expr); cov_mark::check!(replaces_record_expr);
@ -1321,6 +1386,46 @@ fn main() {
) )
} }
#[test]
fn field_in_macro() {
check_assist(
bool_to_enum,
r#"
struct Struct {
$0boolean: bool,
}
fn boolean(x: Struct) {
let Struct { boolean } = x;
}
macro_rules! identity { ($body:expr) => { $body } }
fn new() -> Struct {
identity!(Struct { boolean: true })
}
"#,
r#"
#[derive(PartialEq, Eq)]
enum Bool { True, False }
struct Struct {
boolean: Bool,
}
fn boolean(x: Struct) {
let Struct { boolean } = x;
}
macro_rules! identity { ($body:expr) => { $body } }
fn new() -> Struct {
identity!(Struct { boolean: Bool::True })
}
"#,
)
}
#[test] #[test]
fn field_non_bool() { fn field_non_bool() {
cov_mark::check!(not_applicable_non_bool_field); cov_mark::check!(not_applicable_non_bool_field);

View file

@ -1,12 +1,8 @@
use hir::TypeInfo; use hir::TypeInfo;
use stdx::format_to;
use syntax::{ use syntax::{
ast::{self, AstNode}, ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName},
NodeOrToken, ted, NodeOrToken,
SyntaxKind::{ SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR},
BLOCK_EXPR, BREAK_EXPR, CLOSURE_EXPR, COMMENT, LOOP_EXPR, MATCH_ARM, MATCH_GUARD,
PATH_EXPR, RETURN_EXPR,
},
SyntaxNode, SyntaxNode,
}; };
@ -66,98 +62,140 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
.as_ref() .as_ref()
.map_or(false, |it| matches!(it, ast::Expr::FieldExpr(_) | ast::Expr::MethodCallExpr(_))); .map_or(false, |it| matches!(it, ast::Expr::FieldExpr(_) | ast::Expr::MethodCallExpr(_)));
let reference_modifier = match ty.filter(|_| needs_adjust) {
Some(receiver_type) if receiver_type.is_mutable_reference() => "&mut ",
Some(receiver_type) if receiver_type.is_reference() => "&",
_ => "",
};
let var_modifier = match parent {
Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => "mut ",
_ => "",
};
let anchor = Anchor::from(&to_extract)?; let anchor = Anchor::from(&to_extract)?;
let indent = anchor.syntax().prev_sibling_or_token()?.as_token()?.clone();
let target = to_extract.syntax().text_range(); let target = to_extract.syntax().text_range();
acc.add( acc.add(
AssistId("extract_variable", AssistKind::RefactorExtract), AssistId("extract_variable", AssistKind::RefactorExtract),
"Extract into variable", "Extract into variable",
target, target,
move |edit| { move |edit| {
let field_shorthand = let field_shorthand = to_extract
match to_extract.syntax().parent().and_then(ast::RecordExprField::cast) { .syntax()
Some(field) => field.name_ref(), .parent()
None => None, .and_then(ast::RecordExprField::cast)
.filter(|field| field.name_ref().is_some());
let (var_name, expr_replace) = match field_shorthand {
Some(field) => (field.to_string(), field.syntax().clone()),
None => (
suggest_name::for_variable(&to_extract, &ctx.sema),
to_extract.syntax().clone(),
),
}; };
let mut buf = String::new(); let ident_pat = match parent {
Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => {
make::ident_pat(false, true, make::name(&var_name))
}
_ => make::ident_pat(false, false, make::name(&var_name)),
};
let var_name = match &field_shorthand { let to_extract = match ty.as_ref().filter(|_| needs_adjust) {
Some(it) => it.to_string(), Some(receiver_type) if receiver_type.is_mutable_reference() => {
None => suggest_name::for_variable(&to_extract, &ctx.sema), make::expr_ref(to_extract, true)
}; }
let expr_range = match &field_shorthand { Some(receiver_type) if receiver_type.is_reference() => {
Some(it) => it.syntax().text_range().cover(to_extract.syntax().text_range()), make::expr_ref(to_extract, false)
None => to_extract.syntax().text_range(), }
_ => to_extract,
}; };
let expr_replace = edit.make_syntax_mut(expr_replace);
let let_stmt =
make::let_stmt(ident_pat.into(), None, Some(to_extract)).clone_for_update();
let name_expr = make::expr_path(make::ext::ident_path(&var_name)).clone_for_update();
match anchor { match anchor {
Anchor::Before(_) | Anchor::Replace(_) => { Anchor::Before(place) => {
format_to!(buf, "let {var_modifier}{var_name} = {reference_modifier}") let prev_ws = place.prev_sibling_or_token().and_then(|it| it.into_token());
} let indent_to = IndentLevel::from_node(&place);
Anchor::WrapInBlock(_) => { let insert_place = edit.make_syntax_mut(place);
format_to!(buf, "{{ let {var_name} = {reference_modifier}")
}
};
format_to!(buf, "{to_extract}");
if let Anchor::Replace(stmt) = anchor { // Adjust ws to insert depending on if this is all inline or on separate lines
cov_mark::hit!(test_extract_var_expr_stmt); let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with("\n")) {
if stmt.semicolon_token().is_none() { format!("\n{indent_to}")
buf.push(';');
}
match ctx.config.snippet_cap {
Some(cap) => {
let snip = buf.replace(
&format!("let {var_modifier}{var_name}"),
&format!("let {var_modifier}$0{var_name}"),
);
edit.replace_snippet(cap, expr_range, snip)
}
None => edit.replace(expr_range, buf),
}
return;
}
buf.push(';');
// We want to maintain the indent level,
// but we do not want to duplicate possible
// extra newlines in the indent block
let text = indent.text();
if text.starts_with('\n') {
buf.push('\n');
buf.push_str(text.trim_start_matches('\n'));
} else { } else {
buf.push_str(text); format!(" ")
} };
edit.replace(expr_range, var_name.clone()); ted::insert_all_raw(
let offset = anchor.syntax().text_range().start(); ted::Position::before(insert_place),
match ctx.config.snippet_cap { vec![
Some(cap) => { let_stmt.syntax().clone().into(),
let snip = buf.replace( make::tokens::whitespace(&trailing_ws).into(),
&format!("let {var_modifier}{var_name}"), ],
&format!("let {var_modifier}$0{var_name}"),
); );
edit.insert_snippet(cap, offset, snip)
ted::replace(expr_replace, name_expr.syntax());
if let Some(cap) = ctx.config.snippet_cap {
if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() {
if let Some(name) = ident_pat.name() {
edit.add_tabstop_before(cap, name);
}
}
}
}
Anchor::Replace(stmt) => {
cov_mark::hit!(test_extract_var_expr_stmt);
let stmt_replace = edit.make_mut(stmt);
ted::replace(stmt_replace.syntax(), let_stmt.syntax());
if let Some(cap) = ctx.config.snippet_cap {
if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() {
if let Some(name) = ident_pat.name() {
edit.add_tabstop_before(cap, name);
}
}
}
}
Anchor::WrapInBlock(to_wrap) => {
let indent_to = to_wrap.indent_level();
let block = if to_wrap.syntax() == &expr_replace {
// Since `expr_replace` is the same that needs to be wrapped in a block,
// we can just directly replace it with a block
let block =
make::block_expr([let_stmt.into()], Some(name_expr)).clone_for_update();
ted::replace(expr_replace, block.syntax());
block
} else {
// `expr_replace` is a descendant of `to_wrap`, so both steps need to be
// handled seperately, otherwise we wrap the wrong expression
let to_wrap = edit.make_mut(to_wrap);
// Replace the target expr first so that we don't need to find where
// `expr_replace` is in the wrapped `to_wrap`
ted::replace(expr_replace, name_expr.syntax());
// Wrap `to_wrap` in a block
let block = make::block_expr([let_stmt.into()], Some(to_wrap.clone()))
.clone_for_update();
ted::replace(to_wrap.syntax(), block.syntax());
block
};
if let Some(cap) = ctx.config.snippet_cap {
// Adding a tabstop to `name` requires finding the let stmt again, since
// the existing `let_stmt` is not actually added to the tree
let pat = block.statements().find_map(|stmt| {
let ast::Stmt::LetStmt(let_stmt) = stmt else { return None };
let_stmt.pat()
});
if let Some(ast::Pat::IdentPat(ident_pat)) = pat {
if let Some(name) = ident_pat.name() {
edit.add_tabstop_before(cap, name);
}
} }
None => edit.insert(offset, buf),
} }
if let Anchor::WrapInBlock(_) = anchor { // fixup indentation of block
edit.insert(anchor.syntax().text_range().end(), " }"); block.indent(indent_to);
}
} }
}, },
) )
@ -181,7 +219,7 @@ fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
enum Anchor { enum Anchor {
Before(SyntaxNode), Before(SyntaxNode),
Replace(ast::ExprStmt), Replace(ast::ExprStmt),
WrapInBlock(SyntaxNode), WrapInBlock(ast::Expr),
} }
impl Anchor { impl Anchor {
@ -204,16 +242,16 @@ impl Anchor {
} }
if let Some(parent) = node.parent() { if let Some(parent) = node.parent() {
if parent.kind() == CLOSURE_EXPR { if let Some(parent) = ast::ClosureExpr::cast(parent.clone()) {
cov_mark::hit!(test_extract_var_in_closure_no_block); cov_mark::hit!(test_extract_var_in_closure_no_block);
return Some(Anchor::WrapInBlock(node)); return parent.body().map(Anchor::WrapInBlock);
} }
if parent.kind() == MATCH_ARM { if let Some(parent) = ast::MatchArm::cast(parent) {
if node.kind() == MATCH_GUARD { if node.kind() == MATCH_GUARD {
cov_mark::hit!(test_extract_var_in_match_guard); cov_mark::hit!(test_extract_var_in_match_guard);
} else { } else {
cov_mark::hit!(test_extract_var_in_match_arm_no_block); cov_mark::hit!(test_extract_var_in_match_arm_no_block);
return Some(Anchor::WrapInBlock(node)); return parent.expr().map(Anchor::WrapInBlock);
} }
} }
} }
@ -229,13 +267,6 @@ impl Anchor {
None None
}) })
} }
fn syntax(&self) -> &SyntaxNode {
match self {
Anchor::Before(it) | Anchor::WrapInBlock(it) => it,
Anchor::Replace(stmt) => stmt.syntax(),
}
}
} }
#[cfg(test)] #[cfg(test)]
@ -502,7 +533,10 @@ fn main() {
fn main() { fn main() {
let x = true; let x = true;
let tuple = match x { let tuple = match x {
true => { let $0var_name = 2 + 2; (var_name, true) } true => {
let $0var_name = 2 + 2;
(var_name, true)
}
_ => (0, false) _ => (0, false)
}; };
} }
@ -579,7 +613,10 @@ fn main() {
"#, "#,
r#" r#"
fn main() { fn main() {
let lambda = |x: u32| { let $0var_name = x * 2; var_name }; let lambda = |x: u32| {
let $0var_name = x * 2;
var_name
};
} }
"#, "#,
); );

File diff suppressed because it is too large Load diff

View file

@ -114,7 +114,7 @@ fn add_variant_to_accumulator(
parent: PathParent, parent: PathParent,
) -> Option<()> { ) -> Option<()> {
let db = ctx.db(); let db = ctx.db();
let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?; let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node_rooted(db)?;
acc.add( acc.add(
AssistId("generate_enum_variant", AssistKind::Generate), AssistId("generate_enum_variant", AssistKind::Generate),

View file

@ -8,20 +8,21 @@ use ide_db::{
famous_defs::FamousDefs, famous_defs::FamousDefs,
helpers::is_editable_crate, helpers::is_editable_crate,
path_transform::PathTransform, path_transform::PathTransform,
source_change::SourceChangeBuilder,
FxHashMap, FxHashSet, RootDatabase, SnippetCap, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
}; };
use itertools::Itertools;
use stdx::to_lower_snake_case; use stdx::to_lower_snake_case;
use syntax::{ use syntax::{
ast::{ ast::{
self, self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, CallExpr, HasArgList,
edit::{AstNodeEdit, IndentLevel}, HasGenericParams, HasModuleItem, HasTypeBounds,
make, AstNode, CallExpr, HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds,
}, },
SyntaxKind, SyntaxNode, TextRange, TextSize, ted, SyntaxKind, SyntaxNode, TextRange, T,
}; };
use crate::{ use crate::{
utils::{convert_reference_type, find_struct_impl, render_snippet, Cursor}, utils::{convert_reference_type, find_struct_impl},
AssistContext, AssistId, AssistKind, Assists, AssistContext, AssistId, AssistKind, Assists,
}; };
@ -65,7 +66,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
} }
let fn_name = &*name_ref.text(); let fn_name = &*name_ref.text();
let TargetInfo { target_module, adt_name, target, file, insert_offset } = let TargetInfo { target_module, adt_name, target, file } =
fn_target_info(ctx, path, &call, fn_name)?; fn_target_info(ctx, path, &call, fn_name)?;
if let Some(m) = target_module { if let Some(m) = target_module {
@ -77,16 +78,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?; let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?;
let text_range = call.syntax().text_range(); let text_range = call.syntax().text_range();
let label = format!("Generate {} function", function_builder.fn_name); let label = format!("Generate {} function", function_builder.fn_name);
add_func_to_accumulator( add_func_to_accumulator(acc, ctx, text_range, function_builder, file, adt_name, label)
acc,
ctx,
text_range,
function_builder,
insert_offset,
file,
adt_name,
label,
)
} }
struct TargetInfo { struct TargetInfo {
@ -94,7 +86,6 @@ struct TargetInfo {
adt_name: Option<hir::Name>, adt_name: Option<hir::Name>,
target: GeneratedFunctionTarget, target: GeneratedFunctionTarget,
file: FileId, file: FileId,
insert_offset: TextSize,
} }
impl TargetInfo { impl TargetInfo {
@ -103,9 +94,8 @@ impl TargetInfo {
adt_name: Option<hir::Name>, adt_name: Option<hir::Name>,
target: GeneratedFunctionTarget, target: GeneratedFunctionTarget,
file: FileId, file: FileId,
insert_offset: TextSize,
) -> Self { ) -> Self {
Self { target_module, adt_name, target, file, insert_offset } Self { target_module, adt_name, target, file }
} }
} }
@ -156,7 +146,7 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
} }
let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?; let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?;
let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?; let target = get_method_target(ctx, &impl_, &adt)?;
let function_builder = FunctionBuilder::from_method_call( let function_builder = FunctionBuilder::from_method_call(
ctx, ctx,
@ -169,16 +159,7 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let text_range = call.syntax().text_range(); let text_range = call.syntax().text_range();
let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
let label = format!("Generate {} method", function_builder.fn_name); let label = format!("Generate {} method", function_builder.fn_name);
add_func_to_accumulator( add_func_to_accumulator(acc, ctx, text_range, function_builder, file, adt_name, label)
acc,
ctx,
text_range,
function_builder,
insert_offset,
file,
adt_name,
label,
)
} }
fn add_func_to_accumulator( fn add_func_to_accumulator(
@ -186,23 +167,28 @@ fn add_func_to_accumulator(
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
text_range: TextRange, text_range: TextRange,
function_builder: FunctionBuilder, function_builder: FunctionBuilder,
insert_offset: TextSize,
file: FileId, file: FileId,
adt_name: Option<hir::Name>, adt_name: Option<hir::Name>,
label: String, label: String,
) -> Option<()> { ) -> Option<()> {
acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |builder| { acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |edit| {
let indent = IndentLevel::from_node(function_builder.target.syntax()); edit.edit_file(file);
let function_template = function_builder.render(adt_name.is_some());
let mut func = function_template.to_string(ctx.config.snippet_cap); let target = function_builder.target.clone();
let function_template = function_builder.render();
let func = function_template.to_ast(ctx.config.snippet_cap, edit);
if let Some(name) = adt_name { if let Some(name) = adt_name {
let name = make::ty_path(make::ext::ident_path(&format!("{}", name.display(ctx.db()))));
// FIXME: adt may have generic params. // FIXME: adt may have generic params.
func = format!("\n{indent}impl {} {{\n{func}\n{indent}}}", name.display(ctx.db())); let impl_ = make::impl_(None, None, name, None, None).clone_for_update();
}
builder.edit_file(file); func.indent(IndentLevel(1));
match ctx.config.snippet_cap { impl_.get_or_create_assoc_item_list().add_item(func.into());
Some(cap) => builder.insert_snippet(cap, insert_offset, func), target.insert_impl_at(edit, impl_);
None => builder.insert(insert_offset, func), } else {
target.insert_fn_at(edit, func);
} }
}) })
} }
@ -220,36 +206,33 @@ fn get_adt_source(
} }
struct FunctionTemplate { struct FunctionTemplate {
leading_ws: String,
fn_def: ast::Fn, fn_def: ast::Fn,
ret_type: Option<ast::RetType>, ret_type: Option<ast::RetType>,
should_focus_return_type: bool, should_focus_return_type: bool,
trailing_ws: String,
tail_expr: ast::Expr, tail_expr: ast::Expr,
} }
impl FunctionTemplate { impl FunctionTemplate {
fn to_string(&self, cap: Option<SnippetCap>) -> String { fn to_ast(&self, cap: Option<SnippetCap>, edit: &mut SourceChangeBuilder) -> ast::Fn {
let Self { leading_ws, fn_def, ret_type, should_focus_return_type, trailing_ws, tail_expr } = let Self { fn_def, ret_type, should_focus_return_type, tail_expr } = self;
self;
let f = match cap { if let Some(cap) = cap {
Some(cap) => { if *should_focus_return_type {
let cursor = if *should_focus_return_type {
// Focus the return type if there is one // Focus the return type if there is one
match ret_type { match ret_type {
Some(ret_type) => ret_type.syntax(), Some(ret_type) => {
None => tail_expr.syntax(), edit.add_placeholder_snippet(cap, ret_type.clone());
}
None => {
edit.add_placeholder_snippet(cap, tail_expr.clone());
}
} }
} else { } else {
tail_expr.syntax() edit.add_placeholder_snippet(cap, tail_expr.clone());
}; }
render_snippet(cap, fn_def.syntax(), Cursor::Replace(cursor))
} }
None => fn_def.to_string(),
};
format!("{leading_ws}{f}{trailing_ws}") fn_def.clone()
} }
} }
@ -356,7 +339,7 @@ impl FunctionBuilder {
}) })
} }
fn render(self, is_method: bool) -> FunctionTemplate { fn render(self) -> FunctionTemplate {
let placeholder_expr = make::ext::expr_todo(); let placeholder_expr = make::ext::expr_todo();
let fn_body = make::block_expr(vec![], Some(placeholder_expr)); let fn_body = make::block_expr(vec![], Some(placeholder_expr));
let visibility = match self.visibility { let visibility = match self.visibility {
@ -364,7 +347,7 @@ impl FunctionBuilder {
Visibility::Crate => Some(make::visibility_pub_crate()), Visibility::Crate => Some(make::visibility_pub_crate()),
Visibility::Pub => Some(make::visibility_pub()), Visibility::Pub => Some(make::visibility_pub()),
}; };
let mut fn_def = make::fn_( let fn_def = make::fn_(
visibility, visibility,
self.fn_name, self.fn_name,
self.generic_param_list, self.generic_param_list,
@ -375,34 +358,10 @@ impl FunctionBuilder {
self.is_async, self.is_async,
false, // FIXME : const and unsafe are not handled yet. false, // FIXME : const and unsafe are not handled yet.
false, false,
); )
let leading_ws; .clone_for_update();
let trailing_ws;
match self.target {
GeneratedFunctionTarget::BehindItem(it) => {
let mut indent = IndentLevel::from_node(&it);
if is_method {
indent = indent + 1;
leading_ws = format!("{indent}");
} else {
leading_ws = format!("\n\n{indent}");
}
fn_def = fn_def.indent(indent);
trailing_ws = String::new();
}
GeneratedFunctionTarget::InEmptyItemList(it) => {
let indent = IndentLevel::from_node(&it);
let leading_indent = indent + 1;
leading_ws = format!("\n{leading_indent}");
fn_def = fn_def.indent(leading_indent);
trailing_ws = format!("\n{indent}");
}
};
FunctionTemplate { FunctionTemplate {
leading_ws,
ret_type: fn_def.ret_type(), ret_type: fn_def.ret_type(),
// PANIC: we guarantee we always create a function body with a tail expr // PANIC: we guarantee we always create a function body with a tail expr
tail_expr: fn_def tail_expr: fn_def
@ -412,7 +371,6 @@ impl FunctionBuilder {
.expect("function body should have a tail expression"), .expect("function body should have a tail expression"),
should_focus_return_type: self.should_focus_return_type, should_focus_return_type: self.should_focus_return_type,
fn_def, fn_def,
trailing_ws,
} }
} }
} }
@ -456,40 +414,37 @@ fn get_fn_target_info(
target_module: Option<Module>, target_module: Option<Module>,
call: CallExpr, call: CallExpr,
) -> Option<TargetInfo> { ) -> Option<TargetInfo> {
let (target, file, insert_offset) = get_fn_target(ctx, target_module, call)?; let (target, file) = get_fn_target(ctx, target_module, call)?;
Some(TargetInfo::new(target_module, None, target, file, insert_offset)) Some(TargetInfo::new(target_module, None, target, file))
} }
fn get_fn_target( fn get_fn_target(
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
target_module: Option<Module>, target_module: Option<Module>,
call: CallExpr, call: CallExpr,
) -> Option<(GeneratedFunctionTarget, FileId, TextSize)> { ) -> Option<(GeneratedFunctionTarget, FileId)> {
let mut file = ctx.file_id(); let mut file = ctx.file_id();
let target = match target_module { let target = match target_module {
Some(target_module) => { Some(target_module) => {
let module_source = target_module.definition_source(ctx.db()); let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module);
let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?;
file = in_file; file = in_file;
target target
} }
None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?, None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?,
}; };
Some((target.clone(), file, get_insert_offset(&target))) Some((target.clone(), file))
} }
fn get_method_target( fn get_method_target(
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
impl_: &Option<ast::Impl>, impl_: &Option<ast::Impl>,
adt: &Adt, adt: &Adt,
) -> Option<(GeneratedFunctionTarget, TextSize)> { ) -> Option<GeneratedFunctionTarget> {
let target = match impl_ { let target = match impl_ {
Some(impl_) => next_space_for_fn_in_impl(impl_)?, Some(impl_) => GeneratedFunctionTarget::InImpl(impl_.clone()),
None => { None => GeneratedFunctionTarget::AfterItem(adt.source(ctx.sema.db)?.syntax().value.clone()),
GeneratedFunctionTarget::BehindItem(adt.source(ctx.sema.db)?.syntax().value.clone())
}
}; };
Some((target.clone(), get_insert_offset(&target))) Some(target)
} }
fn assoc_fn_target_info( fn assoc_fn_target_info(
@ -505,36 +460,120 @@ fn assoc_fn_target_info(
return None; return None;
} }
let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?; let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?;
let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?; let target = get_method_target(ctx, &impl_, &adt)?;
let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
Some(TargetInfo::new(target_module, adt_name, target, file, insert_offset)) Some(TargetInfo::new(target_module, adt_name, target, file))
}
fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
match target {
GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'),
}
} }
#[derive(Clone)] #[derive(Clone)]
enum GeneratedFunctionTarget { enum GeneratedFunctionTarget {
BehindItem(SyntaxNode), AfterItem(SyntaxNode),
InEmptyItemList(SyntaxNode), InEmptyItemList(SyntaxNode),
InImpl(ast::Impl),
} }
impl GeneratedFunctionTarget { impl GeneratedFunctionTarget {
fn syntax(&self) -> &SyntaxNode { fn syntax(&self) -> &SyntaxNode {
match self { match self {
GeneratedFunctionTarget::BehindItem(it) => it, GeneratedFunctionTarget::AfterItem(it) => it,
GeneratedFunctionTarget::InEmptyItemList(it) => it, GeneratedFunctionTarget::InEmptyItemList(it) => it,
GeneratedFunctionTarget::InImpl(it) => it.syntax(),
} }
} }
fn parent(&self) -> SyntaxNode { fn parent(&self) -> SyntaxNode {
match self { match self {
GeneratedFunctionTarget::BehindItem(it) => it.parent().expect("item without parent"), GeneratedFunctionTarget::AfterItem(it) => it.parent().expect("item without parent"),
GeneratedFunctionTarget::InEmptyItemList(it) => it.clone(), GeneratedFunctionTarget::InEmptyItemList(it) => it.clone(),
GeneratedFunctionTarget::InImpl(it) => it.syntax().clone(),
}
}
fn insert_impl_at(&self, edit: &mut SourceChangeBuilder, impl_: ast::Impl) {
match self {
GeneratedFunctionTarget::AfterItem(item) => {
let item = edit.make_syntax_mut(item.clone());
let position = if item.parent().is_some() {
ted::Position::after(&item)
} else {
ted::Position::first_child_of(&item)
};
let indent = IndentLevel::from_node(&item);
let leading_ws = make::tokens::whitespace(&format!("\n{indent}"));
impl_.indent(indent);
ted::insert_all(position, vec![leading_ws.into(), impl_.syntax().clone().into()]);
}
GeneratedFunctionTarget::InEmptyItemList(item_list) => {
let item_list = edit.make_syntax_mut(item_list.clone());
let insert_after =
item_list.children_with_tokens().find_or_first(|child| child.kind() == T!['{']);
let position = match insert_after {
Some(child) => ted::Position::after(child),
None => ted::Position::first_child_of(&item_list),
};
let indent = IndentLevel::from_node(&item_list);
let leading_indent = indent + 1;
let leading_ws = make::tokens::whitespace(&format!("\n{leading_indent}"));
impl_.indent(indent);
ted::insert_all(position, vec![leading_ws.into(), impl_.syntax().clone().into()]);
}
GeneratedFunctionTarget::InImpl(_) => {
unreachable!("can't insert an impl inside an impl")
}
}
}
fn insert_fn_at(&self, edit: &mut SourceChangeBuilder, func: ast::Fn) {
match self {
GeneratedFunctionTarget::AfterItem(item) => {
let item = edit.make_syntax_mut(item.clone());
let position = if item.parent().is_some() {
ted::Position::after(&item)
} else {
ted::Position::first_child_of(&item)
};
let indent = IndentLevel::from_node(&item);
let leading_ws = make::tokens::whitespace(&format!("\n\n{indent}"));
func.indent(indent);
ted::insert_all_raw(
position,
vec![leading_ws.into(), func.syntax().clone().into()],
);
}
GeneratedFunctionTarget::InEmptyItemList(item_list) => {
let item_list = edit.make_syntax_mut(item_list.clone());
let insert_after =
item_list.children_with_tokens().find_or_first(|child| child.kind() == T!['{']);
let position = match insert_after {
Some(child) => ted::Position::after(child),
None => ted::Position::first_child_of(&item_list),
};
let indent = IndentLevel::from_node(&item_list);
let leading_indent = indent + 1;
let leading_ws = make::tokens::whitespace(&format!("\n{leading_indent}"));
let trailing_ws = make::tokens::whitespace(&format!("\n{indent}"));
func.indent(leading_indent);
ted::insert_all(
position,
vec![leading_ws.into(), func.syntax().clone().into(), trailing_ws.into()],
);
}
GeneratedFunctionTarget::InImpl(impl_) => {
let impl_ = edit.make_mut(impl_.clone());
let leading_indent = impl_.indent_level() + 1;
func.indent(leading_indent);
impl_.get_or_create_assoc_item_list().add_item(func.into());
}
} }
} }
} }
@ -1026,43 +1065,40 @@ fn next_space_for_fn_after_call_site(expr: ast::CallableExpr) -> Option<Generate
} }
last_ancestor = Some(next_ancestor); last_ancestor = Some(next_ancestor);
} }
last_ancestor.map(GeneratedFunctionTarget::BehindItem) last_ancestor.map(GeneratedFunctionTarget::AfterItem)
} }
fn next_space_for_fn_in_module( fn next_space_for_fn_in_module(
db: &dyn hir::db::ExpandDatabase, db: &dyn hir::db::HirDatabase,
module_source: &hir::InFile<hir::ModuleSource>, target_module: hir::Module,
) -> Option<(FileId, GeneratedFunctionTarget)> { ) -> (FileId, GeneratedFunctionTarget) {
let file = module_source.file_id.original_file(db); let module_source = target_module.definition_source(db);
let file = module_source.file_id.original_file(db.upcast());
let assist_item = match &module_source.value { let assist_item = match &module_source.value {
hir::ModuleSource::SourceFile(it) => match it.items().last() { hir::ModuleSource::SourceFile(it) => match it.items().last() {
Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()), Some(last_item) => GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()),
None => GeneratedFunctionTarget::BehindItem(it.syntax().clone()), None => GeneratedFunctionTarget::AfterItem(it.syntax().clone()),
}, },
hir::ModuleSource::Module(it) => match it.item_list().and_then(|it| it.items().last()) { hir::ModuleSource::Module(it) => match it.item_list().and_then(|it| it.items().last()) {
Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()), Some(last_item) => GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()),
None => GeneratedFunctionTarget::InEmptyItemList(it.item_list()?.syntax().clone()), None => {
let item_list =
it.item_list().expect("module definition source should have an item list");
GeneratedFunctionTarget::InEmptyItemList(item_list.syntax().clone())
}
}, },
hir::ModuleSource::BlockExpr(it) => { hir::ModuleSource::BlockExpr(it) => {
if let Some(last_item) = if let Some(last_item) =
it.statements().take_while(|stmt| matches!(stmt, ast::Stmt::Item(_))).last() it.statements().take_while(|stmt| matches!(stmt, ast::Stmt::Item(_))).last()
{ {
GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()) GeneratedFunctionTarget::AfterItem(last_item.syntax().clone())
} else { } else {
GeneratedFunctionTarget::InEmptyItemList(it.syntax().clone()) GeneratedFunctionTarget::InEmptyItemList(it.syntax().clone())
} }
} }
}; };
Some((file, assist_item))
}
fn next_space_for_fn_in_impl(impl_: &ast::Impl) -> Option<GeneratedFunctionTarget> { (file, assist_item)
let assoc_item_list = impl_.assoc_item_list()?;
if let Some(last_item) = assoc_item_list.assoc_items().last() {
Some(GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()))
} else {
Some(GeneratedFunctionTarget::InEmptyItemList(assoc_item_list.syntax().clone()))
}
} }
#[derive(Clone, Copy)] #[derive(Clone, Copy)]

View file

@ -315,17 +315,6 @@ fn inline(
} else { } else {
fn_body.clone_for_update() fn_body.clone_for_update()
}; };
if let Some(imp) = body.syntax().ancestors().find_map(ast::Impl::cast) {
if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) {
if let Some(t) = imp.self_ty() {
body.syntax()
.descendants_with_tokens()
.filter_map(NodeOrToken::into_token)
.filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
.for_each(|tok| ted::replace(tok, t.syntax()));
}
}
}
let usages_for_locals = |local| { let usages_for_locals = |local| {
Definition::Local(local) Definition::Local(local)
.usages(sema) .usages(sema)
@ -381,6 +370,27 @@ fn inline(
} }
} }
// We should place the following code after last usage of `usages_for_locals`
// because `ted::replace` will change the offset in syntax tree, which makes
// `FileReference` incorrect
if let Some(imp) =
sema.ancestors_with_macros(fn_body.syntax().clone()).find_map(ast::Impl::cast)
{
if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) {
if let Some(t) = imp.self_ty() {
while let Some(self_tok) = body
.syntax()
.descendants_with_tokens()
.filter_map(NodeOrToken::into_token)
.find(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
{
let replace_with = t.clone_subtree().syntax().clone_for_update();
ted::replace(self_tok, replace_with);
}
}
}
}
let mut func_let_vars: BTreeSet<String> = BTreeSet::new(); let mut func_let_vars: BTreeSet<String> = BTreeSet::new();
// grab all of the local variable declarations in the function // grab all of the local variable declarations in the function
@ -1510,4 +1520,106 @@ fn main() {
"#, "#,
); );
} }
#[test]
fn inline_call_with_multiple_self_types_eq() {
check_assist(
inline_call,
r#"
#[derive(PartialEq, Eq)]
enum Enum {
A,
B,
}
impl Enum {
fn a_or_b_eq(&self) -> bool {
self == &Self::A || self == &Self::B
}
}
fn a() -> bool {
Enum::A.$0a_or_b_eq()
}
"#,
r#"
#[derive(PartialEq, Eq)]
enum Enum {
A,
B,
}
impl Enum {
fn a_or_b_eq(&self) -> bool {
self == &Self::A || self == &Self::B
}
}
fn a() -> bool {
{
let ref this = Enum::A;
this == &Enum::A || this == &Enum::B
}
}
"#,
)
}
#[test]
fn inline_call_with_self_type_in_macros() {
check_assist(
inline_call,
r#"
trait Trait<T1> {
fn f(a: T1) -> Self;
}
macro_rules! impl_from {
($t: ty) => {
impl Trait<$t> for $t {
fn f(a: $t) -> Self {
a as Self
}
}
};
}
struct A {}
impl_from!(A);
fn main() {
let a: A = A{};
let b = <A as Trait<A>>::$0f(a);
}
"#,
r#"
trait Trait<T1> {
fn f(a: T1) -> Self;
}
macro_rules! impl_from {
($t: ty) => {
impl Trait<$t> for $t {
fn f(a: $t) -> Self {
a as Self
}
}
};
}
struct A {}
impl_from!(A);
fn main() {
let a: A = A{};
let b = {
let a = a;
a as A
};
}
"#,
)
}
} }

View file

@ -18,7 +18,7 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
// ``` // ```
pub(crate) fn introduce_named_generic(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { pub(crate) fn introduce_named_generic(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let impl_trait_type = ctx.find_node_at_offset::<ast::ImplTraitType>()?; let impl_trait_type = ctx.find_node_at_offset::<ast::ImplTraitType>()?;
let param = impl_trait_type.syntax().parent().and_then(ast::Param::cast)?; let param = impl_trait_type.syntax().ancestors().find_map(|node| ast::Param::cast(node))?;
let fn_ = param.syntax().ancestors().find_map(ast::Fn::cast)?; let fn_ = param.syntax().ancestors().find_map(ast::Fn::cast)?;
let type_bound_list = impl_trait_type.type_bound_list()?; let type_bound_list = impl_trait_type.type_bound_list()?;
@ -31,15 +31,16 @@ pub(crate) fn introduce_named_generic(acc: &mut Assists, ctx: &AssistContext<'_>
|edit| { |edit| {
let impl_trait_type = edit.make_mut(impl_trait_type); let impl_trait_type = edit.make_mut(impl_trait_type);
let fn_ = edit.make_mut(fn_); let fn_ = edit.make_mut(fn_);
let fn_generic_param_list = fn_.get_or_create_generic_param_list();
let type_param_name = suggest_name::for_generic_parameter(&impl_trait_type); let type_param_name =
suggest_name::for_impl_trait_as_generic(&impl_trait_type, &fn_generic_param_list);
let type_param = make::type_param(make::name(&type_param_name), Some(type_bound_list)) let type_param = make::type_param(make::name(&type_param_name), Some(type_bound_list))
.clone_for_update(); .clone_for_update();
let new_ty = make::ty(&type_param_name).clone_for_update(); let new_ty = make::ty(&type_param_name).clone_for_update();
ted::replace(impl_trait_type.syntax(), new_ty.syntax()); ted::replace(impl_trait_type.syntax(), new_ty.syntax());
fn_.get_or_create_generic_param_list().add_generic_param(type_param.into()); fn_generic_param_list.add_generic_param(type_param.into());
if let Some(cap) = ctx.config.snippet_cap { if let Some(cap) = ctx.config.snippet_cap {
if let Some(generic_param) = if let Some(generic_param) =
@ -111,12 +112,19 @@ fn foo<$0B: Bar
#[test] #[test]
fn replace_impl_trait_with_exist_generic_letter() { fn replace_impl_trait_with_exist_generic_letter() {
// FIXME: This is wrong, we should pick a different name if the one we
// want is already bound.
check_assist( check_assist(
introduce_named_generic, introduce_named_generic,
r#"fn foo<B>(bar: $0impl Bar) {}"#, r#"fn foo<B>(bar: $0impl Bar) {}"#,
r#"fn foo<B, $0B: Bar>(bar: B) {}"#, r#"fn foo<B, $0B0: Bar>(bar: B0) {}"#,
);
}
#[test]
fn replace_impl_trait_with_more_exist_generic_letter() {
check_assist(
introduce_named_generic,
r#"fn foo<B, B0, B1, B3>(bar: $0impl Bar) {}"#,
r#"fn foo<B, B0, B1, B3, $0B2: Bar>(bar: B2) {}"#,
); );
} }
@ -149,4 +157,22 @@ fn foo<
r#"fn foo<$0F: Foo + Bar>(bar: F) {}"#, r#"fn foo<$0F: Foo + Bar>(bar: F) {}"#,
); );
} }
#[test]
fn replace_impl_with_mut() {
check_assist(
introduce_named_generic,
r#"fn f(iter: &mut $0impl Iterator<Item = i32>) {}"#,
r#"fn f<$0I: Iterator<Item = i32>>(iter: &mut I) {}"#,
);
}
#[test]
fn replace_impl_inside() {
check_assist(
introduce_named_generic,
r#"fn f(x: &mut Vec<$0impl Iterator<Item = i32>>) {}"#,
r#"fn f<$0I: Iterator<Item = i32>>(x: &mut Vec<I>) {}"#,
);
}
} }

View file

@ -11,7 +11,10 @@ use syntax::{
ted, AstNode, WalkEvent, ted, AstNode, WalkEvent,
}; };
use crate::assist_context::{AssistContext, Assists}; use crate::{
assist_context::{AssistContext, Assists},
utils,
};
// Assist: promote_local_to_const // Assist: promote_local_to_const
// //
@ -79,15 +82,13 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
let name_ref = make::name_ref(&name); let name_ref = make::name_ref(&name);
for usage in usages { for usage in usages {
let Some(usage) = usage.name.as_name_ref().cloned() else { continue }; let Some(usage_name) = usage.name.as_name_ref().cloned() else { continue };
if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage) { if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage_name) {
let record_field = edit.make_mut(record_field); let name_expr = make::expr_path(make::path_from_text(&name));
let name_expr = utils::replace_record_field_expr(ctx, edit, record_field, name_expr);
make::expr_path(make::path_from_text(&name)).clone_for_update();
record_field.replace_expr(name_expr);
} else { } else {
let usage = edit.make_mut(usage); let usage_range = usage.range;
ted::replace(usage.syntax(), name_ref.clone_for_update().syntax()); edit.replace(usage_range, name_ref.syntax().text());
} }
} }
} }
@ -212,6 +213,76 @@ fn main() {
) )
} }
#[test]
fn usage_in_macro() {
check_assist(
promote_local_to_const,
r"
macro_rules! identity {
($body:expr) => {
$body
}
}
fn baz() -> usize {
let $0foo = 2;
identity![foo]
}
",
r"
macro_rules! identity {
($body:expr) => {
$body
}
}
fn baz() -> usize {
const $0FOO: usize = 2;
identity![FOO]
}
",
)
}
#[test]
fn usage_shorthand_in_macro() {
check_assist(
promote_local_to_const,
r"
struct Foo {
foo: usize,
}
macro_rules! identity {
($body:expr) => {
$body
};
}
fn baz() -> Foo {
let $0foo = 2;
identity![Foo { foo }]
}
",
r"
struct Foo {
foo: usize,
}
macro_rules! identity {
($body:expr) => {
$body
};
}
fn baz() -> Foo {
const $0FOO: usize = 2;
identity![Foo { foo: FOO }]
}
",
)
}
#[test] #[test]
fn not_applicable_non_const_meth_call() { fn not_applicable_non_const_meth_call() {
cov_mark::check!(promote_local_non_const); cov_mark::check!(promote_local_non_const);

View file

@ -423,7 +423,7 @@ mod z {
struct X(); struct X();
struct Y(); struct Y();
mod z { mod z {
use super::{X}; use super::X;
fn w() { fn w() {
let x = X(); let x = X();
@ -495,7 +495,7 @@ struct X();
mod y { mod y {
struct Y(); struct Y();
mod z { mod z {
use crate::{X}; use crate::X;
fn f() { fn f() {
let x = X(); let x = X();
} }
@ -526,7 +526,7 @@ struct X();
mod y { mod y {
struct Y(); struct Y();
mod z { mod z {
use crate::{y::Y}; use crate::y::Y;
fn f() { fn f() {
let y = Y(); let y = Y();
} }
@ -536,6 +536,184 @@ mod y {
); );
} }
#[test]
fn remove_unused_auto_remove_brace_nested() {
check_assist(
remove_unused_imports,
r#"
mod a {
pub struct A();
}
mod b {
struct F();
mod c {
$0use {{super::{{
{d::{{{{{{{S, U}}}}}}}},
{{{{e::{H, L, {{{R}}}}}}}},
F, super::a::A
}}}};$0
fn f() {
let f = F();
let l = L();
let a = A();
let s = S();
let h = H();
}
}
mod d {
pub struct S();
pub struct U();
}
mod e {
pub struct H();
pub struct L();
pub struct R();
}
}
"#,
r#"
mod a {
pub struct A();
}
mod b {
struct F();
mod c {
use super::{
d::S,
e::{H, L},
F, super::a::A
};
fn f() {
let f = F();
let l = L();
let a = A();
let s = S();
let h = H();
}
}
mod d {
pub struct S();
pub struct U();
}
mod e {
pub struct H();
pub struct L();
pub struct R();
}
}
"#,
);
}
#[test]
fn remove_comma_after_auto_remove_brace() {
check_assist(
remove_unused_imports,
r#"
mod m {
pub mod x {
pub struct A;
pub struct B;
}
pub mod y {
pub struct C;
}
}
$0use m::{
x::{A, B},
y::C,
};$0
fn main() {
B;
}
"#,
r#"
mod m {
pub mod x {
pub struct A;
pub struct B;
}
pub mod y {
pub struct C;
}
}
use m::
x::B
;
fn main() {
B;
}
"#,
);
check_assist(
remove_unused_imports,
r#"
mod m {
pub mod x {
pub struct A;
pub struct B;
}
pub mod y {
pub struct C;
pub struct D;
}
pub mod z {
pub struct E;
pub struct F;
}
}
$0use m::{
x::{A, B},
y::{C, D,},
z::{E, F},
};$0
fn main() {
B;
C;
F;
}
"#,
r#"
mod m {
pub mod x {
pub struct A;
pub struct B;
}
pub mod y {
pub struct C;
pub struct D;
}
pub mod z {
pub struct E;
pub struct F;
}
}
use m::{
x::B,
y::C,
z::F,
};
fn main() {
B;
C;
F;
}
"#,
);
}
#[test] #[test]
fn remove_nested_all_unused() { fn remove_nested_all_unused() {
check_assist( check_assist(

View file

@ -1,4 +1,7 @@
use syntax::ast::{self, AstNode}; use syntax::{
ast::{self, make, AstNode},
ted,
};
use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists}; use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
@ -42,19 +45,34 @@ pub(crate) fn replace_is_method_with_if_let_method(
suggest_name::for_variable(&receiver, &ctx.sema) suggest_name::for_variable(&receiver, &ctx.sema)
}; };
let target = call_expr.syntax().text_range();
let (assist_id, message, text) = if name_ref.text() == "is_some" { let (assist_id, message, text) = if name_ref.text() == "is_some" {
("replace_is_some_with_if_let_some", "Replace `is_some` with `if let Some`", "Some") ("replace_is_some_with_if_let_some", "Replace `is_some` with `if let Some`", "Some")
} else { } else {
("replace_is_ok_with_if_let_ok", "Replace `is_ok` with `if let Ok`", "Ok") ("replace_is_ok_with_if_let_ok", "Replace `is_ok` with `if let Ok`", "Ok")
}; };
acc.add(AssistId(assist_id, AssistKind::RefactorRewrite), message, target, |edit| { acc.add(
let var_name = format!("${{0:{}}}", var_name); AssistId(assist_id, AssistKind::RefactorRewrite),
let replacement = format!("let {}({}) = {}", text, var_name, receiver); message,
edit.replace(target, replacement); call_expr.syntax().text_range(),
}) |edit| {
let call_expr = edit.make_mut(call_expr);
let var_pat = make::ident_pat(false, false, make::name(&var_name));
let pat = make::tuple_struct_pat(make::ext::ident_path(text), [var_pat.into()]);
let let_expr = make::expr_let(pat.into(), receiver).clone_for_update();
if let Some(cap) = ctx.config.snippet_cap {
if let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat() {
if let Some(first_var) = pat.fields().next() {
edit.add_placeholder_snippet(cap, first_var);
}
}
}
ted::replace(call_expr.syntax(), let_expr.syntax());
},
)
} }
_ => return None, _ => return None,
} }

View file

@ -5,13 +5,14 @@ mod sourcegen;
use expect_test::expect; use expect_test::expect;
use hir::Semantics; use hir::Semantics;
use ide_db::{ use ide_db::{
base_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt}, base_db::{FileId, FileRange, SourceDatabaseExt},
imports::insert_use::{ImportGranularity, InsertUseConfig}, imports::insert_use::{ImportGranularity, InsertUseConfig},
source_change::FileSystemEdit, source_change::FileSystemEdit,
RootDatabase, SnippetCap, RootDatabase, SnippetCap,
}; };
use stdx::{format_to, trim_indent}; use stdx::{format_to, trim_indent};
use syntax::TextRange; use syntax::TextRange;
use test_fixture::WithFixture;
use test_utils::{assert_eq_text, extract_offset}; use test_utils::{assert_eq_text, extract_offset};
use crate::{ use crate::{
@ -504,16 +505,33 @@ pub fn test_some_range(a: int) -> bool {
TextEdit { TextEdit {
indels: [ indels: [
Indel { Indel {
insert: "let $0var_name = 5;\n ", insert: "let",
delete: 45..45, delete: 45..47,
}, },
Indel { Indel {
insert: "var_name", insert: "var_name",
delete: 59..60, delete: 48..60,
},
Indel {
insert: "=",
delete: 61..81,
},
Indel {
insert: "5;\n if let 2..6 = var_name {\n true\n } else {\n false\n }",
delete: 82..108,
}, },
], ],
}, },
None, Some(
SnippetEdit(
[
(
0,
49..49,
),
],
),
),
), ),
}, },
file_system_edits: [], file_system_edits: [],
@ -566,16 +584,33 @@ pub fn test_some_range(a: int) -> bool {
TextEdit { TextEdit {
indels: [ indels: [
Indel { Indel {
insert: "let $0var_name = 5;\n ", insert: "let",
delete: 45..45, delete: 45..47,
}, },
Indel { Indel {
insert: "var_name", insert: "var_name",
delete: 59..60, delete: 48..60,
},
Indel {
insert: "=",
delete: 61..81,
},
Indel {
insert: "5;\n if let 2..6 = var_name {\n true\n } else {\n false\n }",
delete: 82..108,
}, },
], ],
}, },
None, Some(
SnippetEdit(
[
(
0,
49..49,
),
],
),
),
), ),
}, },
file_system_edits: [], file_system_edits: [],

View file

@ -813,3 +813,21 @@ fn test_required_hashes() {
assert_eq!(3, required_hashes("#ab\"##c")); assert_eq!(3, required_hashes("#ab\"##c"));
assert_eq!(5, required_hashes("#ab\"##\"####c")); assert_eq!(5, required_hashes("#ab\"##\"####c"));
} }
/// Replaces the record expression, handling field shorthands including inside macros.
pub(crate) fn replace_record_field_expr(
ctx: &AssistContext<'_>,
edit: &mut SourceChangeBuilder,
record_field: ast::RecordExprField,
initializer: ast::Expr,
) {
if let Some(ast::Expr::PathExpr(path_expr)) = record_field.expr() {
// replace field shorthand
let file_range = ctx.sema.original_range(path_expr.syntax());
edit.insert(file_range.range.end(), format!(": {}", initializer.syntax().text()))
} else if let Some(expr) = record_field.expr() {
// just replace expr
let file_range = ctx.sema.original_range(expr.syntax());
edit.replace(file_range.range, initializer.syntax().text());
}
}

View file

@ -1,5 +1,7 @@
//! This module contains functions to suggest names for expressions, functions and other items //! This module contains functions to suggest names for expressions, functions and other items
use std::collections::HashSet;
use hir::Semantics; use hir::Semantics;
use ide_db::RootDatabase; use ide_db::RootDatabase;
use itertools::Itertools; use itertools::Itertools;
@ -58,12 +60,59 @@ const USELESS_METHODS: &[&str] = &[
"into_future", "into_future",
]; ];
pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr { /// Suggest a unique name for generic parameter.
/// Suggests a unique name for a generic parameter.
///
/// `existing_params` supplies the generic parameters already declared, so the
/// returned name is guaranteed not to clash with any of them.
///
/// Conflicts are resolved by appending an increasing numeric suffix to the
/// requested name: `T`, then `T0`, `T1`, ...
pub(crate) fn for_unique_generic_name(
    name: &str,
    existing_params: &ast::GenericParamList,
) -> SmolStr {
    // Textual names of every parameter currently in scope.
    let taken = existing_params
        .generic_params()
        .map(|param| match param {
            ast::GenericParam::TypeParam(t) => t.name().unwrap().to_string(),
            other => other.to_string(),
        })
        .collect::<HashSet<_>>();

    // The bare name wins if it is free; otherwise probe `name0`, `name1`, ...
    if !taken.contains(name) {
        return name.into();
    }
    (0..)
        .map(|suffix| format!("{name}{suffix}"))
        .find(|candidate| !taken.contains(candidate))
        .expect("an unbounded counter eventually yields an unused name")
        .into()
}
/// Suggest name of impl trait type
///
/// `existing_params` is used to check if the name conflicts with existing
/// generic parameters.
///
/// # Current implementation
///
/// In current implementation, the function tries to get the name from the first
/// character of the name for the first type bound.
///
/// If the name conflicts with existing generic parameters, it will try to
/// resolve the conflict with `for_unique_generic_name`.
pub(crate) fn for_impl_trait_as_generic(
ty: &ast::ImplTraitType,
existing_params: &ast::GenericParamList,
) -> SmolStr {
let c = ty let c = ty
.type_bound_list() .type_bound_list()
.and_then(|bounds| bounds.syntax().text().char_at(0.into())) .and_then(|bounds| bounds.syntax().text().char_at(0.into()))
.unwrap_or('T'); .unwrap_or('T');
c.encode_utf8(&mut [0; 4]).into()
for_unique_generic_name(c.encode_utf8(&mut [0; 4]), existing_params)
} }
/// Suggest name of variable for given expression /// Suggest name of variable for given expression
@ -275,7 +324,8 @@ fn from_field_name(expr: &ast::Expr) -> Option<String> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use ide_db::base_db::{fixture::WithFixture, FileRange}; use ide_db::base_db::FileRange;
use test_fixture::WithFixture;
use super::*; use super::*;

View file

@ -35,3 +35,7 @@ expect-test = "1.4.0"
# local deps # local deps
test-utils.workspace = true test-utils.workspace = true
test-fixture.workspace = true
[lints]
workspace = true

View file

@ -26,6 +26,7 @@ mod cfg;
mod derive; mod derive;
mod lint; mod lint;
mod repr; mod repr;
mod macro_use;
pub(crate) use self::derive::complete_derive_path; pub(crate) use self::derive::complete_derive_path;
@ -35,6 +36,7 @@ pub(crate) fn complete_known_attribute_input(
ctx: &CompletionContext<'_>, ctx: &CompletionContext<'_>,
&colon_prefix: &bool, &colon_prefix: &bool,
fake_attribute_under_caret: &ast::Attr, fake_attribute_under_caret: &ast::Attr,
extern_crate: Option<&ast::ExternCrate>,
) -> Option<()> { ) -> Option<()> {
let attribute = fake_attribute_under_caret; let attribute = fake_attribute_under_caret;
let name_ref = match attribute.path() { let name_ref = match attribute.path() {
@ -66,6 +68,9 @@ pub(crate) fn complete_known_attribute_input(
lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints); lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints);
} }
"cfg" => cfg::complete_cfg(acc, ctx), "cfg" => cfg::complete_cfg(acc, ctx),
"macro_use" => {
macro_use::complete_macro_use(acc, ctx, extern_crate, &parse_tt_as_comma_sep_paths(tt)?)
}
_ => (), _ => (),
} }
Some(()) Some(())

View file

@ -0,0 +1,35 @@
//! Completion for macros in `#[macro_use(...)]`
use hir::ModuleDef;
use ide_db::SymbolKind;
use syntax::ast;
use crate::{context::CompletionContext, item::CompletionItem, Completions};
/// Completes macro names inside `#[macro_use(...)]` on an `extern crate`.
///
/// Offers every macro declared at the root module of the crate the attribute's
/// `extern crate` resolves to, skipping names already listed in the attribute.
pub(super) fn complete_macro_use(
    acc: &mut Completions,
    ctx: &CompletionContext<'_>,
    extern_crate: Option<&ast::ExternCrate>,
    existing_imports: &[ast::Path],
) {
    // Without a resolvable `extern crate` there is nothing to complete.
    let Some(extern_crate) = extern_crate else { return };
    let Some(extern_crate) = ctx.sema.to_def(extern_crate) else { return };
    let Some(krate) = extern_crate.resolved_crate(ctx.db) else { return };

    // True when the attribute already names this macro.
    let already_listed = |name: &str| {
        existing_imports
            .iter()
            .filter_map(|path| path.as_single_name_ref())
            .any(|name_ref| name_ref.text() == name)
    };

    for decl in krate.root_module().declarations(ctx.db) {
        let ModuleDef::Macro(mac) = decl else { continue };
        let mac_name = mac.name(ctx.db);
        let Some(mac_name) = mac_name.as_str() else { continue };
        if already_listed(mac_name) {
            continue;
        }
        CompletionItem::new(SymbolKind::Macro, ctx.source_range(), mac_name)
            .add_to(acc, ctx.db);
    }
}

View file

@ -27,6 +27,8 @@ pub(crate) fn complete_dot(
} }
let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. }); let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
let is_method_acces_with_parens =
matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
complete_fields( complete_fields(
acc, acc,
@ -35,6 +37,7 @@ pub(crate) fn complete_dot(
|acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty), |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
|acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty), |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
is_field_access, is_field_access,
is_method_acces_with_parens,
); );
complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None)); complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
@ -83,6 +86,7 @@ pub(crate) fn complete_undotted_self(
}, },
|acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty), |acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
true, true,
false,
); );
complete_methods(ctx, &ty, |func| { complete_methods(ctx, &ty, |func| {
acc.add_method( acc.add_method(
@ -106,12 +110,14 @@ fn complete_fields(
mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type), mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type), mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
is_field_access: bool, is_field_access: bool,
is_method_acess_with_parens: bool,
) { ) {
let mut seen_names = FxHashSet::default(); let mut seen_names = FxHashSet::default();
for receiver in receiver.autoderef(ctx.db) { for receiver in receiver.autoderef(ctx.db) {
for (field, ty) in receiver.fields(ctx.db) { for (field, ty) in receiver.fields(ctx.db) {
if seen_names.insert(field.name(ctx.db)) if seen_names.insert(field.name(ctx.db))
&& (is_field_access || ty.is_fn() || ty.is_closure()) && (is_field_access
|| (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure())))
{ {
named_field(acc, field, ty); named_field(acc, field, ty);
} }
@ -120,7 +126,8 @@ fn complete_fields(
// Tuples are always the last type in a deref chain, so just check if the name is // Tuples are always the last type in a deref chain, so just check if the name is
// already seen without inserting into the hashset. // already seen without inserting into the hashset.
if !seen_names.contains(&hir::Name::new_tuple_field(i)) if !seen_names.contains(&hir::Name::new_tuple_field(i))
&& (is_field_access || ty.is_fn() || ty.is_closure()) && (is_field_access
|| (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure())))
{ {
// Tuple fields are always public (tuple struct fields are handled above). // Tuple fields are always public (tuple struct fields are handled above).
tuple_index(acc, i, ty); tuple_index(acc, i, ty);
@ -1236,4 +1243,24 @@ fn foo() {
"#, "#,
) )
} }
// The receiver access `foo.ba$0::<>` is not a method call with parentheses, so
// the fn-typed field `baz` must not be offered (fields of fn/closure type are
// only completed when `is_method_acess_with_parens` holds); the expected list
// therefore contains only the real method `bar`.
#[test]
fn test_fn_field_dot_access_method_has_parens_false() {
check(
r#"
struct Foo { baz: fn() }
impl Foo {
fn bar<T>(self, t: T): T { t }
}
fn baz() {
let foo = Foo{ baz: || {} };
foo.ba$0::<>;
}
"#,
expect![[r#"
me bar() fn(self, T)
"#]],
);
}
} }

View file

@ -371,6 +371,7 @@ pub(super) enum CompletionAnalysis {
UnexpandedAttrTT { UnexpandedAttrTT {
colon_prefix: bool, colon_prefix: bool,
fake_attribute_under_caret: Option<ast::Attr>, fake_attribute_under_caret: Option<ast::Attr>,
extern_crate: Option<ast::ExternCrate>,
}, },
} }
@ -693,7 +694,7 @@ impl<'a> CompletionContext<'a> {
let krate = scope.krate(); let krate = scope.krate();
let module = scope.module(); let module = scope.module();
let toolchain = db.crate_graph()[krate.into()].channel; let toolchain = db.crate_graph()[krate.into()].channel();
// `toolchain == None` means we're in some detached files. Since we have no information on // `toolchain == None` means we're in some detached files. Since we have no information on
// the toolchain being used, let's just allow unstable items to be listed. // the toolchain being used, let's just allow unstable items to be listed.
let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None); let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None);

Some files were not shown because too many files have changed in this diff Show more