Auto merge of #118817 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? `@ghost`
bors 2023-12-12 08:22:37 +00:00
commit be035e80e8
263 changed files with 9788 additions and 6258 deletions

Cargo.lock (generated)

@ -276,7 +276,7 @@ dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
"memoffset 0.9.0",
"memoffset",
"scopeguard",
]
@ -301,12 +301,12 @@ dependencies = [
[[package]]
name = "dashmap"
version = "5.4.0"
version = "5.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc"
checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
dependencies = [
"cfg-if",
"hashbrown 0.12.3",
"hashbrown",
"lock_api",
"once_cell",
"parking_lot_core",
@ -448,15 +448,9 @@ checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
[[package]]
name = "hashbrown"
version = "0.12.3"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156"
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
[[package]]
name = "heck"
@ -509,7 +503,7 @@ dependencies = [
"either",
"expect-test",
"fst",
"hashbrown 0.12.3",
"hashbrown",
"hir-expand",
"indexmap",
"intern",
@ -539,7 +533,7 @@ dependencies = [
"cov-mark",
"either",
"expect-test",
"hashbrown 0.12.3",
"hashbrown",
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -608,6 +602,7 @@ dependencies = [
name = "ide"
version = "0.0.0"
dependencies = [
"arrayvec",
"cfg",
"cov-mark",
"crossbeam-channel",
@ -764,7 +759,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
dependencies = [
"equivalent",
"hashbrown 0.14.2",
"hashbrown",
]
[[package]]
@ -792,7 +787,7 @@ name = "intern"
version = "0.0.0"
dependencies = [
"dashmap",
"hashbrown 0.12.3",
"hashbrown",
"rustc-hash",
"triomphe",
]
@ -938,23 +933,23 @@ checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "lsp-server"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928"
dependencies = [
"crossbeam-channel",
"ctrlc",
"log",
"lsp-types",
"serde",
"serde_json",
]
[[package]]
name = "lsp-server"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928"
version = "0.7.5"
dependencies = [
"crossbeam-channel",
"ctrlc",
"log",
"lsp-types",
"serde",
"serde_json",
]
@ -1002,15 +997,6 @@ dependencies = [
"libc",
]
[[package]]
name = "memoffset"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1"
dependencies = [
"autocfg",
]
[[package]]
name = "memoffset"
version = "0.9.0"
@ -1061,11 +1047,11 @@ dependencies = [
[[package]]
name = "miow"
version = "0.5.0"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123"
checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044"
dependencies = [
"windows-sys 0.42.0",
"windows-sys 0.48.0",
]
[[package]]
@ -1177,15 +1163,15 @@ dependencies = [
[[package]]
name = "parking_lot_core"
version = "0.9.6"
version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf"
checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
dependencies = [
"cfg-if",
"libc",
"redox_syscall 0.2.16",
"redox_syscall 0.4.1",
"smallvec",
"windows-sys 0.42.0",
"windows-targets",
]
[[package]]
@ -1255,6 +1241,9 @@ checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
name = "proc-macro-api"
version = "0.0.0"
dependencies = [
"base-db",
"indexmap",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"memmap2",
"object 0.32.0",
"paths",
@ -1263,6 +1252,7 @@ dependencies = [
"serde_json",
"snap",
"stdx",
"text-size",
"tracing",
"triomphe",
"tt",
@ -1402,9 +1392,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
version = "0.20.0"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5f38444d48da534b3bb612713fce9b0aeeffb2e0dfa242764f55482acc5b52d"
checksum = "7816f980fab89e878ff2e916e2077d484e3aa1c619a3cc982c8a417c3dfe45fa"
dependencies = [
"bitflags 1.3.2",
"ra-ap-rustc_index",
@ -1413,9 +1403,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index"
version = "0.20.0"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69fb5da07e1a39222d9c311203123c3b6a86420fa06dc695aa1661b0aecf8d16"
checksum = "8352918d61aa4afab9f2ed7314cf638976b20949b3d61d2f468c975b0d251f24"
dependencies = [
"arrayvec",
"ra-ap-rustc_index_macros",
@ -1424,9 +1414,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.20.0"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d69f9f6af58124f2da0cb8b0c3d8494e0d883a5fe0c6732258bde81ac5a87cc"
checksum = "66a9424018828155a3e3596515598f90e68427d8f35eff6df7f0856c73fc58a8"
dependencies = [
"proc-macro2",
"quote",
@ -1436,9 +1426,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.20.0"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d5e8650195795c4023d8321846466994a975bc457cb8a91c0b3b17a5fc8ba40"
checksum = "dc741c7a78103efab416b562e35bd73c8d4967478575010c86c6062f8d3cbf29"
dependencies = [
"unicode-properties",
"unicode-xid",
@ -1446,9 +1436,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.20.0"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a6b325ee1ec90e4dbd4394913adf4ef32e4fcf2b311ec9563a0fa50cd549af6"
checksum = "d557201d71792487bd2bab637ab5be9aa6fff59b88e25e12de180b0f9d2df60f"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",
@ -1474,15 +1464,6 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "redox_syscall"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "redox_syscall"
version = "0.3.5"
@ -1493,14 +1474,23 @@ dependencies = [
]
[[package]]
name = "rowan"
version = "0.15.11"
name = "redox_syscall"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64449cfef9483a475ed56ae30e2da5ee96448789fb2aa240a04beb6a055078bf"
checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "rowan"
version = "0.15.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
dependencies = [
"countme",
"hashbrown 0.12.3",
"memoffset 0.8.0",
"hashbrown",
"memoffset",
"rustc-hash",
"text-size",
]
@ -1524,16 +1514,14 @@ dependencies = [
"ide-ssr",
"itertools",
"load-cargo",
"lsp-server 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-server 0.7.4",
"lsp-types",
"mbe",
"mimalloc",
"mio",
"nohash-hasher",
"num_cpus",
"oorandom",
"parking_lot",
"parking_lot_core",
"parser",
"proc-macro-api",
"profile",
@ -1564,11 +1552,10 @@ dependencies = [
[[package]]
name = "rust-analyzer-salsa"
version = "0.17.0-pre.3"
version = "0.17.0-pre.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ca92b657d614d076800aa7bf5d5ba33564e71fa7f16cd79eacdfe301a50ab1c"
checksum = "16c42b8737c320578b441a82daf7cdf8d897468de64e8a774fa54b53a50b6cc0"
dependencies = [
"crossbeam-utils",
"indexmap",
"lock_api",
"log",
@ -1581,9 +1568,9 @@ dependencies = [
[[package]]
name = "rust-analyzer-salsa-macros"
version = "0.17.0-pre.3"
version = "0.17.0-pre.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b190359266d293f2ee13eaa502a766dc8b77b63fbaa5d460d24fd0210675ceef"
checksum = "db72b0883f3592ade2be15a10583c75e0b269ec26e1190800fda2e2ce5ae6634"
dependencies = [
"heck",
"proc-macro2",
@ -1751,6 +1738,7 @@ dependencies = [
"always-assert",
"backtrace",
"crossbeam-channel",
"itertools",
"jod-thread",
"libc",
"miow",
@ -2010,6 +1998,7 @@ version = "0.0.0"
dependencies = [
"smol_str",
"stdx",
"text-size",
]
[[package]]


@ -42,7 +42,7 @@ debug = 0
# ungrammar = { path = "../ungrammar" }
# salsa = { path = "../salsa" }
# rust-analyzer-salsa = { path = "../salsa" }
[workspace.dependencies]
# local crates
@ -95,14 +95,25 @@ bitflags = "2.4.1"
cargo_metadata = "0.18.1"
dissimilar = "1.0.7"
either = "1.9.0"
hashbrown = { version = "0.14", features = [
"inline-more",
], default-features = false }
indexmap = "2.1.0"
itertools = "0.12.0"
libc = "0.2.150"
nohash-hasher = "0.2.0"
rayon = "1.8.0"
rust-analyzer-salsa = "0.17.0-pre.4"
rustc-hash = "1.1.0"
serde = { version = "1.0.192", features = ["derive"] }
serde_json = "1.0.108"
smallvec = { version = "1.10.0", features = [
"const_new",
"union",
"const_generics",
] }
smol_str = "0.2.0"
text-size = "1.1.1"
tracing = "0.1.40"
tracing-tree = "0.3.0"
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
@ -110,15 +121,8 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
"fmt",
"tracing-log",
] }
smol_str = "0.2.0"
nohash-hasher = "0.2.0"
text-size = "1.1.1"
rayon = "1.8.0"
serde = { version = "1.0.192", features = ["derive"] }
serde_json = "1.0.108"
triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
# can't upgrade due to dashmap depending on 0.12.3 currently
hashbrown = { version = "0.12.3", features = [
"inline-more",
], default-features = false }
xshell = "0.2.5"
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=5.5.3", features = ["raw-api"] }


@ -12,12 +12,10 @@ rust-version.workspace = true
doctest = false
[dependencies]
rust-analyzer-salsa = "0.17.0-pre.3"
rustc-hash = "1.1.0"
triomphe.workspace = true
la-arena.workspace = true
rust-analyzer-salsa.workspace = true
rustc-hash.workspace = true
triomphe.workspace = true
# local deps
cfg.workspace = true


@ -8,11 +8,12 @@ use test_utils::{
ESCAPED_CURSOR_MARKER,
};
use triomphe::Arc;
use tt::token_id::{Leaf, Subtree, TokenTree};
use tt::{Leaf, Subtree, TokenTree};
use vfs::{file_set::FileSet, VfsPath};
use crate::{
input::{CrateName, CrateOrigin, LangCrateOrigin},
span::SpanData,
Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env,
FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId,
@ -134,7 +135,7 @@ impl ChangeFixture {
let mut file_set = FileSet::default();
let mut current_source_root_kind = SourceRootKind::Local;
let mut file_id = FileId(0);
let mut file_id = FileId::from_raw(0);
let mut roots = Vec::new();
let mut file_position = None;
@ -209,7 +210,7 @@ impl ChangeFixture {
let path = VfsPath::new_virtual_path(meta.path);
file_set.insert(file_id, path);
files.push(file_id);
file_id.0 += 1;
file_id = FileId::from_raw(file_id.index() + 1);
}
if crates.is_empty() {
@ -254,7 +255,7 @@ impl ChangeFixture {
if let Some(mini_core) = mini_core {
let core_file = file_id;
file_id.0 += 1;
file_id = FileId::from_raw(file_id.index() + 1);
let mut fs = FileSet::default();
fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string()));
@ -295,7 +296,6 @@ impl ChangeFixture {
let mut proc_macros = ProcMacros::default();
if !proc_macro_names.is_empty() {
let proc_lib_file = file_id;
file_id.0 += 1;
proc_macro_defs.extend(default_test_proc_macros());
let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macro_defs);
@ -539,10 +539,13 @@ struct IdentityProcMacroExpander;
impl ProcMacroExpander for IdentityProcMacroExpander {
fn expand(
&self,
subtree: &Subtree,
_: Option<&Subtree>,
subtree: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
}
@ -553,10 +556,13 @@ struct AttributeInputReplaceProcMacroExpander;
impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
fn expand(
&self,
_: &Subtree,
attrs: Option<&Subtree>,
_: &Subtree<SpanData>,
attrs: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
attrs
.cloned()
.ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
@ -568,11 +574,14 @@ struct MirrorProcMacroExpander;
impl ProcMacroExpander for MirrorProcMacroExpander {
fn expand(
&self,
input: &Subtree,
_: Option<&Subtree>,
input: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
fn traverse(input: &Subtree) -> Subtree {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let mut token_trees = vec![];
for tt in input.token_trees.iter().rev() {
let tt = match tt {
@ -595,13 +604,16 @@ struct ShortenProcMacroExpander;
impl ProcMacroExpander for ShortenProcMacroExpander {
fn expand(
&self,
input: &Subtree,
_: Option<&Subtree>,
input: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
return Ok(traverse(input));
fn traverse(input: &Subtree) -> Subtree {
fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let token_trees = input
.token_trees
.iter()
@ -613,7 +625,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
Subtree { delimiter: input.delimiter, token_trees }
}
fn modify_leaf(leaf: &Leaf) -> Leaf {
fn modify_leaf(leaf: &Leaf<SpanData>) -> Leaf<SpanData> {
let mut leaf = leaf.clone();
match &mut leaf {
Leaf::Literal(it) => {


@ -13,9 +13,10 @@ use la_arena::{Arena, Idx};
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::SmolStr;
use triomphe::Arc;
use tt::token_id::Subtree;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
use crate::span::SpanData;
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been built yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
@ -242,6 +243,9 @@ impl CrateDisplayName {
}
}
// FIXME: These should not be defined in here? Why does base db know about proc-macros
// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32);
@ -255,10 +259,13 @@ pub enum ProcMacroKind {
pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
fn expand(
&self,
subtree: &Subtree,
attrs: Option<&Subtree>,
subtree: &tt::Subtree<SpanData>,
attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
) -> Result<Subtree, ProcMacroExpansionError>;
def_site: SpanData,
call_site: SpanData,
mixed_site: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
}
#[derive(Debug)]
@ -323,7 +330,9 @@ pub struct CrateData {
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
// FIXME: These things should not be per crate! These are more per workspace crate graph level things
// FIXME: These things should not be per crate! These are more per workspace crate graph level
// things. This info does need to be somewhat present though, so as to prevent deduplication from
// happening across different workspaces with different layouts.
pub target_layout: TargetLayoutLoadResult,
pub channel: Option<ReleaseChannel>,
}
@ -871,7 +880,7 @@ mod tests {
fn detect_cyclic_dependency_indirect() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -884,7 +893,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,
@ -897,7 +906,7 @@ mod tests {
None,
);
let crate3 = graph.add_crate_root(
FileId(3u32),
FileId::from_raw(3u32),
Edition2018,
None,
None,
@ -933,7 +942,7 @@ mod tests {
fn detect_cyclic_dependency_direct() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -946,7 +955,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,
@ -976,7 +985,7 @@ mod tests {
fn it_works() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -989,7 +998,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,
@ -1002,7 +1011,7 @@ mod tests {
None,
);
let crate3 = graph.add_crate_root(
FileId(3u32),
FileId::from_raw(3u32),
Edition2018,
None,
None,
@ -1032,7 +1041,7 @@ mod tests {
fn dashes_are_normalized() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -1045,7 +1054,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,


@ -1,10 +1,11 @@
//! base_db defines basic database traits. The concrete DB is defined by ide.
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![warn(rust_2018_idioms, unused_lifetimes)]
mod input;
mod change;
pub mod fixture;
pub mod span;
use std::panic;
@ -12,14 +13,13 @@ use rustc_hash::FxHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
use triomphe::Arc;
pub use crate::input::DependencyKind;
pub use crate::{
change::Change,
input::{
CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros,
ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander,
ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths,
ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
},
};
pub use salsa::{self, Cancelled};
@ -68,8 +68,7 @@ pub trait FileLoader {
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
// Parses the file into the syntax tree.
#[salsa::invoke(parse_query)]
/// Parses the file into the syntax tree.
fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
/// The crate graph.
@ -81,7 +80,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
fn proc_macros(&self) -> Arc<ProcMacros>;
}
fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
let text = db.file_text(file_id);
SourceFile::parse(&text)

crates/base-db/src/span.rs (new file)

@ -0,0 +1,203 @@
//! File and span related types.
// FIXME: This should probably be moved into its own crate.
use std::fmt;
use salsa::InternId;
use tt::SyntaxContext;
use vfs::FileId;
pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
// The first index is always the root node's AstId
pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SyntaxContextId(InternId);
impl fmt::Debug for SyntaxContextId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if *self == Self::SELF_REF {
f.debug_tuple("SyntaxContextId")
.field(&{
#[derive(Debug)]
#[allow(non_camel_case_types)]
struct SELF_REF;
SELF_REF
})
.finish()
} else {
f.debug_tuple("SyntaxContextId").field(&self.0).finish()
}
}
}
crate::impl_intern_key!(SyntaxContextId);
impl fmt::Display for SyntaxContextId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0.as_u32())
}
}
impl SyntaxContext for SyntaxContextId {
const DUMMY: Self = Self::ROOT;
}
// inherent trait impls please tyvm
impl SyntaxContextId {
pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
// veykril(HACK): FIXME salsa doesn't allow us to fetch the id of the current input to be allocated, so
// we need a special value that behaves as the current context.
pub const SELF_REF: Self =
SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
pub fn is_root(self) -> bool {
self == Self::ROOT
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct SpanAnchor {
pub file_id: FileId,
pub ast_id: ErasedFileAstId,
}
impl fmt::Debug for SpanAnchor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish()
}
}
impl tt::SpanAnchor for SpanAnchor {
const DUMMY: Self = SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID };
}
/// Input to the analyzer is a set of files, where each file is identified by
/// `FileId` and contains source code. However, another source of source code in
/// Rust is macros: each macro can be thought of as producing a "temporary
/// file". To assign an id to such a file, we use the id of the macro call that
/// produced the file. So, a `HirFileId` is either a `FileId` (source code
/// written by user), or a `MacroCallId` (source code produced by macro).
///
/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
/// containing the call plus the offset of the macro call in the file. Note that
/// this is a recursive definition! However, the size_of of `HirFileId` is
/// finite (because everything bottoms out at the real `FileId`) and small
/// (`MacroCallId` uses the location interning. You can check details here:
/// <https://en.wikipedia.org/wiki/String_interning>).
///
/// The two variants are encoded in a single u32 which is differentiated by the MSB.
/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
/// `MacroCallId`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct HirFileId(u32);
impl From<HirFileId> for u32 {
fn from(value: HirFileId) -> Self {
value.0
}
}
impl From<MacroCallId> for HirFileId {
fn from(value: MacroCallId) -> Self {
value.as_file()
}
}
impl fmt::Debug for HirFileId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.repr().fmt(f)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFileId {
pub macro_call_id: MacroCallId,
}
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroCallId(salsa::InternId);
crate::impl_intern_key!(MacroCallId);
impl MacroCallId {
pub fn as_file(self) -> HirFileId {
MacroFileId { macro_call_id: self }.into()
}
pub fn as_macro_file(self) -> MacroFileId {
MacroFileId { macro_call_id: self }
}
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum HirFileIdRepr {
FileId(FileId),
MacroFile(MacroFileId),
}
impl fmt::Debug for HirFileIdRepr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.index()).finish(),
Self::MacroFile(arg0) => {
f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish()
}
}
}
}
impl From<FileId> for HirFileId {
fn from(id: FileId) -> Self {
assert!(id.index() < Self::MAX_FILE_ID);
HirFileId(id.index())
}
}
impl From<MacroFileId> for HirFileId {
fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
let id = id.as_u32();
assert!(id < Self::MAX_FILE_ID);
HirFileId(id | Self::MACRO_FILE_TAG_MASK)
}
}
impl HirFileId {
const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
#[inline]
pub fn is_macro(self) -> bool {
self.0 & Self::MACRO_FILE_TAG_MASK != 0
}
#[inline]
pub fn macro_file(self) -> Option<MacroFileId> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => None,
_ => Some(MacroFileId {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
#[inline]
pub fn file_id(self) -> Option<FileId> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => Some(FileId::from_raw(self.0)),
_ => None,
}
}
#[inline]
pub fn repr(self) -> HirFileIdRepr {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => HirFileIdRepr::FileId(FileId::from_raw(self.0)),
_ => HirFileIdRepr::MacroFile(MacroFileId {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
}
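
The MSB-tagging scheme documented above round-trips as follows; a minimal standalone sketch, where `encode_file`, `encode_macro_call`, and `decode` are hypothetical helpers mirroring `MACRO_FILE_TAG_MASK` rather than rust-analyzer API:

```rust
// Sketch of the `HirFileId` encoding: the MSB of the u32 tags macro files,
// the low 31 bits carry the `FileId` index or the `MacroCallId`.
const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

fn encode_file(file_index: u32) -> u32 {
    assert!(file_index < MACRO_FILE_TAG_MASK); // the MSB must be free
    file_index
}

fn encode_macro_call(macro_call_id: u32) -> u32 {
    assert!(macro_call_id < MACRO_FILE_TAG_MASK);
    macro_call_id | MACRO_FILE_TAG_MASK
}

/// Returns `(is_macro, payload)`.
fn decode(repr: u32) -> (bool, u32) {
    (repr & MACRO_FILE_TAG_MASK != 0, repr & !MACRO_FILE_TAG_MASK)
}

fn main() {
    assert_eq!(decode(encode_file(42)), (false, 42));
    assert_eq!(decode(encode_macro_call(42)), (true, 42));
}
```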


@ -1,6 +1,6 @@
//! cfg defines conditional compiling options, `cfg` attribute parser and evaluator
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![warn(rust_2018_idioms, unused_lifetimes)]
mod cfg_expr;
mod dnf;


@ -1,37 +1,31 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect};
use mbe::syntax_node_to_token_tree;
use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
use syntax::{ast, AstNode};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
fn assert_parse_result(input: &str, expected: CfgExpr) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
}
fn check_dnf(input: &str, expect: Expect) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
expect.assert_eq(&actual);
}
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@ -40,11 +34,9 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
#[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();


@ -2,7 +2,7 @@
//! another compatible command (e.g. clippy) in a background thread and provide
//! LSP diagnostics based on the output of the command.
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![warn(rust_2018_idioms, unused_lifetimes)]
use std::{
ffi::OsString,


@ -15,8 +15,7 @@ doctest = false
arrayvec = "0.7.2"
bitflags.workspace = true
cov-mark = "2.0.0-pre.1"
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=5.4.0", features = ["raw-api"] }
dashmap.workspace = true
drop_bomb = "0.1.5"
either.workspace = true
fst = { version = "0.4.7", default-features = false }


@ -421,6 +421,7 @@ impl AttrsWithOwner {
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id()]),
db.span_map(src.file_id).as_ref(),
)
}
GenericParamId::TypeParamId(it) => {
@ -428,11 +429,16 @@ impl AttrsWithOwner {
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id()]),
db.span_map(src.file_id).as_ref(),
)
}
GenericParamId::LifetimeParamId(it) => {
let src = it.parent.child_source(db);
RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id]))
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id]),
db.span_map(src.file_id).as_ref(),
)
}
},
AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),


@ -1,17 +1,20 @@
//! This module contains tests for doc-expression parsing.
//! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
use base_db::FileId;
use hir_expand::span::{RealSpanMap, SpanMapRef};
use mbe::syntax_node_to_token_tree;
use syntax::{ast, AstNode};
use crate::attr::{DocAtom, DocExpr};
fn assert_parse_result(input: &str, expected: DocExpr) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt.syntax(),
SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::from_raw(0))),
);
let cfg = DocExpr::parse(&tt);
assert_eq!(cfg, expected);
}


@ -95,6 +95,8 @@ pub struct BodySourceMap {
field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
format_args_template_map: FxHashMap<ExprId, Vec<(syntax::TextRange, Name)>>,
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
/// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in
@ -387,6 +389,14 @@ impl BodySourceMap {
self.expr_map.get(&src).copied()
}
pub fn implicit_format_args(
&self,
node: InFile<&ast::FormatArgsExpr>,
) -> Option<&[(syntax::TextRange, Name)]> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
self.format_args_template_map.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref)
}
/// Get a reference to the body source map's diagnostics.
pub fn diagnostics(&self) -> &[BodyDiagnostic] {
&self.diagnostics
@ -403,8 +413,10 @@ impl BodySourceMap {
field_map_back,
pat_field_map_back,
expansions,
format_args_template_map,
diagnostics,
} = self;
format_args_template_map.shrink_to_fit();
expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit();
pat_map.shrink_to_fit();


@ -1025,7 +1025,7 @@ impl ExprCollector<'_> {
let id = collector(self, Some(expansion.tree()));
self.ast_id_map = prev_ast_id_map;
self.expander.exit(self.db, mark);
self.expander.exit(mark);
id
}
None => collector(self, None),
@ -1597,13 +1597,25 @@ impl ExprCollector<'_> {
});
let template = f.template();
let fmt_snippet = template.as_ref().map(ToString::to_string);
let mut mappings = vec![];
let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) {
Some((s, is_direct_literal)) => {
format_args::parse(&s, fmt_snippet, args, is_direct_literal, |name| {
self.alloc_expr_desugared(Expr::Path(Path::from(name)))
})
}
None => FormatArgs { template: Default::default(), arguments: args.finish() },
Some((s, is_direct_literal)) => format_args::parse(
&s,
fmt_snippet,
args,
is_direct_literal,
|name| self.alloc_expr_desugared(Expr::Path(Path::from(name))),
|name, span| {
if let Some(span) = span {
mappings.push((span, name.clone()))
}
},
),
None => FormatArgs {
template: Default::default(),
arguments: args.finish(),
orphans: Default::default(),
},
};
// Create a list of all _unique_ (argument, format trait) combinations.
@ -1742,18 +1754,26 @@ impl ExprCollector<'_> {
});
let unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe {
id: None,
statements: Box::default(),
// We collect the unused expressions here so that we still infer them instead of
// dropping them out of the expression tree
statements: fmt
.orphans
.into_iter()
.map(|expr| Statement::Expr { expr, has_semi: true })
.collect(),
tail: Some(unsafe_arg_new),
});
self.alloc_expr(
let idx = self.alloc_expr(
Expr::Call {
callee: new_v1_formatted,
args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]),
is_assignee_expr: false,
},
syntax_ptr,
)
);
self.source_map.format_args_template_map.insert(idx, mappings);
idx
}
/// Generate a hir expression for a format_args placeholder specification.
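
For a concrete picture of what the new `record_usage` callback and `format_args_template_map` capture, consider a format string with implicit named arguments; a minimal sketch in plain Rust, independent of rust-analyzer internals:

```rust
fn main() {
    let x = 42;
    let width = 6;
    // `x` and `width` are implicit named arguments captured from the scope.
    // For each such usage, the lowering above records the name together with
    // its text range inside the literal, so IDE features can later map the
    // placeholder back to the identifiers via `implicit_format_args`.
    println!("{x:width$}");
}
```

Arguments that are passed but never referenced by the template end up in the new `orphans` list instead, so they still participate in type inference.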


@ -143,7 +143,6 @@ mod m {
#[test]
fn desugar_builtin_format_args() {
// Regression test for a path resolution bug introduced with inner item handling.
let (db, body, def) = lower(
r#"
//- minicore: fmt
@ -161,7 +160,7 @@ fn main() {
let count = 10;
builtin#lang(Arguments::new_v1_formatted)(
&[
"\"hello ", " ", " friends, we ", " ", "", "\"",
"hello ", " ", " friends, we ", " ", "",
],
&[
builtin#lang(Argument::new_display)(
@ -221,3 +220,115 @@ fn main() {
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}
#[test]
fn test_macro_hygiene() {
let (db, body, def) = lower(
r##"
//- minicore: fmt, from
//- /main.rs
mod error;
use crate::error::error;
fn main() {
// _ = forces body expansion instead of block def map expansion
_ = error!("Failed to resolve path `{}`", node.text());
}
//- /error.rs
macro_rules! _error {
($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))}
}
pub(crate) use _error as error;
macro_rules! _intermediate {
($arg:expr) => {$crate::error::SsrError::new($arg)}
}
pub(crate) use _intermediate as intermediate;
pub struct SsrError(pub(crate) core::fmt::Arguments);
impl SsrError {
pub(crate) fn new(message: impl Into<core::fmt::Arguments>) -> SsrError {
SsrError(message.into())
}
}
"##,
);
assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]);
expect![[r#"
fn main() {
_ = $crate::error::SsrError::new(
builtin#lang(Arguments::new_v1_formatted)(
&[
"Failed to resolve path `", "`",
],
&[
builtin#lang(Argument::new_display)(
&node.text(),
),
],
&[
builtin#lang(Placeholder::new)(
0usize,
' ',
builtin#lang(Alignment::Unknown),
0u32,
builtin#lang(Count::Implied),
builtin#lang(Count::Implied),
),
],
unsafe {
builtin#lang(UnsafeArg::new)()
},
),
);
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}
#[test]
fn regression_10300() {
let (db, body, def) = lower(
r#"
//- minicore: concat, panic
mod private {
pub use core::concat;
}
macro_rules! m {
() => {
panic!(concat!($crate::private::concat!("cc")));
};
}
fn f() {
m!();
}
"#,
);
let (_, source_map) = db.body_with_source_map(def.into());
assert_eq!(source_map.diagnostics(), &[]);
for (_, def_map) in body.blocks(&db) {
assert_eq!(def_map.diagnostics(), &[]);
}
expect![[r#"
fn f() {
$crate::panicking::panic_fmt(
builtin#lang(Arguments::new_v1_formatted)(
&[
"cc",
],
&[],
&[],
unsafe {
builtin#lang(UnsafeArg::new)()
},
),
);
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}


@ -663,7 +663,7 @@ impl<'a> AssocItemCollector<'a> {
self.module_id.local_id,
MacroCallKind::Attr {
ast_id,
attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
attr_args: None,
invoc_attr_index: attr.id,
},
attr.path().clone(),
@ -706,7 +706,7 @@ impl<'a> AssocItemCollector<'a> {
}
AssocItem::MacroCall(call) => {
let file_id = self.expander.current_file_id();
let MacroCall { ast_id, expand_to, ref path } = item_tree[call];
let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call];
let module = self.expander.module.local_id;
let resolver = |path| {
@ -725,6 +725,7 @@ impl<'a> AssocItemCollector<'a> {
match macro_call_as_call_id(
self.db.upcast(),
&AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
call_site,
expand_to,
self.expander.module.krate(),
resolver,
@ -793,7 +794,7 @@ impl<'a> AssocItemCollector<'a> {
self.collect(&item_tree, tree_id, &iter);
self.expander.exit(self.db, mark);
self.expander.exit(mark);
}
}


@ -4,21 +4,21 @@ use base_db::CrateId;
use cfg::CfgOptions;
use drop_bomb::DropBomb;
use hir_expand::{
attrs::RawAttrs, hygiene::Hygiene, mod_path::ModPath, ExpandError, ExpandResult, HirFileId,
InFile, MacroCallId, UnresolvedMacro,
attrs::RawAttrs, mod_path::ModPath, span::SpanMap, ExpandError, ExpandResult, HirFileId,
InFile, MacroCallId,
};
use limit::Limit;
use syntax::{ast, Parse, SyntaxNode};
use crate::{
attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall,
MacroId, ModuleId,
MacroId, ModuleId, UnresolvedMacro,
};
#[derive(Debug)]
pub struct Expander {
cfg_options: CfgOptions,
hygiene: Hygiene,
span_map: SpanMap,
krate: CrateId,
pub(crate) current_file_id: HirFileId,
pub(crate) module: ModuleId,
@ -41,7 +41,7 @@ impl Expander {
recursion_depth: 0,
recursion_limit,
cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
hygiene: Hygiene::new(db.upcast(), current_file_id),
span_map: db.span_map(current_file_id),
krate: module.krate,
}
}
@ -94,8 +94,8 @@ impl Expander {
ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) }
}
pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
pub fn exit(&mut self, mut mark: Mark) {
self.span_map = mark.span_map;
self.current_file_id = mark.file_id;
if self.recursion_depth == u32::MAX {
// Recursion limit has been reached somewhere in the macro expansion tree. Reset the
@ -110,7 +110,7 @@ impl Expander {
}
pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> {
LowerCtx::new(db, &self.hygiene, self.current_file_id)
LowerCtx::new(db, self.span_map.clone(), self.current_file_id)
}
pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
@ -118,7 +118,7 @@ impl Expander {
}
pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, self.span_map.as_ref()))
}
pub(crate) fn cfg_options(&self) -> &CfgOptions {
@ -130,8 +130,8 @@ impl Expander {
}
pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id);
Path::from_src(path, &ctx)
let ctx = LowerCtx::new(db, self.span_map.clone(), self.current_file_id);
Path::from_src(&ctx, path)
}
fn within_limit<F, T: ast::AstNode>(
@ -174,10 +174,11 @@ impl Expander {
let parse = value.cast::<T>()?;
self.recursion_depth += 1;
self.hygiene = Hygiene::new(db.upcast(), file_id);
let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id));
let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
let mark = Mark {
file_id: old_file_id,
span_map: old_span_map,
bomb: DropBomb::new("expansion mark dropped"),
};
Some((mark, parse))
@ -190,5 +191,6 @@ impl Expander {
#[derive(Debug)]
pub struct Mark {
file_id: HirFileId,
span_map: SpanMap,
bomb: DropBomb,
}
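
The `Mark` type above implements a save/restore pattern: entering an expansion stashes the previous file id and span map in a `Mark`, `exit` swaps them back, and the `DropBomb` panics if a mark is leaked without calling `exit`. A minimal standalone sketch of the pattern, with the state reduced to a single integer (`drop_bomb` is the crate already used here):

```rust
use drop_bomb::DropBomb;

struct Expander {
    current_file_id: u32, // stands in for the real HirFileId + SpanMap state
}

struct Mark {
    old_file_id: u32,
    bomb: DropBomb,
}

impl Expander {
    fn enter(&mut self, file_id: u32) -> Mark {
        let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
        Mark { old_file_id, bomb: DropBomb::new("expansion mark dropped") }
    }

    fn exit(&mut self, mut mark: Mark) {
        self.current_file_id = mark.old_file_id;
        mark.bomb.defuse(); // the mark was properly exited, no panic on drop
    }
}

fn main() {
    let mut expander = Expander { current_file_id: 0 };
    let mark = expander.enter(1);
    assert_eq!(expander.current_file_id, 1);
    expander.exit(mark);
    assert_eq!(expander.current_file_id, 0);
}
```

Note that the diff's `exit` no longer needs the database handle: the old span map travels inside the `Mark` instead of being recomputed.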


@ -586,7 +586,7 @@ fn find_local_import_locations(
#[cfg(test)]
mod tests {
use base_db::fixture::WithFixture;
use hir_expand::hygiene::Hygiene;
use hir_expand::db::ExpandDatabase;
use syntax::ast::AstNode;
use crate::test_db::TestDB;
@ -608,7 +608,8 @@ mod tests {
let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
let ast_path =
parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap();
let mod_path =
ModPath::from_src(&db, ast_path, db.span_map(pos.file_id.into()).as_ref()).unwrap();
let def_map = module.def_map(&db);
let resolved = def_map


@ -21,7 +21,7 @@ use crate::{
db::DefDatabase,
dyn_map::{keys, DynMap},
expander::Expander,
item_tree::{AttrOwner, ItemTree},
item_tree::ItemTree,
lower::LowerCtx,
nameres::{DefMap, MacroSubNs},
src::{HasChildSource, HasSource},
@ -250,7 +250,10 @@ impl GenericParams {
&mut self,
lower_ctx: &LowerCtx<'_>,
node: &dyn HasGenericParams,
add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
add_param_attrs: impl FnMut(
Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
ast::GenericParam,
),
) {
if let Some(params) = node.generic_param_list() {
self.fill_params(lower_ctx, params, add_param_attrs)
@ -275,7 +278,10 @@ impl GenericParams {
&mut self,
lower_ctx: &LowerCtx<'_>,
params: ast::GenericParamList,
mut add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
mut add_param_attrs: impl FnMut(
Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
ast::GenericParam,
),
) {
for type_or_const_param in params.type_or_const_params() {
match type_or_const_param {
@ -297,7 +303,7 @@ impl GenericParams {
type_param.type_bound_list(),
Either::Left(type_ref),
);
add_param_attrs(idx.into(), ast::GenericParam::TypeParam(type_param));
add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param));
}
ast::TypeOrConstParam::Const(const_param) => {
let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
@ -310,7 +316,7 @@ impl GenericParams {
default: ConstRef::from_const_param(lower_ctx, &const_param),
};
let idx = self.type_or_consts.alloc(param.into());
add_param_attrs(idx.into(), ast::GenericParam::ConstParam(const_param));
add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param));
}
}
}
@ -325,7 +331,7 @@ impl GenericParams {
lifetime_param.type_bound_list(),
Either::Right(lifetime_ref),
);
add_param_attrs(idx.into(), ast::GenericParam::LifetimeParam(lifetime_param));
add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param));
}
}
@ -433,7 +439,7 @@ impl GenericParams {
let ctx = expander.ctx(db);
let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref);
exp.1.exit(db, mark);
exp.1.exit(mark);
}
}
});
@ -518,7 +524,7 @@ fn file_id_and_params_of(
(src.file_id, src.value.generic_param_list())
}
// We won't be using this ID anyway
GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId(!0).into(), None),
GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None),
}
}


@ -3,9 +3,10 @@ use std::mem;
use hir_expand::name::Name;
use rustc_dependencies::parse_format as parse;
use stdx::TupleExt;
use syntax::{
ast::{self, IsString},
AstToken, SmolStr, TextRange,
SmolStr, TextRange, TextSize,
};
use crate::hir::ExprId;
@ -14,6 +15,7 @@ use crate::hir::ExprId;
pub struct FormatArgs {
pub template: Box<[FormatArgsPiece]>,
pub arguments: FormatArguments,
pub orphans: Vec<ExprId>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@ -170,15 +172,18 @@ pub(crate) fn parse(
mut args: FormatArgumentsCollector,
is_direct_literal: bool,
mut synth: impl FnMut(Name) -> ExprId,
mut record_usage: impl FnMut(Name, Option<TextRange>),
) -> FormatArgs {
let text = s.text();
let text = s.text_without_quotes();
let str_style = match s.quote_offsets() {
Some(offsets) => {
let raw = u32::from(offsets.quotes.0.len()) - 1;
(raw != 0).then_some(raw as usize)
// subtract 1 for the `r` prefix
(raw != 0).then(|| raw as usize - 1)
}
None => None,
};
let mut parser =
parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format);
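
The `str_style` computation above derives the number of `#`s in a raw string from the length of its opening delimiter; a standalone sketch of just the arithmetic, where the delimiter length stands in for `offsets.quotes.0.len()`:

```rust
// `opening_len` is the opening delimiter length: 1 for "...", 2 for r"...",
// 3 for r#"..."#, and so on.
fn str_style(opening_len: u32) -> Option<usize> {
    let raw = opening_len - 1; // characters before the quote: `r` plus any `#`s
    (raw != 0).then(|| raw as usize - 1) // subtract 1 for the `r` prefix
}

fn main() {
    assert_eq!(str_style(1), None);    // "..."     not a raw string
    assert_eq!(str_style(2), Some(0)); // r"..."    raw, zero #s
    assert_eq!(str_style(3), Some(1)); // r#"..."#  raw, one #
}
```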
@ -193,12 +198,17 @@ pub(crate) fn parse(
let is_source_literal = parser.is_source_literal;
if !parser.errors.is_empty() {
// FIXME: Diagnose
return FormatArgs { template: Default::default(), arguments: args.finish() };
return FormatArgs {
template: Default::default(),
arguments: args.finish(),
orphans: vec![],
};
}
let to_span = |inner_span: parse::InnerSpan| {
is_source_literal.then(|| {
TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap())
- TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1)
})
};
@ -230,9 +240,10 @@ pub(crate) fn parse(
Err(index)
}
}
ArgRef::Name(name, _span) => {
ArgRef::Name(name, span) => {
let name = Name::new_text_dont_use(SmolStr::new(name));
if let Some((index, _)) = args.by_name(&name) {
record_usage(name, span);
// Name found in `args`, so we resolve it to its index.
if index < args.explicit_args().len() {
// Mark it as used, if it was an explicit argument.
@ -246,6 +257,7 @@ pub(crate) fn parse(
// disabled (see RFC #2795)
// FIXME: Diagnose
}
record_usage(name.clone(), span);
Ok(args.add(FormatArgument {
kind: FormatArgumentKind::Captured(name.clone()),
// FIXME: This is problematic, we might want to synthesize a dummy
@ -413,7 +425,11 @@ pub(crate) fn parse(
// FIXME: Diagnose
}
FormatArgs { template: template.into_boxed_slice(), arguments: args.finish() }
FormatArgs {
template: template.into_boxed_slice(),
arguments: args.finish(),
orphans: unused.into_iter().map(TupleExt::head).collect(),
}
}
#[derive(Debug, Clone, PartialEq, Eq)]


@ -112,6 +112,7 @@ pub struct ItemScope {
#[derive(Debug, PartialEq, Eq)]
struct DeriveMacroInvocation {
attr_id: AttrId,
/// The `#[derive]` call
attr_call_id: MacroCallId,
derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
}
@ -401,6 +402,14 @@ impl ItemScope {
})
}
pub fn derive_macro_invoc(
&self,
ast_id: AstId<ast::Adt>,
attr_id: AttrId,
) -> Option<MacroCallId> {
Some(self.derive_macros.get(&ast_id)?.iter().find(|it| it.attr_id == attr_id)?.attr_call_id)
}
// FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope
pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a)


@ -42,12 +42,11 @@ use std::{
};
use ast::{AstNode, HasName, StructKind};
use base_db::CrateId;
use base_db::{span::SyntaxContextId, CrateId};
use either::Either;
use hir_expand::{
ast_id_map::{AstIdNode, FileAstId},
attrs::RawAttrs,
hygiene::Hygiene,
name::{name, AsName, Name},
ExpandTo, HirFileId, InFile,
};
@ -118,7 +117,7 @@ impl ItemTree {
let mut item_tree = match_ast! {
match syntax {
ast::SourceFile(file) => {
top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene()));
top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map()));
ctx.lower_module_items(&file)
},
ast::MacroItems(items) => {
@ -749,6 +748,7 @@ pub struct MacroCall {
pub path: Interned<ModPath>,
pub ast_id: FileAstId<ast::MacroCall>,
pub expand_to: ExpandTo,
pub call_site: SyntaxContextId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
@ -778,9 +778,9 @@ impl Use {
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
let hygiene = Hygiene::new(db.upcast(), file_id);
let (_, source_map) =
lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
let span_map = db.span_map(file_id);
let (_, source_map) = lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
.expect("failed to lower use tree");
source_map[index].clone()
}
/// Maps a `UseTree` contained in this import back to its AST node.
@ -793,8 +793,10 @@ impl Use {
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
let hygiene = Hygiene::new(db.upcast(), file_id);
lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree").1
let span_map = db.span_map(file_id);
lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
.expect("failed to lower use tree")
.1
}
}


@ -2,12 +2,13 @@
use std::collections::hash_map::Entry;
use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
use hir_expand::{ast_id_map::AstIdMap, span::SpanMapRef, HirFileId};
use syntax::ast::{self, HasModuleItem, HasTypeBounds};
use crate::{
generics::{GenericParams, TypeParamData, TypeParamProvenance},
type_ref::{LifetimeRef, TraitBoundModifier, TraitRef},
LocalLifetimeParamId, LocalTypeOrConstParamId,
};
use super::*;
@ -33,8 +34,8 @@ impl<'a> Ctx<'a> {
}
}
pub(super) fn hygiene(&self) -> &Hygiene {
self.body_ctx.hygiene()
pub(super) fn span_map(&self) -> SpanMapRef<'_> {
self.body_ctx.span_map()
}
pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
@ -79,7 +80,7 @@ impl<'a> Ctx<'a> {
pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
self.tree
.attrs
.insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.hygiene()));
.insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map()));
self.tree.top_level = block
.statements()
.filter_map(|stmt| match stmt {
@ -109,8 +110,7 @@ impl<'a> Ctx<'a> {
}
fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene());
let item: ModItem = match item {
let mod_item: ModItem = match item {
ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
ast::Item::Union(ast) => self.lower_union(ast)?.into(),
ast::Item::Enum(ast) => self.lower_enum(ast)?.into(),
@ -129,10 +129,10 @@ impl<'a> Ctx<'a> {
ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
};
let attrs = RawAttrs::new(self.db.upcast(), item, self.span_map());
self.add_attrs(mod_item.into(), attrs);
self.add_attrs(item.into(), attrs);
Some(item)
Some(mod_item)
}
fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) {
@ -146,21 +146,32 @@ impl<'a> Ctx<'a> {
}
}
fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option<AssocItem> {
match item {
fn lower_assoc_item(&mut self, item_node: &ast::AssocItem) -> Option<AssocItem> {
let item: AssocItem = match item_node {
ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
}
}?;
let attrs = RawAttrs::new(self.db.upcast(), item_node, self.span_map());
self.add_attrs(
match item {
AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)),
AssocItem::TypeAlias(it) => AttrOwner::ModItem(ModItem::TypeAlias(it)),
AssocItem::Const(it) => AttrOwner::ModItem(ModItem::Const(it)),
AssocItem::MacroCall(it) => AttrOwner::ModItem(ModItem::MacroCall(it)),
},
attrs,
);
Some(item)
}
fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
let visibility = self.lower_visibility(strukt);
let name = strukt.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(strukt);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt);
let fields = self.lower_fields(&strukt.kind());
let ast_id = self.source_ast_id_map.ast_id(strukt);
let res = Struct { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().structs.alloc(res)))
}
@ -184,7 +195,10 @@ impl<'a> Ctx<'a> {
for field in fields.fields() {
if let Some(data) = self.lower_record_field(&field) {
let idx = self.data().fields.alloc(data);
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &field, self.span_map()),
);
}
}
let end = self.next_field_idx();
@ -205,7 +219,7 @@ impl<'a> Ctx<'a> {
for (i, field) in fields.fields().enumerate() {
let data = self.lower_tuple_field(i, &field);
let idx = self.data().fields.alloc(data);
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.span_map()));
}
let end = self.next_field_idx();
IdxRange::new(start..end)
@ -222,12 +236,12 @@ impl<'a> Ctx<'a> {
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
let visibility = self.lower_visibility(union);
let name = union.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(union);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, union);
let fields = match union.record_field_list() {
Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
};
let ast_id = self.source_ast_id_map.ast_id(union);
let res = Union { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().unions.alloc(res)))
}
@ -235,12 +249,12 @@ impl<'a> Ctx<'a> {
fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
let visibility = self.lower_visibility(enum_);
let name = enum_.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(enum_);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_);
let variants = match &enum_.variant_list() {
Some(variant_list) => self.lower_variants(variant_list),
None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
};
let ast_id = self.source_ast_id_map.ast_id(enum_);
let res = Enum { name, visibility, generic_params, variants, ast_id };
Some(id(self.data().enums.alloc(res)))
}
@ -252,7 +266,7 @@ impl<'a> Ctx<'a> {
let idx = self.data().variants.alloc(data);
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &variant, self.hygiene()),
RawAttrs::new(self.db.upcast(), &variant, self.span_map()),
);
}
}
@ -303,28 +317,29 @@ impl<'a> Ctx<'a> {
});
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()),
RawAttrs::new(self.db.upcast(), &self_param, self.span_map()),
);
has_self_param = true;
}
for param in param_list.params() {
let ast_id = self.source_ast_id_map.ast_id(&param);
let idx = match param.dotdotdot_token() {
Some(_) => {
let ast_id = self.source_ast_id_map.ast_id(&param);
self.data()
.params
.alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) })
}
Some(_) => self
.data()
.params
.alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }),
None => {
let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
let ty = Interned::new(type_ref);
let ast_id = self.source_ast_id_map.ast_id(&param);
self.data()
.params
.alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) })
}
};
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &param, self.hygiene()));
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &param, self.span_map()),
);
}
}
let end_param = self.next_param_idx();
@ -394,8 +409,8 @@ impl<'a> Ctx<'a> {
let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
let visibility = self.lower_visibility(type_alias);
let bounds = self.lower_type_bounds(type_alias);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let ast_id = self.source_ast_id_map.ast_id(type_alias);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id };
Some(id(self.data().type_aliases.alloc(res)))
}
@ -443,23 +458,17 @@ impl<'a> Ctx<'a> {
fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
let name = trait_def.name()?.as_name();
let visibility = self.lower_visibility(trait_def);
let ast_id = self.source_ast_id_map.ast_id(trait_def);
let generic_params =
self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def);
let is_auto = trait_def.auto_token().is_some();
let is_unsafe = trait_def.unsafe_token().is_some();
let ast_id = self.source_ast_id_map.ast_id(trait_def);
let items = trait_def
.assoc_item_list()
.into_iter()
.flat_map(|list| list.assoc_items())
.filter_map(|item| {
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
self.lower_assoc_item(&item).map(|item| {
self.add_attrs(ModItem::from(item).into(), attrs);
item
})
})
.filter_map(|item_node| self.lower_assoc_item(&item_node))
.collect();
let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
@ -472,17 +481,18 @@ impl<'a> Ctx<'a> {
) -> Option<FileItemTreeId<TraitAlias>> {
let name = trait_alias_def.name()?.as_name();
let visibility = self.lower_visibility(trait_alias_def);
let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let generic_params = self.lower_generic_params(
HasImplicitSelf::Yes(trait_alias_def.type_bound_list()),
trait_alias_def,
);
let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let alias = TraitAlias { name, visibility, generic_params, ast_id };
Some(id(self.data().trait_aliases.alloc(alias)))
}
fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
let ast_id = self.source_ast_id_map.ast_id(impl_def);
// Note that, unlike traits, trait impls don't get an implicit `Self`: in an impl,
// `Self` is a type alias for the self type rather than a type parameter, so the resolver handles it.
let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def);
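Illustrative sketch (plain Rust, not part of the diff) of the distinction the comment above draws: in a trait, `Self` behaves like an implicit type parameter, while in an impl it is an alias for the written self type.
trait Make {
    // Inside a trait, `Self` stands for whichever type implements it,
    // much like an implicit generic parameter.
    fn make() -> Self;
}
struct Foo;
impl Make for Foo {
    // Inside an impl, `Self` is just an alias for `Foo`; no extra
    // generic parameter is introduced, the resolver substitutes it.
    fn make() -> Self {
        Foo
    }
}
fn main() {
    let _ = Foo::make();
}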
@ -499,14 +509,8 @@ impl<'a> Ctx<'a> {
.assoc_item_list()
.into_iter()
.flat_map(|it| it.assoc_items())
.filter_map(|item| {
let assoc = self.lower_assoc_item(&item)?;
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
self.add_attrs(ModItem::from(assoc).into(), attrs);
Some(assoc)
})
.filter_map(|item| self.lower_assoc_item(&item))
.collect();
let ast_id = self.source_ast_id_map.ast_id(impl_def);
let res =
Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id };
Some(id(self.data().impls.alloc(res)))
@ -515,7 +519,7 @@ impl<'a> Ctx<'a> {
fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
let visibility = self.lower_visibility(use_item);
let ast_id = self.source_ast_id_map.ast_id(use_item);
let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;
let (use_tree, _) = lower_use_tree(self.db, self.span_map(), use_item.use_tree()?)?;
let res = Use { visibility, ast_id, use_tree };
Some(id(self.data().uses.alloc(res)))
@ -537,10 +541,16 @@ impl<'a> Ctx<'a> {
}
fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?);
let span_map = self.span_map();
let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, span_map)?);
let ast_id = self.source_ast_id_map.ast_id(m);
let expand_to = hir_expand::ExpandTo::from_call_site(m);
let res = MacroCall { path, ast_id, expand_to };
let res = MacroCall {
path,
ast_id,
expand_to,
call_site: span_map.span_for_range(m.syntax().text_range()).ctx,
};
Some(id(self.data().macro_calls.alloc(res)))
}
@ -572,15 +582,15 @@ impl<'a> Ctx<'a> {
// (in other words, the knowledge that they're in an extern block must not be used).
// This is because an extern block can contain macros whose ItemTree's top-level items
// should be considered to be in an extern block too.
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
let id: ModItem = match item {
ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(),
ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(),
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(),
ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(),
let mod_item: ModItem = match &item {
ast::ExternItem::Fn(ast) => self.lower_function(ast)?.into(),
ast::ExternItem::Static(ast) => self.lower_static(ast)?.into(),
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
};
self.add_attrs(id.into(), attrs);
Some(id)
let attrs = RawAttrs::new(self.db.upcast(), &item, self.span_map());
self.add_attrs(mod_item.into(), attrs);
Some(mod_item)
})
.collect()
});
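Illustrative sketch (assuming the usual macro-in-extern-block expansion rules; not part of the diff): an item produced by a macro invoked inside an `extern` block must still be treated as an extern item, which is why the lowering above cannot bake in extern-block knowledge.
macro_rules! declare {
    () => {
        fn imported() -> i32;
    };
}
extern "C" {
    // The expansion of `declare!()` must be treated as if it were
    // written directly inside this extern block.
    declare!();
}
fn main() {}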
@ -612,12 +622,16 @@ impl<'a> Ctx<'a> {
generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param));
}
let add_param_attrs = |item, param| {
let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.hygiene());
let add_param_attrs = |item: Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
param| {
let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.span_map());
// This is identical to the body of `Ctx::add_attrs()` but we can't call that here
// because it requires `&mut self` and the call to `generics.fill()` below also
// references `self`.
match self.tree.attrs.entry(item) {
match self.tree.attrs.entry(match item {
Either::Right(id) => id.into(),
Either::Left(id) => id.into(),
}) {
Entry::Occupied(mut entry) => {
*entry.get_mut() = entry.get().merge(attrs);
}
@ -643,7 +657,8 @@ impl<'a> Ctx<'a> {
}
fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene());
let vis =
RawVisibility::from_ast_with_span_map(self.db, item.visibility(), self.span_map());
self.data().vis.alloc(vis)
}
@ -721,7 +736,7 @@ fn lower_abi(abi: ast::Abi) -> Interned<str> {
struct UseTreeLowering<'a> {
db: &'a dyn DefDatabase,
hygiene: &'a Hygiene,
span_map: SpanMapRef<'a>,
mapping: Arena<ast::UseTree>,
}
@ -734,7 +749,7 @@ impl UseTreeLowering<'_> {
// E.g. `use something::{inner}` (prefix is `None`, path is `something`)
// or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
Some(path) => {
match ModPath::from_src(self.db.upcast(), path, self.hygiene) {
match ModPath::from_src(self.db.upcast(), path, self.span_map) {
Some(it) => Some(it),
None => return None, // FIXME: report errors somewhere
}
@ -753,7 +768,7 @@ impl UseTreeLowering<'_> {
} else {
let is_glob = tree.star_token().is_some();
let path = match tree.path() {
Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.hygiene)?),
Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.span_map)?),
None => None,
};
let alias = tree.rename().map(|a| {
@ -789,10 +804,10 @@ impl UseTreeLowering<'_> {
pub(crate) fn lower_use_tree(
db: &dyn DefDatabase,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
tree: ast::UseTree,
) -> Option<(UseTree, Arena<ast::UseTree>)> {
let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() };
let mut lowering = UseTreeLowering { db, span_map, mapping: Arena::new() };
let tree = lowering.lower_use_tree(tree)?;
Some((tree, lowering.mapping))
}

View file

@ -457,7 +457,7 @@ impl Printer<'_> {
}
}
ModItem::MacroCall(it) => {
let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it];
let MacroCall { path, ast_id: _, expand_to: _, call_site: _ } = &self.tree[it];
wln!(self, "{}!(...);", path.display(self.db.upcast()));
}
ModItem::MacroRules(it) => {

View file

@ -370,3 +370,15 @@ struct S<#[cfg(never)] T>;
"#]],
)
}
#[test]
fn pub_self() {
check(
r#"
pub(self) struct S;
"#,
expect![[r#"
pub(self) struct S;
"#]],
)
}

View file

@ -7,7 +7,7 @@
//! Note that `hir_def` is a work in progress, so not all of the above is
//! actually true.
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![warn(rust_2018_idioms, unused_lifetimes)]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#[allow(unused)]
@ -63,7 +63,7 @@ use std::{
panic::{RefUnwindSafe, UnwindSafe},
};
use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind};
use hir_expand::{
ast_id_map::{AstIdNode, FileAstId},
attrs::{Attr, AttrId, AttrInput},
@ -72,19 +72,18 @@ use hir_expand::{
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
eager::expand_eager_macro_input,
hygiene::Hygiene,
name::Name,
proc_macro::ProcMacroExpander,
AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
MacroDefId, MacroDefKind, UnresolvedMacro,
MacroDefId, MacroDefKind,
};
use item_tree::ExternBlock;
use la_arena::Idx;
use nameres::DefMap;
use stdx::impl_from;
use syntax::ast;
use syntax::{ast, AstNode};
use ::tt::token_id as tt;
pub use hir_expand::tt;
use crate::{
builtin_type::BuiltinType,
@ -1166,16 +1165,20 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
let h = Hygiene::new(db, self.file_id);
let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &h));
let span_map = db.span_map(self.file_id);
let path =
self.value.path().and_then(|path| path::ModPath::from_src(db, path, span_map.as_ref()));
let Some(path) = path else {
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
};
let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx;
macro_call_as_call_id_with_eager(
db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
call_site,
expands_to,
krate,
resolver,
@ -1200,17 +1203,19 @@ impl<T: AstIdNode> AstIdWithPath<T> {
fn macro_call_as_call_id(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<Option<MacroCallId>, UnresolvedMacro> {
macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver)
macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver)
.map(|res| res.value)
}
fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@ -1222,7 +1227,7 @@ fn macro_call_as_call_id_with_eager(
let res = match def.kind {
MacroDefKind::BuiltInEager(..) => {
let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
expand_eager_macro_input(db, krate, macro_call, def, &|path| {
expand_eager_macro_input(db, krate, macro_call, def, call_site, &|path| {
eager_resolver(path).filter(MacroDefId::is_fn_like)
})
}
@ -1231,6 +1236,7 @@ fn macro_call_as_call_id_with_eager(
db,
krate,
MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
call_site,
)),
err: None,
},
@ -1315,6 +1321,7 @@ fn derive_macro_as_call_id(
item_attr: &AstIdWithPath<ast::Adt>,
derive_attr_index: AttrId,
derive_pos: u32,
call_site: SyntaxContextId,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
@ -1329,6 +1336,7 @@ fn derive_macro_as_call_id(
derive_index: derive_pos,
derive_attr_index,
},
call_site,
);
Ok((macro_id, def_id, call_id))
}
@ -1341,15 +1349,13 @@ fn attr_macro_as_call_id(
def: MacroDefId,
) -> MacroCallId {
let arg = match macro_attr.input.as_deref() {
Some(AttrInput::TokenTree(tt)) => (
{
let mut tt = tt.0.clone();
tt.delimiter = tt::Delimiter::UNSPECIFIED;
tt
},
tt.1.clone(),
),
_ => (tt::Subtree::empty(), Default::default()),
Some(AttrInput::TokenTree(tt)) => {
let mut tt = tt.as_ref().clone();
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
Some(tt)
}
_ => None,
};
def.as_lazy_macro(
@ -1357,11 +1363,18 @@ fn attr_macro_as_call_id(
krate,
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
attr_args: Arc::new(arg),
attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id,
},
macro_attr.ctxt,
)
}
#[derive(Debug)]
pub struct UnresolvedMacro {
pub path: hir_expand::mod_path::ModPath,
}
intern::impl_internable!(
crate::type_ref::TypeRef,
crate::type_ref::TraitRef,

View file

@ -3,7 +3,7 @@ use std::cell::OnceCell;
use hir_expand::{
ast_id_map::{AstIdMap, AstIdNode},
hygiene::Hygiene,
span::{SpanMap, SpanMapRef},
AstId, HirFileId, InFile,
};
use syntax::ast;
@ -13,33 +13,34 @@ use crate::{db::DefDatabase, path::Path};
pub struct LowerCtx<'a> {
pub db: &'a dyn DefDatabase,
hygiene: Hygiene,
span_map: SpanMap,
// FIXME: This optimization is probably pointless; the ast id map should pretty much always exist anyway.
ast_id_map: Option<(HirFileId, OnceCell<Arc<AstIdMap>>)>,
}
impl<'a> LowerCtx<'a> {
pub fn new(db: &'a dyn DefDatabase, hygiene: &Hygiene, file_id: HirFileId) -> Self {
LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: Some((file_id, OnceCell::new())) }
pub fn new(db: &'a dyn DefDatabase, span_map: SpanMap, file_id: HirFileId) -> Self {
LowerCtx { db, span_map, ast_id_map: Some((file_id, OnceCell::new())) }
}
pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self {
LowerCtx {
db,
hygiene: Hygiene::new(db.upcast(), file_id),
span_map: db.span_map(file_id),
ast_id_map: Some((file_id, OnceCell::new())),
}
}
pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self {
LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: None }
pub fn with_span_map(db: &'a dyn DefDatabase, span_map: SpanMap) -> Self {
LowerCtx { db, span_map, ast_id_map: None }
}
pub(crate) fn hygiene(&self) -> &Hygiene {
&self.hygiene
pub(crate) fn span_map(&self) -> SpanMapRef<'_> {
self.span_map.as_ref()
}
pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
Path::from_src(ast, self)
Path::from_src(self, ast)
}
pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> {

View file

@ -468,12 +468,12 @@ macro_rules! concat_bytes {}
fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); }
"##,
expect![[r##"
expect![[r#"
#[rustc_builtin_macro]
macro_rules! concat_bytes {}
fn main() { [b'A', 66, 67, 68, b'E', 70]; }
"##]],
"#]],
);
}

View file

@ -15,7 +15,6 @@ use crate::macro_expansion_tests::check;
fn token_mapping_smoke_test() {
check(
r#"
// +tokenids
macro_rules! f {
( struct $ident:ident ) => {
struct $ident {
@ -24,26 +23,22 @@ macro_rules! f {
};
}
// +tokenids
// +spans+syntaxctxt
f!(struct MyTraitMap2);
"#,
expect![[r##"
// call ids will be shifted by Shift(30)
// +tokenids
macro_rules! f {#0
(#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9
struct#10 $#11ident#12 {#13
map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28
}#13
}#9;#29
}#0
expect![[r#"
macro_rules! f {
( struct $ident:ident ) => {
struct $ident {
map: ::std::collections::HashSet<()>,
}
};
}
// // +tokenids
// f!(struct#1 MyTraitMap2#2);
struct#10 MyTraitMap2#32 {#13
map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28
}#13
"##]],
struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@31..42\0# {#FileId(0):1@72..73\2#
map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@89..90\2#::#FileId(0):1@91..92\2#std#FileId(0):1@93..96\2#::#FileId(0):1@96..97\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@109..110\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2#
}#FileId(0):1@132..133\2#
"#]],
);
}
@ -53,49 +48,42 @@ fn token_mapping_floats() {
// (and related issues)
check(
r#"
// +tokenids
// +spans+syntaxctxt
macro_rules! f {
($($tt:tt)*) => {
$($tt)*
};
}
// +tokenids
// +spans+syntaxctxt
f! {
fn main() {
1;
1.0;
((1,),).0.0;
let x = 1;
}
}
"#,
expect![[r##"
// call ids will be shifted by Shift(18)
// +tokenids
macro_rules! f {#0
(#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11
$#12(#13$#14tt#15)#13*#16
}#11;#17
}#0
expect![[r#"
// +spans+syntaxctxt
macro_rules! f {
($($tt:tt)*) => {
$($tt)*
};
}
// // +tokenids
// f! {
// fn#1 main#2() {
// 1#5;#6
// 1.0#7;#8
// let#9 x#10 =#11 1#12;#13
// }
// }
fn#19 main#20(#21)#21 {#22
1#23;#24
1.0#25;#26
let#27 x#28 =#29 1#30;#31
}#22
fn#FileId(0):2@30..32\0# main#FileId(0):2@33..37\0#(#FileId(0):2@37..38\0#)#FileId(0):2@38..39\0# {#FileId(0):2@40..41\0#
1#FileId(0):2@50..51\0#;#FileId(0):2@51..52\0#
1.0#FileId(0):2@61..64\0#;#FileId(0):2@64..65\0#
(#FileId(0):2@74..75\0#(#FileId(0):2@75..76\0#1#FileId(0):2@76..77\0#,#FileId(0):2@77..78\0# )#FileId(0):2@78..79\0#,#FileId(0):2@79..80\0# )#FileId(0):2@80..81\0#.#FileId(0):2@81..82\0#0#FileId(0):2@82..85\0#.#FileId(0):2@82..85\0#0#FileId(0):2@82..85\0#;#FileId(0):2@85..86\0#
let#FileId(0):2@95..98\0# x#FileId(0):2@99..100\0# =#FileId(0):2@101..102\0# 1#FileId(0):2@103..104\0#;#FileId(0):2@104..105\0#
}#FileId(0):2@110..111\0#
"##]],
"#]],
);
}
@ -105,59 +93,115 @@ fn eager_expands_with_unresolved_within() {
r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
format_args!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
concat!("hello", identity!("world"), unresolved!(), identity!("!"));
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
builtin #format_args ("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
/* error: unresolved macro unresolved */"helloworld!";
}
"##]],
);
}
#[test]
fn token_mapping_eager() {
fn concat_spans() {
check(
r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($expr:expr) => { $expr };
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
#[rustc_builtin_macro]
#[macro_export]
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
concat/*+spans+syntaxctxt*/!("hello", concat!("w", identity!("o")), identity!("rld"), unresolved!(), identity!("!"));
}
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($expr:expr) => { $expr };
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
// format_args/*+tokenids*/!("{} {} {}"#1,#2 format_args#3!#4("{}"#6,#7 0#8),#9 foo#10,#11 identity#12!#13(10#15),#16 "bar"#17)
builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_args#3!#4(#5"{}"#6,#7 0#8)#5,#9 foo#10,#11 identity#12!#13(#1410#15)#14,#16 "bar"#17)#0
#[rustc_builtin_macro]
#[macro_export]
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
/* error: unresolved macro unresolved */"helloworld!"#FileId(0):3@207..323\6#;
}
}
"##]],
);
}
#[test]
fn token_mapping_across_files() {
check(
r#"
//- /lib.rs
#[macro_use]
mod foo;
mk_struct/*+spans+syntaxctxt*/!(Foo with u32);
//- /foo.rs
macro_rules! mk_struct {
($foo:ident with $ty:ty) => { struct $foo($ty); }
}
"#,
expect![[r#"
#[macro_use]
mod foo;
struct#FileId(1):1@59..65\2# Foo#FileId(0):2@32..35\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@41..44\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2#
"#]],
);
}
#[test]
fn float_field_access_macro_input() {
check(

View file

@ -1004,3 +1004,29 @@ fn main() {
"##]],
);
}
#[test]
fn eager_concat_bytes_panic() {
check(
r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! concat_bytes {}
fn main() {
let x = concat_bytes!(2);
}
"#,
expect![[r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! concat_bytes {}
fn main() {
let x = /* error: unexpected token in input */[];
}
"#]],
);
}

View file

@ -16,21 +16,16 @@ mod proc_macros;
use std::{iter, ops::Range, sync};
use ::mbe::TokenMap;
use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase};
use expect_test::Expect;
use hir_expand::{
db::{DeclarativeMacroExpander, ExpandDatabase},
AstId, InFile, MacroFile,
};
use hir_expand::{db::ExpandDatabase, span::SpanMapRef, InFile, MacroFileId, MacroFileIdExt};
use stdx::format_to;
use syntax::{
ast::{self, edit::IndentLevel},
AstNode, SyntaxElement,
SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
SyntaxNode, TextRange, T,
AstNode,
SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT},
SyntaxNode, T,
};
use tt::token_id::{Subtree, TokenId};
use crate::{
db::DefDatabase,
@ -39,6 +34,7 @@ use crate::{
resolver::HasResolver,
src::HasSource,
test_db::TestDB,
tt::Subtree,
AdtId, AsMacroCall, Lookup, ModuleDefId,
};
@ -88,43 +84,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let mut text_edits = Vec::new();
let mut expansions = Vec::new();
for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) {
let mut show_token_ids = false;
for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
show_token_ids |= comment.to_string().contains("+tokenids");
}
if !show_token_ids {
continue;
}
let call_offset = macro_.syntax().text_range().start().into();
let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
let DeclarativeMacroExpander { mac, def_site_token_map } =
&*db.decl_macro_expander(krate, ast_id);
assert_eq!(mac.err(), None);
let tt = match &macro_ {
ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
ast::Macro::MacroDef(_) => unimplemented!(""),
};
let tt_start = tt.syntax().text_range().start();
tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
|token| {
let range = token.text_range().checked_sub(tt_start).unwrap();
if let Some(id) = def_site_token_map.token_by_range(range) {
let offset = (range.end() + tt_start).into();
text_edits.push((offset..offset, format!("#{}", id.0)));
}
},
);
text_edits.push((
call_offset..call_offset,
format!("// call ids will be shifted by {:?}\n", mac.shift()),
));
}
for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
let macro_call = InFile::new(source.file_id, &macro_call);
let res = macro_call
@ -135,20 +94,22 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
})
.unwrap();
let macro_call_id = res.value.unwrap();
let macro_file = MacroFile { macro_call_id };
let macro_file = MacroFileId { macro_call_id };
let mut expansion_result = db.parse_macro_expansion(macro_file);
expansion_result.err = expansion_result.err.or(res.err);
expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id)));
expansions.push((macro_call.value.clone(), expansion_result));
}
for (call, exp, arg) in expansions.into_iter().rev() {
for (call, exp) in expansions.into_iter().rev() {
let mut tree = false;
let mut expect_errors = false;
let mut show_token_ids = false;
let mut show_spans = false;
let mut show_ctxt = false;
for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
tree |= comment.to_string().contains("+tree");
expect_errors |= comment.to_string().contains("+errors");
show_token_ids |= comment.to_string().contains("+tokenids");
show_spans |= comment.to_string().contains("+spans");
show_ctxt |= comment.to_string().contains("+syntaxctxt");
}
let mut expn_text = String::new();
@ -164,13 +125,16 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
} else {
assert!(
parse.errors().is_empty(),
"parse errors in expansion: \n{:#?}",
parse.errors()
"parse errors in expansion: \n{:#?}\n```\n{}\n```",
parse.errors(),
parse.syntax_node(),
);
}
let pp = pretty_print_macro_expansion(
parse.syntax_node(),
show_token_ids.then_some(&*token_map),
SpanMapRef::ExpansionSpanMap(&token_map),
show_spans,
show_ctxt,
);
let indent = IndentLevel::from_node(call.syntax());
let pp = reindent(indent, pp);
@ -185,27 +149,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
let range = call.syntax().text_range();
let range: Range<usize> = range.into();
if show_token_ids {
if let Some((tree, map, _)) = arg.value.as_deref() {
let tt_range = call.token_tree().unwrap().syntax().text_range();
let mut ranges = Vec::new();
extract_id_ranges(&mut ranges, map, tree);
for (range, id) in ranges {
let idx = (tt_range.start() + range.end()).into();
text_edits.push((idx..idx, format!("#{}", id.0)));
}
}
text_edits.push((range.start..range.start, "// ".into()));
call.to_string().match_indices('\n').for_each(|(offset, _)| {
let offset = offset + 1 + range.start;
text_edits.push((offset..offset, "// ".into()));
});
text_edits.push((range.end..range.end, "\n".into()));
text_edits.push((range.end..range.end, expn_text));
} else {
text_edits.push((range, expn_text));
}
text_edits.push((range, expn_text));
}
text_edits.sort_by_key(|(range, _)| range.start);
@ -226,19 +170,43 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
_ => None,
};
if let Some(src) = src {
if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
let pp = pretty_print_macro_expansion(src.value, None);
format_to!(expanded_text, "\n{}", pp)
if let Some(file_id) = src.file_id.macro_file() {
if file_id.is_attr_macro(&db) || file_id.is_custom_derive(&db) {
let call = file_id.call_node(&db);
let mut show_spans = false;
let mut show_ctxt = false;
for comment in
call.value.children_with_tokens().filter(|it| it.kind() == COMMENT)
{
show_spans |= comment.to_string().contains("+spans");
show_ctxt |= comment.to_string().contains("+syntaxctxt");
}
let pp = pretty_print_macro_expansion(
src.value,
db.span_map(src.file_id).as_ref(),
show_spans,
show_ctxt,
);
format_to!(expanded_text, "\n{}", pp)
}
}
}
}
for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
if src.file_id.is_builtin_derive(&db) {
let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
format_to!(expanded_text, "\n{}", pp)
if let Some(macro_file) = src.file_id.macro_file() {
if macro_file.is_builtin_derive(&db) {
let pp = pretty_print_macro_expansion(
src.value.syntax().clone(),
db.span_map(macro_file.into()).as_ref(),
false,
false,
);
format_to!(expanded_text, "\n{}", pp)
}
}
}
@ -246,20 +214,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
expect.assert_eq(&expanded_text);
}
fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) {
tree.token_trees.iter().for_each(|tree| match tree {
tt::TokenTree::Leaf(leaf) => {
let id = match leaf {
tt::Leaf::Literal(it) => it.span,
tt::Leaf::Punct(it) => it.span,
tt::Leaf::Ident(it) => it.span,
};
ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
}
tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree),
});
}
fn reindent(indent: IndentLevel, pp: String) -> String {
if !pp.contains('\n') {
return pp;
@ -276,7 +230,12 @@ fn reindent(indent: IndentLevel, pp: String) -> String {
res
}
fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String {
fn pretty_print_macro_expansion(
expn: SyntaxNode,
map: SpanMapRef<'_>,
show_spans: bool,
show_ctxt: bool,
) -> String {
let mut res = String::new();
let mut prev_kind = EOF;
let mut indent_level = 0;
@ -322,10 +281,22 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> Str
}
prev_kind = curr_kind;
format_to!(res, "{}", token);
if let Some(map) = map {
if let Some(id) = map.token_by_range(token.text_range()) {
format_to!(res, "#{}", id.0);
if show_spans || show_ctxt {
let span = map.span_for_range(token.text_range());
format_to!(res, "#");
if show_spans {
format_to!(
res,
"{:?}:{:?}@{:?}",
span.anchor.file_id,
span.anchor.ast_id.into_raw(),
span.range,
);
}
if show_ctxt {
format_to!(res, "\\{}", span.ctx);
}
format_to!(res, "#");
}
}
res
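So an annotated token reads `token#<anchor file>:<erased ast id>@<text range>\<syntax context>#`. A self-contained sketch reproducing the shape of that output (values made up; the real code formats the fields of a `SpanData`):
fn main() {
    let (file_id, ast_id, start, end, ctx) = (0u32, 1u32, 86u32, 89u32, 2u32);
    // Mirrors the `#<spans>\<ctxt>#` annotation built above.
    let annotated = format!("map#FileId({file_id}):{ast_id}@{start}..{end}\\{ctx}#");
    assert_eq!(annotated, r"map#FileId(0):1@86..89\2#");
}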
@ -342,6 +313,9 @@ impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
subtree: &Subtree,
_: Option<&Subtree>,
_: &base_db::Env,
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree, base_db::ProcMacroExpansionError> {
let (parse, _) =
::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);

View file

@ -93,6 +93,41 @@ fn foo() {
);
}
#[test]
fn macro_rules_in_attr() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
check(
r#"
//- proc_macros: identity
macro_rules! id {
($($t:tt)*) => {
$($t)*
};
}
id! {
#[proc_macros::identity]
impl Foo for WrapBj {
async fn foo(&self) {
self.id().await;
}
}
}
"#,
expect![[r#"
macro_rules! id {
($($t:tt)*) => {
$($t)*
};
}
#[proc_macros::identity] impl Foo for WrapBj {
async fn foo(&self ) {
self .id().await ;
}
}
"#]],
);
}
#[test]
fn float_parsing_panic() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
@ -127,3 +162,27 @@ macro_rules! id {
"#]],
);
}
#[test]
fn float_attribute_mapping() {
check(
r#"
//- proc_macros: identity
//+spans+syntaxctxt
#[proc_macros::identity]
fn foo(&self) {
self.0. 1;
}
"#,
expect![[r#"
//+spans+syntaxctxt
#[proc_macros::identity]
fn foo(&self) {
self.0. 1;
}
fn#FileId(0):1@45..47\0# foo#FileId(0):1@48..51\0#(#FileId(0):1@51..52\0#&#FileId(0):1@52..53\0#self#FileId(0):1@53..57\0# )#FileId(0):1@57..58\0# {#FileId(0):1@59..60\0#
self#FileId(0):1@65..69\0# .#FileId(0):1@69..70\0#0#FileId(0):1@70..71\0#.#FileId(0):1@71..72\0#1#FileId(0):1@73..74\0#;#FileId(0):1@74..75\0#
}#FileId(0):1@76..77\0#"#]],
);
}

View file

@ -5,7 +5,7 @@
use std::{cmp::Ordering, iter, mem};
use base_db::{CrateId, Dependency, Edition, FileId};
use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
@ -14,7 +14,6 @@ use hir_expand::{
builtin_attr_macro::find_builtin_attr,
builtin_derive_macro::find_builtin_derive,
builtin_fn_macro::find_builtin_macro,
hygiene::Hygiene,
name::{name, AsName, Name},
proc_macro::ProcMacroExpander,
ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc,
@ -85,8 +84,17 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
.enumerate()
.map(|(idx, it)| {
// FIXME: a hacky way to create a Name from a string.
let name =
tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() };
let name = tt::Ident {
text: it.name.clone(),
span: tt::SpanData {
range: syntax::TextRange::empty(syntax::TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
},
};
(name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32)))
})
.collect())
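The dummy-span recipe used here reappears below for `rustc_builtin_macro` names; a hypothetical shared helper (a sketch assembled only from the fields shown in this file, not a function that exists in the codebase):
fn dummy_span() -> tt::SpanData {
    // An empty range anchored to a bogus file with the root context,
    // exactly as constructed inline above.
    tt::SpanData {
        range: syntax::TextRange::empty(syntax::TextSize::new(0)),
        anchor: base_db::span::SpanAnchor {
            file_id: base_db::FileId::BOGUS,
            ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
        },
        ctx: base_db::span::SyntaxContextId::ROOT,
    }
}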
@ -112,7 +120,6 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro,
hygienes: FxHashMap::default(),
};
if tree_id.is_block() {
collector.seed_with_inner(tree_id);
@ -212,9 +219,23 @@ struct MacroDirective {
#[derive(Clone, Debug, Eq, PartialEq)]
enum MacroDirectiveKind {
FnLike { ast_id: AstIdWithPath<ast::MacroCall>, expand_to: ExpandTo },
Derive { ast_id: AstIdWithPath<ast::Adt>, derive_attr: AttrId, derive_pos: usize },
Attr { ast_id: AstIdWithPath<ast::Item>, attr: Attr, mod_item: ModItem, tree: TreeId },
FnLike {
ast_id: AstIdWithPath<ast::MacroCall>,
expand_to: ExpandTo,
call_site: SyntaxContextId,
},
Derive {
ast_id: AstIdWithPath<ast::Adt>,
derive_attr: AttrId,
derive_pos: usize,
call_site: SyntaxContextId,
},
Attr {
ast_id: AstIdWithPath<ast::Item>,
attr: Attr,
mod_item: ModItem,
/* is this needed? */ tree: TreeId,
},
}
/// Walks the module tree recursively
@ -242,12 +263,6 @@ struct DefCollector<'a> {
/// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general.
skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
/// `Hygiene` cache, because `Hygiene` construction is expensive.
///
/// Almost all paths should have been lowered to `ModPath` during `ItemTree` construction.
/// However, `DefCollector` still needs to lower paths in attributes, in particular those in
/// derive meta item list.
hygienes: FxHashMap<HirFileId, Hygiene>,
}
impl DefCollector<'_> {
@ -315,12 +330,11 @@ impl DefCollector<'_> {
}
if *attr_name == hir_expand::name![feature] {
let hygiene = &Hygiene::new_unhygienic();
let features = attr
.parse_path_comma_token_tree(self.db.upcast(), hygiene)
.parse_path_comma_token_tree(self.db.upcast())
.into_iter()
.flatten()
.filter_map(|feat| match feat.segments() {
.filter_map(|(feat, _)| match feat.segments() {
[name] => Some(name.to_smol_str()),
_ => None,
});
@ -471,7 +485,7 @@ impl DefCollector<'_> {
directive.module_id,
MacroCallKind::Attr {
ast_id: ast_id.ast_id,
attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
attr_args: None,
invoc_attr_index: attr.id,
},
attr.path().clone(),
@ -1119,10 +1133,11 @@ impl DefCollector<'_> {
let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to } => {
MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
let call_id = macro_call_as_call_id(
self.db.upcast(),
ast_id,
*call_site,
*expand_to,
self.def_map.krate,
resolver_def_id,
@ -1134,12 +1149,13 @@ impl DefCollector<'_> {
return false;
}
}
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
let id = derive_macro_as_call_id(
self.db,
ast_id,
*derive_attr,
*derive_pos as u32,
*call_site,
self.def_map.krate,
resolver,
);
@ -1212,7 +1228,7 @@ impl DefCollector<'_> {
};
if matches!(
def,
MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. }
MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. }
if expander.is_derive()
) {
// Resolved to `#[derive]`
@ -1234,22 +1250,10 @@ impl DefCollector<'_> {
};
let ast_id = ast_id.with_value(ast_adt_id);
let extend_unhygenic;
let hygiene = if file_id.is_macro() {
self.hygienes
.entry(file_id)
.or_insert_with(|| Hygiene::new(self.db.upcast(), file_id))
} else {
// Avoid heap allocation (`Hygiene` embraces `Arc`) and hash map entry
// when we're in an ordinary (non-macro) file.
extend_unhygenic = Hygiene::new_unhygienic();
&extend_unhygenic
};
match attr.parse_path_comma_token_tree(self.db.upcast(), hygiene) {
match attr.parse_path_comma_token_tree(self.db.upcast()) {
Some(derive_macros) => {
let mut len = 0;
for (idx, path) in derive_macros.enumerate() {
for (idx, (path, call_site)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
self.unresolved_macros.push(MacroDirective {
module_id: directive.module_id,
@ -1258,6 +1262,7 @@ impl DefCollector<'_> {
ast_id,
derive_attr: attr.id,
derive_pos: idx,
call_site,
},
container: directive.container,
});
@ -1414,11 +1419,12 @@ impl DefCollector<'_> {
for directive in &self.unresolved_macros {
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to } => {
MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
// FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
let macro_call_as_call_id = macro_call_as_call_id(
self.db.upcast(),
ast_id,
*call_site,
*expand_to,
self.def_map.krate,
|path| {
@ -1444,7 +1450,7 @@ impl DefCollector<'_> {
));
}
}
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id,
MacroCallKind::Derive {
@ -1823,9 +1829,8 @@ impl ModCollector<'_, '_> {
cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
let mut single_imports = Vec::new();
let hygiene = Hygiene::new_unhygienic();
for attr in macro_use_attrs {
let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else {
let Some(paths) = attr.parse_path_comma_token_tree(db.upcast()) else {
// `#[macro_use]` (without any paths) found, forget collected names and just import
// all visible macros.
self.def_collector.import_macros_from_extern_crate(
@ -1835,7 +1840,7 @@ impl ModCollector<'_, '_> {
);
return;
};
for path in paths {
for (path, _) in paths {
if let Some(name) = path.as_ident() {
single_imports.push(name.clone());
}
@ -2083,8 +2088,18 @@ impl ModCollector<'_, '_> {
let name = match attrs.by_key("rustc_builtin_macro").string_value() {
Some(it) => {
// FIXME: a hacky way to create a Name from a string.
name =
tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name();
name = tt::Ident {
text: it.clone(),
span: tt::SpanData {
range: syntax::TextRange::empty(syntax::TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
},
}
.as_name();
&name
}
None => {
@ -2210,8 +2225,12 @@ impl ModCollector<'_, '_> {
}
}
fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) {
let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path));
fn collect_macro_call(
&mut self,
&MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall,
container: ItemContainerId,
) {
let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(&path));
let db = self.def_collector.db;
// FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define
@ -2222,7 +2241,8 @@ impl ModCollector<'_, '_> {
if let Ok(res) = macro_call_as_call_id_with_eager(
db.upcast(),
&ast_id,
mac.expand_to,
call_site,
expand_to,
self.def_collector.def_map.krate,
|path| {
path.as_ident().and_then(|name| {
@ -2276,7 +2296,7 @@ impl ModCollector<'_, '_> {
self.def_collector.unresolved_macros.push(MacroDirective {
module_id: self.module_id,
depth: self.macro_depth + 1,
kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to },
kind: MacroDirectiveKind::FnLike { ast_id, expand_to, call_site },
container,
});
}
@ -2363,7 +2383,6 @@ mod tests {
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro: false,
hygienes: FxHashMap::default(),
};
collector.seed_with_top_level();
collector.collect();

View file

@ -1,7 +1,7 @@
//! This module resolves a `mod foo;` declaration to a file.
use arrayvec::ArrayVec;
use base_db::{AnchoredPath, FileId};
use hir_expand::name::Name;
use hir_expand::{name::Name, HirFileIdExt, MacroFileIdExt};
use limit::Limit;
use syntax::SmolStr;
@ -66,14 +66,14 @@ impl ModDir {
attr_path: Option<&SmolStr>,
) -> Result<(FileId, bool, ModDir), Box<[String]>> {
let name = name.unescaped();
let orig_file_id = file_id.original_file(db.upcast());
let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
let mut candidate_files = ArrayVec::<_, 2>::new();
match attr_path {
Some(attr_path) => {
candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner))
}
None if file_id.is_include_macro(db.upcast()) => {
None if file_id.macro_file().map_or(false, |it| it.is_include_macro(db.upcast())) => {
candidate_files.push(format!("{}.rs", name.display(db.upcast())));
candidate_files.push(format!("{}/mod.rs", name.display(db.upcast())));
}
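For reference, the two candidates built in the `include!`-originated branch correspond to the two standard module layouts; a tiny self-contained check of the strings formatted above:
fn main() {
    let name = "bar";
    // Same candidate order as in the branch above.
    let candidates = [format!("{name}.rs"), format!("{name}/mod.rs")];
    assert_eq!(candidates, ["bar.rs".to_owned(), "bar/mod.rs".to_owned()]);
}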

View file

@ -96,8 +96,8 @@ impl DefMap {
let types = result.take_types()?;
match types {
ModuleDefId::ModuleId(m) => Visibility::Module(m),
// error: visibility needs to refer to module
_ => {
// error: visibility needs to refer to module
return None;
}
}

View file

@ -8,9 +8,7 @@ use base_db::{fixture::WithFixture, SourceDatabase};
use expect_test::{expect, Expect};
use triomphe::Arc;
use crate::{db::DefDatabase, test_db::TestDB};
use super::DefMap;
use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB};
fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
let db = TestDB::with_files(ra_fixture);

View file

@ -1,13 +1,19 @@
use base_db::SourceDatabaseExt;
use base_db::{SourceDatabase, SourceDatabaseExt};
use triomphe::Arc;
use crate::{db::DefDatabase, AdtId, ModuleDefId};
use super::*;
use crate::{
db::DefDatabase,
nameres::tests::{TestDB, WithFixture},
AdtId, ModuleDefId,
};
fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) {
let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
let krate = db.test_crate();
let krate = {
let crate_graph = db.crate_graph();
// Some of these tests use minicore/proc-macros, which are injected before the test crate, so take the last one
crate_graph.iter().last().unwrap()
};
{
let events = db.log_executed(|| {
db.crate_def_map(krate);
@ -28,84 +34,199 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
fn typing_inside_a_function_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- /lib.rs
mod foo;$0
//- /lib.rs
mod foo;$0
use crate::foo::bar::Baz;
use crate::foo::bar::Baz;
enum E { A, B }
use E::*;
enum E { A, B }
use E::*;
fn foo() -> i32 {
1 + 1
}
fn foo() -> i32 {
1 + 1
}
#[cfg(never)]
fn no() {}
//- /foo/mod.rs
pub mod bar;
#[cfg(never)]
fn no() {}
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
pub struct Baz;
",
//- /foo/bar.rs
pub struct Baz;
",
r"
mod foo;
mod foo;
use crate::foo::bar::Baz;
use crate::foo::bar::Baz;
enum E { A, B }
use E::*;
enum E { A, B }
use E::*;
fn foo() -> i32 { 92 }
fn foo() -> i32 { 92 }
#[cfg(never)]
fn no() {}
",
#[cfg(never)]
fn no() {}
",
);
}
#[test]
fn typing_inside_a_macro_should_not_invalidate_def_map() {
let (mut db, pos) = TestDB::with_position(
check_def_map_is_not_recomputed(
r"
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn f() {
$ident + $ident;
};
}
}
mod foo;
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn f() {
$ident + $ident;
};
}
}
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
m!(X);
",
//- /foo/bar.rs
$0
m!(X);
pub struct S {}
",
r"
m!(Y);
pub struct S {}
",
);
let krate = db.test_crate();
{
let events = db.log_executed(|| {
let crate_def_map = db.crate_def_map(krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 1);
});
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
}
db.set_file_text(pos.file_id, Arc::from("m!(Y);"));
}
{
let events = db.log_executed(|| {
let crate_def_map = db.crate_def_map(krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 1);
});
assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}")
#[test]
fn typing_inside_an_attribute_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: identity
//- /lib.rs
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[proc_macros::identity]
fn f() {}
",
r"
#[proc_macros::identity]
fn f() { foo }
",
);
}
#[test]
fn typing_inside_an_attribute_arg_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: identity
//- /lib.rs
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[proc_macros::identity]
fn f() {}
",
r"
#[proc_macros::identity(foo)]
fn f() {}
",
);
}
#[test]
fn typing_inside_macro_heavy_file_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: identity, derive_identity
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn fm() {
$ident + $ident;
};
}
}
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
fn f() {}
m!(X);
macro_rules! m2 {
($ident:ident) => {
fn f2() {
$ident + $ident;
};
}
}
m2!(X);
#[proc_macros::identity]
#[derive(proc_macros::DeriveIdentity)]
pub struct S {}
",
r"
fn f() {0}
m!(X);
macro_rules! m2 {
($ident:ident) => {
fn f2() {
$ident + $ident;
};
}
}
m2!(X);
#[proc_macros::identity]
#[derive(proc_macros::DeriveIdentity)]
pub struct S {}
",
);
}
#[test]
fn typing_inside_a_derive_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: derive_identity
//- minicore:derive
//- /lib.rs
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[derive(proc_macros::DeriveIdentity)]
#[allow()]
struct S;
",
r"
#[derive(proc_macros::DeriveIdentity)]
#[allow(dead_code)]
struct S;
",
);
}
#[test]
fn typing_inside_a_function_should_not_invalidate_item_expansions() {

View file

@ -96,8 +96,8 @@ pub enum GenericArg {
impl Path {
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
pub fn from_src(path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
lower::lower_path(path, ctx)
pub fn from_src(ctx: &LowerCtx<'_>, path: ast::Path) -> Option<Path> {
lower::lower_path(ctx, path)
}
/// Converts a known mod path to `Path`.

View file

@ -4,8 +4,10 @@ use std::iter;
use crate::{lower::LowerCtx, type_ref::ConstRef};
use either::Either;
use hir_expand::name::{name, AsName};
use hir_expand::{
mod_path::resolve_crate_root,
name::{name, AsName},
};
use intern::Interned;
use syntax::ast::{self, AstNode, HasTypeBounds};
@ -16,12 +18,12 @@ use crate::{
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain;
let mut type_anchor = None;
let mut segments = Vec::new();
let mut generic_args = Vec::new();
let hygiene = ctx.hygiene();
let span_map = ctx.span_map();
loop {
let segment = path.segment()?;
@ -31,31 +33,31 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
// FIXME: this should just return name
match hygiene.name_ref_to_name(ctx.db.upcast(), name_ref) {
Either::Left(name) => {
let args = segment
.generic_arg_list()
.and_then(|it| lower_generic_args(ctx, it))
.or_else(|| {
lower_generic_args_from_fn_path(
ctx,
segment.param_list(),
segment.ret_type(),
)
})
.map(Interned::new);
if let Some(_) = args {
generic_args.resize(segments.len(), None);
generic_args.push(args);
}
segments.push(name);
}
Either::Right(crate_id) => {
kind = PathKind::DollarCrate(crate_id);
break;
}
if name_ref.text() == "$crate" {
break kind = resolve_crate_root(
ctx.db.upcast(),
span_map.span_for_range(name_ref.syntax().text_range()).ctx,
)
.map(PathKind::DollarCrate)
.unwrap_or(PathKind::Crate);
}
let name = name_ref.as_name();
let args = segment
.generic_arg_list()
.and_then(|it| lower_generic_args(ctx, it))
.or_else(|| {
lower_generic_args_from_fn_path(
ctx,
segment.param_list(),
segment.ret_type(),
)
})
.map(Interned::new);
if let Some(_) = args {
generic_args.resize(segments.len(), None);
generic_args.push(args);
}
segments.push(name);
}
ast::PathSegmentKind::SelfTypeKw => {
segments.push(name![Self]);
@ -74,7 +76,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => {
let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
Path::from_src(trait_ref.path()?, ctx)?
Path::from_src(ctx, trait_ref.path()?)?
else {
return None;
};
@ -151,8 +153,14 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// We follow what it did anyway :)
if segments.len() == 1 && kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
if let Some(crate_id) = hygiene.local_inner_macros(ctx.db.upcast(), path) {
kind = PathKind::DollarCrate(crate_id);
let syn_ctxt = span_map.span_for_range(path.segment()?.syntax().text_range()).ctx;
if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn {
if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
kind = match resolve_crate_root(ctx.db.upcast(), syn_ctxt) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate,
}
}
}
}
}
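Illustrative two-crate sketch (not part of the diff) of why both branches above need the syntax context: `$crate` must resolve relative to the crate the expansion came from, which only the expansion's context records.
// In crate `dep`:
pub struct Thing;

#[macro_export]
macro_rules! make_thing {
    // When a downstream crate calls `make_thing!()`, this `$crate`
    // must resolve to `dep`'s crate root, not the caller's; the
    // expansion's syntax context is what carries that origin.
    () => {
        $crate::Thing
    };
}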

View file

@ -588,6 +588,14 @@ impl Resolver {
_ => None,
})
}
pub fn impl_def(&self) -> Option<ImplId> {
self.scopes().find_map(|scope| match scope {
Scope::ImplDefScope(def) => Some(*def),
_ => None,
})
}
/// `expr_id` is required to be an expression id that comes after the top level expression scope in the given resolver
#[must_use]
pub fn update_to_inner_scope(

View file

@ -34,6 +34,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}

View file

@ -2,7 +2,7 @@
use std::iter;
use hir_expand::{hygiene::Hygiene, InFile};
use hir_expand::{span::SpanMapRef, InFile};
use la_arena::ArenaMap;
use syntax::ast;
use triomphe::Arc;
@ -34,22 +34,22 @@ impl RawVisibility {
db: &dyn DefDatabase,
node: InFile<Option<ast::Visibility>>,
) -> RawVisibility {
Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id))
Self::from_ast_with_span_map(db, node.value, db.span_map(node.file_id).as_ref())
}
pub(crate) fn from_ast_with_hygiene(
pub(crate) fn from_ast_with_span_map(
db: &dyn DefDatabase,
node: Option<ast::Visibility>,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
) -> RawVisibility {
Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene)
Self::from_ast_with_span_map_and_default(db, node, RawVisibility::private(), span_map)
}
pub(crate) fn from_ast_with_hygiene_and_default(
pub(crate) fn from_ast_with_span_map_and_default(
db: &dyn DefDatabase,
node: Option<ast::Visibility>,
default: RawVisibility,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
) -> RawVisibility {
let node = match node {
None => return default,
@ -57,7 +57,7 @@ impl RawVisibility {
};
match node.kind() {
ast::VisibilityKind::In(path) => {
let path = ModPath::from_src(db.upcast(), path, hygiene);
let path = ModPath::from_src(db.upcast(), path, span_map);
let path = match path {
None => return RawVisibility::private(),
Some(path) => path,
@ -73,7 +73,7 @@ impl RawVisibility {
RawVisibility::Module(path)
}
ast::VisibilityKind::PubSelf => {
let path = ModPath::from_kind(PathKind::Plain);
let path = ModPath::from_kind(PathKind::Super(0));
RawVisibility::Module(path)
}
ast::VisibilityKind::Pub => RawVisibility::Public,
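The `PubSelf` arm above now encodes `pub(self)` as the path `self` (`PathKind::Super(0)`) rather than an empty plain path, matching what the keyword means in plain Rust:
mod outer {
    // `pub(self)` means "visible in the current module only", i.e. it
    // is equivalent to leaving the item private.
    pub(self) struct S;
    pub(in self) struct T; // the explicit form of the same thing

    fn ok() {
        let (_s, _t) = (S, T); // both visible inside `outer`
    }
}
fn main() {
    // `outer::S` / `outer::T` would not resolve here.
}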

View file

@ -12,11 +12,40 @@ use std::{
marker::PhantomData,
};
use la_arena::{Arena, Idx};
use la_arena::{Arena, Idx, RawIdx};
use profile::Count;
use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
use crate::db;
pub use base_db::span::ErasedFileAstId;
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.
pub type AstId<N> = crate::InFile<FileAstId<N>>;
impl<N: AstIdNode> AstId<N> {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
}
pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile<N> {
crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
}
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
db.ast_id_map(self.file_id).get(self.value)
}
}
pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
impl ErasedAstId {
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
db.ast_id_map(self.file_id).get_erased(self.value)
}
}
/// `FileAstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstIdNode> {
raw: ErasedFileAstId,
@ -62,8 +91,6 @@ impl<N: AstIdNode> FileAstId<N> {
}
}
pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
pub trait AstIdNode: AstNode {}
macro_rules! register_ast_id_node {
(impl AstIdNode for $($ident:ident),+ ) => {
@ -129,6 +156,11 @@ impl AstIdMap {
pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none());
let mut res = AstIdMap::default();
// make sure to allocate the root node
if !should_alloc_id(node.kind()) {
res.alloc(node);
}
// By walking the tree in breadth-first order we make sure that parents
/// get lower ids than children. That is, adding a new child does not
// change parent's id. This means that, say, adding a new function to a
@ -136,9 +168,9 @@ impl AstIdMap {
bdfs(node, |it| {
if should_alloc_id(it.kind()) {
res.alloc(&it);
true
TreeOrder::BreadthFirst
} else {
false
TreeOrder::DepthFirst
}
});
res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
@ -155,6 +187,11 @@ impl AstIdMap {
res
}
/// The [`SyntaxNodePtr`] of the root node
pub fn root(&self) -> SyntaxNodePtr {
self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
}
pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
FileAstId { raw, covariant: PhantomData }
@ -164,7 +201,7 @@ impl AstIdMap {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
}
pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
self.arena[id].clone()
}
@ -192,14 +229,20 @@ fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
hasher.finish()
}
#[derive(Copy, Clone, PartialEq, Eq)]
enum TreeOrder {
BreadthFirst,
DepthFirst,
}
/// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs
/// order? It is a mix of breadth-first and depth-first orders. Nodes for which
/// `f` returns true are visited breadth-first, all the other nodes are explored
/// depth-first.
/// `f` returns [`TreeOrder::BreadthFirst`] are visited breadth-first, all the other nodes are explored
/// [`TreeOrder::DepthFirst`].
///
/// In other words, the size of the bfs queue is bounded by the number of
/// [`TreeOrder::BreadthFirst`] nodes.
fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> TreeOrder) {
let mut curr_layer = vec![node.clone()];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
@ -208,7 +251,7 @@ fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
while let Some(event) = preorder.next() {
match event {
syntax::WalkEvent::Enter(node) => {
if f(node.clone()) {
if f(node.clone()) == TreeOrder::BreadthFirst {
next_layer.extend(node.children());
preorder.skip_subtree();
}
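A toy model (not the real `AstIdMap`) of the property the comments above rely on: handing out ids layer by layer means a parent's id never depends on its children, so adding a child leaves previously assigned parent ids intact.
fn main() {
    // ids are handed out layer by layer, parents before children
    let number = |layers: &[&[&str]]| -> Vec<(usize, String)> {
        layers
            .iter()
            .flat_map(|layer| layer.iter())
            .enumerate()
            .map(|(id, name)| (id, name.to_string()))
            .collect()
    };
    let before = number(&[&["fn a", "mod m"], &["fn m::inner"]]);
    // appending `fn b` at the top level does not renumber `fn a` or `mod m`
    let after = number(&[&["fn a", "mod m", "fn b"], &["fn m::inner"]]);
    assert_eq!(before[0], after[0]);
    assert_eq!(before[1], after[1]);
}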

View file

@ -1,19 +1,19 @@
//! Higher-level attributes based on TokenTree, with some shortcuts.
use std::{fmt, ops};
use base_db::CrateId;
use base_db::{span::SyntaxContextId, CrateId};
use cfg::CfgExpr;
use either::Either;
use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::Arc;
use crate::{
db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath,
span::SpanMapRef,
tt::{self, Subtree},
InFile,
};
@ -39,28 +39,33 @@ impl ops::Deref for RawAttrs {
impl RawAttrs {
pub const EMPTY: Self = Self { entries: None };
pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
let entries = collect_attrs(owner)
.filter_map(|(id, attr)| match attr {
Either::Left(attr) => {
attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
}
Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
id,
input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
path: Interned::new(ModPath::from(crate::name!(doc))),
}),
})
.collect::<Vec<_>>();
// FIXME: use `Arc::from_iter` when it becomes available
let entries: Arc<[Attr]> = Arc::from(entries);
pub fn new(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
) -> Self {
let entries = collect_attrs(owner).filter_map(|(id, attr)| match attr {
Either::Left(attr) => {
attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
}
Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
id,
input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
path: Interned::new(ModPath::from(crate::name!(doc))),
ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
}),
});
let entries: Arc<[Attr]> = Arc::from_iter(entries);
Self { entries: if entries.is_empty() { None } else { Some(entries) } }
}
pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
let hygiene = Hygiene::new(db, owner.file_id);
Self::new(db, owner.value, &hygiene)
pub fn from_attrs_owner(
db: &dyn ExpandDatabase,
owner: InFile<&dyn ast::HasAttrs>,
span_map: SpanMapRef<'_>,
) -> Self {
Self::new(db, owner.value, span_map)
}
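The constructor above now builds the `Arc<[Attr]>` with `Arc::from_iter`, resolving the old `FIXME` and skipping the intermediate `Vec`. A self-contained sketch of the difference using `std::sync::Arc` (the code itself uses `triomphe::Arc`, which provides the same `FromIterator`):
use std::sync::Arc;

fn main() {
    // Before: iterator -> Vec -> Arc<[T]>, two allocations.
    let via_vec: Arc<[u32]> = Arc::from((0..4).map(|i| i * i).collect::<Vec<_>>());
    // After: iterator -> Arc<[T]> directly.
    let direct: Arc<[u32]> = (0..4).map(|i| i * i).collect();
    assert_eq!(via_vec, direct);
}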
pub fn merge(&self, other: Self) -> Self {
@@ -71,19 +76,13 @@ impl RawAttrs {
(Some(a), Some(b)) => {
let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
Self {
entries: Some(Arc::from(
a.iter()
.cloned()
.chain(b.iter().map(|it| {
let mut it = it.clone();
it.id.id = it.id.ast_index() as u32 + last_ast_index
| (it.id.cfg_attr_index().unwrap_or(0) as u32)
<< AttrId::AST_INDEX_BITS;
it
}))
// FIXME: use `Arc::from_iter` when it becomes available
.collect::<Vec<_>>(),
)),
entries: Some(Arc::from_iter(a.iter().cloned().chain(b.iter().map(|it| {
let mut it = it.clone();
it.id.id = it.id.ast_index() as u32 + last_ast_index
| (it.id.cfg_attr_index().unwrap_or(0) as u32)
<< AttrId::AST_INDEX_BITS;
it
})))),
}
}
}
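The id arithmetic in `merge` packs two indices into one u32. A minimal sketch of the layout, with an illustrative constant (the real `AttrId::AST_INDEX_BITS` is defined elsewhere in the crate and may differ):

// Hypothetical layout: low bits hold the attribute's index in the item's
// attribute list, high bits hold its index inside a `cfg_attr(...)`.
const AST_INDEX_BITS: u32 = 16; // illustrative value only

fn pack_attr_id(ast_index: u32, cfg_attr_index: Option<u32>) -> u32 {
    ast_index | (cfg_attr_index.unwrap_or(0) << AST_INDEX_BITS)
}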
@@ -100,51 +99,43 @@ impl RawAttrs {
}
let crate_graph = db.crate_graph();
let new_attrs = Arc::from(
self.iter()
.flat_map(|attr| -> SmallVec<[_; 1]> {
let is_cfg_attr =
attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
if !is_cfg_attr {
return smallvec![attr.clone()];
}
let new_attrs = Arc::from_iter(self.iter().flat_map(|attr| -> SmallVec<[_; 1]> {
let is_cfg_attr =
attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
if !is_cfg_attr {
return smallvec![attr.clone()];
}
let subtree = match attr.token_tree_value() {
Some(it) => it,
_ => return smallvec![attr.clone()],
let subtree = match attr.token_tree_value() {
Some(it) => it,
_ => return smallvec![attr.clone()],
};
let (cfg, parts) = match parse_cfg_attr_input(subtree) {
Some(it) => it,
None => return smallvec![attr.clone()],
};
let index = attr.id;
let attrs =
parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
let tree = Subtree {
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: attr.to_vec(),
};
Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
});
let (cfg, parts) = match parse_cfg_attr_input(subtree) {
Some(it) => it,
None => return smallvec![attr.clone()],
};
let index = attr.id;
let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
|(idx, attr)| {
let tree = Subtree {
delimiter: tt::Delimiter::unspecified(),
token_trees: attr.to_vec(),
};
// FIXME hygiene
let hygiene = Hygiene::new_unhygienic();
Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
},
);
let cfg_options = &crate_graph[krate].cfg_options;
let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
let cfg = CfgExpr::parse(&cfg);
if cfg_options.check(&cfg) == Some(false) {
smallvec![]
} else {
cov_mark::hit!(cfg_attr_active);
let cfg_options = &crate_graph[krate].cfg_options;
let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
let cfg = CfgExpr::parse(&cfg);
if cfg_options.check(&cfg) == Some(false) {
smallvec![]
} else {
cov_mark::hit!(cfg_attr_active);
attrs.collect()
}
})
// FIXME: use `Arc::from_iter` when it becomes available
.collect::<Vec<_>>(),
);
attrs.collect()
}
}));
RawAttrs { entries: Some(new_attrs) }
}
@@ -185,21 +176,23 @@ pub struct Attr {
pub id: AttrId,
pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>,
pub ctxt: SyntaxContextId,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AttrInput {
/// `#[attr = "string"]`
// FIXME: This is losing span
Literal(SmolStr),
/// `#[attr(subtree)]`
TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
TokenTree(Box<tt::Subtree>),
}
impl fmt::Display for AttrInput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
AttrInput::TokenTree(tt) => tt.0.fmt(f),
AttrInput::TokenTree(tt) => tt.fmt(f),
}
}
}
@@ -208,10 +201,10 @@ impl Attr {
fn from_src(
db: &dyn ExpandDatabase,
ast: ast::Meta,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
id: AttrId,
) -> Option<Attr> {
let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
let value = match lit.kind() {
ast::LiteralKind::String(string) => string.value()?.into(),
@@ -219,24 +212,20 @@ impl Attr {
};
Some(Interned::new(AttrInput::Literal(value)))
} else if let Some(tt) = ast.token_tree() {
let (tree, map) = syntax_node_to_token_tree(tt.syntax());
Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
let tree = syntax_node_to_token_tree(tt.syntax(), span_map);
Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
} else {
None
};
Some(Attr { id, path, input })
Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx })
}
fn from_tt(
db: &dyn ExpandDatabase,
tt: &tt::Subtree,
hygiene: &Hygiene,
id: AttrId,
) -> Option<Attr> {
let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
// FIXME: Unnecessary roundtrip tt -> ast -> tt
let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
let ast = ast::Meta::cast(parse.syntax_node())?;
Self::from_src(db, ast, hygiene, id)
Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id)
}
pub fn path(&self) -> &ModPath {
@@ -256,7 +245,7 @@ impl Attr {
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
AttrInput::TokenTree(tt) => match &*tt.0.token_trees {
AttrInput::TokenTree(tt) => match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
_ => None,
},
@@ -267,7 +256,7 @@ impl Attr {
/// #[path TokenTree]
pub fn token_tree_value(&self) -> Option<&Subtree> {
match self.input.as_deref()? {
AttrInput::TokenTree(tt) => Some(&tt.0),
AttrInput::TokenTree(tt) => Some(tt),
_ => None,
}
}
@@ -276,8 +265,7 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
hygiene: &'a Hygiene,
) -> Option<impl Iterator<Item = ModPath> + 'a> {
) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
let args = self.token_tree_value()?;
if args.delimiter.kind != DelimiterKind::Parenthesis {
@@ -290,12 +278,13 @@ impl Attr {
if tts.is_empty() {
return None;
}
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here.
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
// here or maybe just parse a mod path from a token tree directly
let subtree = tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
token_trees: tts.into_iter().cloned().collect(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: tts.to_vec(),
};
let (parse, _) =
let (parse, span_map) =
mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
let meta = ast::Meta::cast(parse.syntax_node())?;
// Only simple paths are allowed.
@@ -304,7 +293,11 @@ impl Attr {
return None;
}
let path = meta.path()?;
ModPath::from_src(db, path, hygiene)
let call_site = span_map.span_at(path.syntax().text_range().start()).ctx;
Some((
ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
call_site,
))
});
Some(paths)


@@ -1,16 +1,22 @@
//! Builtin attributes.
use base_db::{
span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use syntax::{TextRange, TextSize};
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin {
( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinAttrExpander {
$($variant),*
}
impl BuiltinAttrExpander {
pub fn expand(
pub fn $expand_fn(
&self,
db: &dyn ExpandDatabase,
id: MacroCallId,
@@ -45,7 +51,7 @@ impl BuiltinAttrExpander {
}
}
register_builtin! {
register_builtin! { expand:
(bench, Bench) => dummy_attr_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand,
@@ -77,9 +83,8 @@ fn dummy_attr_expand(
///
/// As such, we expand `#[derive(Foo, bar::Bar)]` into
/// ```
/// #[Foo]
/// #[bar::Bar]
/// ();
/// #![Foo]
/// #![bar::Bar]
/// ```
/// which allows fallback path resolution in hir::Semantics to properly identify our derives.
/// Since we do not expand the attribute in nameres though, we keep the original item.
@@ -98,21 +103,31 @@ fn derive_attr_expand(
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0,
_ => return ExpandResult::ok(tt::Subtree::empty()),
MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
attr_args
}
_ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)),
};
pseudo_derive_attr_expansion(tt, derives)
pseudo_derive_attr_expansion(tt, derives, loc.call_site)
}
pub fn pseudo_derive_attr_expansion(
tt: &tt::Subtree,
args: &tt::Subtree,
call_site: SyntaxContextId,
) -> ExpandResult<tt::Subtree> {
let mk_leaf = |char| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char,
spacing: tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
span: tt::SpanData {
range: TextRange::empty(TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: call_site,
},
}))
};
@@ -122,12 +137,10 @@ pub fn pseudo_derive_attr_expansion(
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
{
token_trees.push(mk_leaf('#'));
token_trees.push(mk_leaf('!'));
token_trees.push(mk_leaf('['));
token_trees.extend(tt.iter().cloned());
token_trees.push(mk_leaf(']'));
}
token_trees.push(mk_leaf('('));
token_trees.push(mk_leaf(')'));
token_trees.push(mk_leaf(';'));
ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
}
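A string-level sketch of what the rewritten loop produces; the real function emits token trees, and this helper exists purely for illustration:

fn pseudo_derive_sketch(derives: &str) -> String {
    derives.split(',').map(|d| format!("#![{}]", d.trim())).collect::<Vec<_>>().join(" ")
}

// pseudo_derive_sketch("Foo, bar::Bar") == "#![Foo] #![bar::Bar]"
// (previously the expansion was `#[Foo] #[bar::Bar] ();`)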


@@ -1,16 +1,16 @@
//! Builtin derives.
use ::tt::Ident;
use base_db::{CrateOrigin, LangCrateOrigin};
use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin};
use itertools::izip;
use mbe::TokenMap;
use rustc_hash::FxHashSet;
use stdx::never;
use tracing::debug;
use crate::{
hygiene::span_with_def_site_ctxt,
name::{AsName, Name},
tt::{self, TokenId},
span::SpanMapRef,
tt,
};
use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
@@ -29,12 +29,15 @@ macro_rules! register_builtin {
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
token_map: &TokenMap,
token_map: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( BuiltinDeriveExpander::$trait => $expand, )*
};
expander(db, id, tt, token_map)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, span, tt, token_map)
}
fn find_by_name(name: &name::Name) -> Option<Self> {
@@ -70,19 +73,19 @@ enum VariantShape {
Unit,
}
fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
(0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator<Item = tt::Ident> {
(0..n).map(move |it| tt::Ident::new(format!("f{it}"), span))
}
impl VariantShape {
fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
self.as_pattern_map(path, |it| quote!(#it))
fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree {
self.as_pattern_map(path, span, |it| quote!(span => #it))
}
fn field_names(&self) -> Vec<tt::Ident> {
fn field_names(&self, span: SpanData) -> Vec<tt::Ident> {
match self {
VariantShape::Struct(s) => s.clone(),
VariantShape::Tuple(n) => tuple_field_iterator(*n).collect(),
VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(),
VariantShape::Unit => vec![],
}
}
@@ -90,26 +93,27 @@ impl VariantShape {
fn as_pattern_map(
&self,
path: tt::Subtree,
span: SpanData,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
) -> tt::Subtree {
match self {
VariantShape::Struct(fields) => {
let fields = fields.iter().map(|it| {
let mapped = field_map(it);
quote! { #it : #mapped , }
quote! {span => #it : #mapped , }
});
quote! {
quote! {span =>
#path { ##fields }
}
}
&VariantShape::Tuple(n) => {
let fields = tuple_field_iterator(n).map(|it| {
let fields = tuple_field_iterator(span, n).map(|it| {
let mapped = field_map(&it);
quote! {
quote! {span =>
#mapped ,
}
});
quote! {
quote! {span =>
#path ( ##fields )
}
}
@@ -117,7 +121,7 @@ impl VariantShape {
}
}
fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
fn from(tm: SpanMapRef<'_>, value: Option<FieldList>) -> Result<Self, ExpandError> {
let r = match value {
None => VariantShape::Unit,
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
@@ -139,17 +143,17 @@ enum AdtShape {
}
impl AdtShape {
fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
self.as_pattern_map(name, |it| quote!(#it))
fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec<tt::Subtree> {
self.as_pattern_map(name, |it| quote!(span =>#it), span)
}
fn field_names(&self) -> Vec<Vec<tt::Ident>> {
fn field_names(&self, span: SpanData) -> Vec<Vec<tt::Ident>> {
match self {
AdtShape::Struct(s) => {
vec![s.field_names()]
vec![s.field_names(span)]
}
AdtShape::Enum { variants, .. } => {
variants.iter().map(|(_, fields)| fields.field_names()).collect()
variants.iter().map(|(_, fields)| fields.field_names(span)).collect()
}
AdtShape::Union => {
never!("using fields of union in derive is always wrong");
@@ -162,18 +166,21 @@ impl AdtShape {
&self,
name: &tt::Ident,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
span: SpanData,
) -> Vec<tt::Subtree> {
match self {
AdtShape::Struct(s) => {
vec![s.as_pattern_map(quote! { #name }, field_map)]
vec![s.as_pattern_map(quote! {span => #name }, span, field_map)]
}
AdtShape::Enum { variants, .. } => variants
.iter()
.map(|(v, fields)| fields.as_pattern_map(quote! { #name :: #v }, &field_map))
.map(|(v, fields)| {
fields.as_pattern_map(quote! {span => #name :: #v }, span, &field_map)
})
.collect(),
AdtShape::Union => {
never!("pattern matching on union is always wrong");
vec![quote! { un }]
vec![quote! {span => un }]
}
}
}
@@ -189,8 +196,12 @@ struct BasicAdtInfo {
associated_types: Vec<tt::Subtree>,
}
fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match &adt {
fn parse_adt(
tm: SpanMapRef<'_>,
adt: &ast::Adt,
call_site: SpanData,
) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match adt {
ast::Adt::Struct(it) => (
it.name(),
it.generic_param_list(),
@@ -234,22 +245,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
match this {
Some(it) => {
param_type_set.insert(it.as_name());
mbe::syntax_node_to_token_tree(it.syntax()).0
mbe::syntax_node_to_token_tree(it.syntax(), tm)
}
None => {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
}
None => tt::Subtree::empty(),
}
};
let bounds = match &param {
ast::TypeOrConstParam::Type(it) => {
it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
}
ast::TypeOrConstParam::Const(_) => None,
};
let ty = if let ast::TypeOrConstParam::Const(param) = param {
let ty = param
.ty()
.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
.unwrap_or_else(tt::Subtree::empty);
.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm))
.unwrap_or_else(|| {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
});
Some(ty)
} else {
None
@@ -282,20 +297,22 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p)
})
.map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
.map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
.collect();
let name_token = name_to_token(&tm, name)?;
let name_token = name_to_token(tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
}
fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
fn name_to_token(
token_map: SpanMapRef<'_>,
name: Option<ast::Name>,
) -> Result<tt::Ident, ExpandError> {
let name = name.ok_or_else(|| {
debug!("parsed item has no name");
ExpandError::other("missing name")
})?;
let name_token_id =
token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
let span = token_map.span_for_range(name.syntax().text_range());
let name_token = tt::Ident { span, text: name.text().into() };
Ok(name_token)
}
@@ -331,14 +348,21 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
// FIXME: use
invoc_span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
trait_path: tt::Subtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let info = match parse_adt(tm, tt) {
let info = match parse_adt(tm, tt, invoc_span) {
Ok(info) => info,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }),
e,
)
}
};
let trait_body = make_trait_body(&info);
let mut where_block = vec![];
@@ -349,13 +373,13 @@ fn expand_simple_derive(
let ident_ = ident.clone();
if let Some(b) = bound {
let ident = ident.clone();
where_block.push(quote! { #ident : #b , });
where_block.push(quote! {invoc_span => #ident : #b , });
}
if let Some(ty) = param_ty {
(quote! { const #ident : #ty , }, quote! { #ident_ , })
(quote! {invoc_span => const #ident : #ty , }, quote! {invoc_span => #ident_ , })
} else {
let bound = trait_path.clone();
(quote! { #ident : #bound , }, quote! { #ident_ , })
(quote! {invoc_span => #ident : #bound , }, quote! {invoc_span => #ident_ , })
}
})
.unzip();
@@ -363,17 +387,17 @@ fn expand_simple_derive(
where_block.extend(info.associated_types.iter().map(|it| {
let it = it.clone();
let bound = trait_path.clone();
quote! { #it : #bound , }
quote! {invoc_span => #it : #bound , }
}));
let name = info.name;
let expanded = quote! {
let expanded = quote! {invoc_span =>
impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body }
};
ExpandResult::ok(expanded)
}
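The final `quote!` assembles an impl skeleton. Sketched over strings (illustrative only; in the real output every token carries `invoc_span`):

fn derive_skeleton_sketch(
    params: &str,
    trait_path: &str,
    name: &str,
    args: &str,
    where_block: &str,
    body: &str,
) -> String {
    format!("impl <{params}> {trait_path} for {name} <{args}> where {where_block} {{ {body} }}")
}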
fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree {
fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree {
// FIXME: make hygiene work for builtin derive macros
// such that $crate can be used here.
let cg = db.crate_graph();
@@ -381,9 +405,9 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) {
cov_mark::hit!(test_copy_expand_in_core);
quote! { crate }
quote! {span => crate }
} else {
quote! { core }
quote! {span => core }
};
tt.token_trees[0].clone()
@@ -392,56 +416,50 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
fn copy_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
}
fn clone_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn clone(&self) -> Self {
#star self
}
};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn clone(&self) -> Self {
match #star self {}
}
};
}
let name = &adt.name;
let patterns = adt.shape.as_pattern(name);
let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
let patterns = adt.shape.as_pattern(span, name);
let exprs = adt.shape.as_pattern_map(name, |it| quote! {span => #it .clone() }, span);
let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
let fat_arrow = fat_arrow();
quote! {
let fat_arrow = fat_arrow(span);
quote! {span =>
#pat #fat_arrow #expr,
}
});
quote! {
quote! {span =>
fn clone(&self) -> Self {
match self {
##arms
@@ -451,53 +469,56 @@ fn clone_expand(
})
}
/// This function exists since `quote! { => }` doesn't work.
fn fat_arrow() -> ::tt::Subtree<TokenId> {
let eq =
tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
quote! { #eq> }
/// This function exists since `quote! {span => => }` doesn't work.
fn fat_arrow(span: SpanData) -> tt::Subtree {
let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span };
quote! {span => #eq> }
}
/// This function exists since `quote! { && }` doesn't work.
fn and_and() -> ::tt::Subtree<TokenId> {
let and =
tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
quote! { #and& }
/// This function exists since `quote! {span => && }` doesn't work.
fn and_and(span: SpanData) -> tt::Subtree {
let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span };
quote! {span => #and& }
}
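These helpers exist because the local `quote!` grammar cannot accept a bare `=>` or `&&` inside its body, so the operator is spliced from a joint punct plus the trailing character. A toy model of the joint-spacing trick (the struct below is a stand-in for the real `tt::Punct`):

#[derive(Debug, PartialEq)]
struct PunctSketch {
    ch: char,
    // `joint` means "glued to the next token": `=` joint-followed by `>`
    // renders as `=>`, not `= >`.
    joint: bool,
}

fn fat_arrow_sketch() -> [PunctSketch; 2] {
    [PunctSketch { ch: '=', joint: true }, PunctSketch { ch: '>', joint: false }]
}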
fn default_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::default::Default }, |adt| {
let body = match &adt.shape {
AdtShape::Struct(fields) => {
let name = &adt.name;
fields
.as_pattern_map(quote!(#name), |_| quote!(#krate::default::Default::default()))
fields.as_pattern_map(
quote!(span =>#name),
span,
|_| quote!(span =>#krate::default::Default::default()),
)
}
AdtShape::Enum { default_variant, variants } => {
if let Some(d) = default_variant {
let (name, fields) = &variants[*d];
let adt_name = &adt.name;
fields.as_pattern_map(
quote!(#adt_name :: #name),
|_| quote!(#krate::default::Default::default()),
quote!(span =>#adt_name :: #name),
span,
|_| quote!(span =>#krate::default::Default::default()),
)
} else {
// FIXME: Return expand error here
quote!()
quote!(span =>)
}
}
AdtShape::Union => {
// FIXME: Return expand error here
quote!()
quote!(span =>)
}
};
quote! {
quote! {span =>
fn default() -> Self {
#body
}
@@ -508,44 +529,41 @@ fn default_expand(
fn debug_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
let x_string = it.to_string();
quote! {
quote! {span =>
.field(#x_string, & #it)
}
});
quote! {
quote! {span =>
f.debug_struct(#name) ##for_fields .finish()
}
}
VariantShape::Tuple(n) => {
let for_fields = tuple_field_iterator(*n).map(|it| {
quote! {
let for_fields = tuple_field_iterator(span, *n).map(|it| {
quote! {span =>
.field( & #it)
}
});
quote! {
quote! {span =>
f.debug_tuple(#name) ##for_fields .finish()
}
}
VariantShape::Unit => quote! {
VariantShape::Unit => quote! {span =>
f.write_str(#name)
},
};
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match #star self {}
}
@@ -553,20 +571,20 @@ fn debug_expand(
}
let arms = match &adt.shape {
AdtShape::Struct(fields) => {
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let name = &adt.name;
let pat = fields.as_pattern(quote!(#name));
let pat = fields.as_pattern(quote!(span =>#name), span);
let expr = for_variant(name.to_string(), fields);
vec![quote! { #pat #fat_arrow #expr }]
vec![quote! {span => #pat #fat_arrow #expr }]
}
AdtShape::Enum { variants, .. } => variants
.iter()
.map(|(name, v)| {
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let adt_name = &adt.name;
let pat = v.as_pattern(quote!(#adt_name :: #name));
let pat = v.as_pattern(quote!(span =>#adt_name :: #name), span);
let expr = for_variant(name.to_string(), v);
quote! {
quote! {span =>
#pat #fat_arrow #expr ,
}
})
@@ -576,7 +594,7 @@ fn debug_expand(
vec![]
}
};
quote! {
quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match self {
##arms
@@ -589,47 +607,46 @@ fn debug_expand(
fn hash_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::hash::Hash }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {};
return quote! {span =>};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
match #star self {}
}
};
}
let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
|(pat, names)| {
let expr = {
let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
quote! { {
##it
} }
};
let fat_arrow = fat_arrow();
quote! {
#pat #fat_arrow #expr ,
}
},
);
let arms =
adt.shape.as_pattern(span, &adt.name).into_iter().zip(adt.shape.field_names(span)).map(
|(pat, names)| {
let expr = {
let it =
names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); });
quote! {span => {
##it
} }
};
let fat_arrow = fat_arrow(span);
quote! {span =>
#pat #fat_arrow #expr ,
}
},
);
let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
quote! { #krate::mem::discriminant(self).hash(ra_expand_state); }
quote! {span => #krate::mem::discriminant(self).hash(ra_expand_state); }
} else {
quote! {}
quote! {span =>}
};
quote! {
quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
#check_discriminant
match self {
@@ -643,56 +660,58 @@ fn hash_expand(
fn eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
}
fn partial_eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialEq }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {};
return quote! {span =>};
}
let name = &adt.name;
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, names)| {
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let body = match &*names {
[] => {
quote!(true)
quote!(span =>true)
}
[first, rest @ ..] => {
let rest = rest.iter().map(|it| {
let t1 = Ident::new(format!("{}_self", it.text), it.span);
let t2 = Ident::new(format!("{}_other", it.text), it.span);
let and_and = and_and();
quote!(#and_and #t1 .eq( #t2 ))
let t1 = tt::Ident::new(format!("{}_self", it.text), it.span);
let t2 = tt::Ident::new(format!("{}_other", it.text), it.span);
let and_and = and_and(span);
quote!(span =>#and_and #t1 .eq( #t2 ))
});
let first = {
let t1 = Ident::new(format!("{}_self", first.text), first.span);
let t2 = Ident::new(format!("{}_other", first.text), first.span);
quote!(#t1 .eq( #t2 ))
let t1 = tt::Ident::new(format!("{}_self", first.text), first.span);
let t2 = tt::Ident::new(format!("{}_other", first.text), first.span);
quote!(span =>#t1 .eq( #t2 ))
};
quote!(#first ##rest)
quote!(span =>#first ##rest)
}
};
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
let fat_arrow = fat_arrow();
quote! {
let fat_arrow = fat_arrow(span);
quote! {span =>
fn eq(&self, other: &Self) -> bool {
match (self, other) {
##arms
@@ -706,35 +725,46 @@ fn partial_eq_expand(
fn self_and_other_patterns(
adt: &BasicAdtInfo,
name: &tt::Ident,
span: SpanData,
) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
let self_patterns = adt.shape.as_pattern_map(name, |it| {
let t = Ident::new(format!("{}_self", it.text), it.span);
quote!(#t)
});
let other_patterns = adt.shape.as_pattern_map(name, |it| {
let t = Ident::new(format!("{}_other", it.text), it.span);
quote!(#t)
});
let self_patterns = adt.shape.as_pattern_map(
name,
|it| {
let t = tt::Ident::new(format!("{}_self", it.text), it.span);
quote!(span =>#t)
},
span,
);
let other_patterns = adt.shape.as_pattern_map(
name,
|it| {
let t = tt::Ident::new(format!("{}_other", it.text), it.span);
quote!(span =>#t)
},
span,
);
(self_patterns, other_patterns)
}
fn ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Ord }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
span: SpanData,
) -> tt::Subtree {
let fat_arrow1 = fat_arrow();
let fat_arrow2 = fat_arrow();
quote! {
let fat_arrow1 = fat_arrow(span);
let fat_arrow2 = fat_arrow(span);
quote! {span =>
match #left.cmp(&#right) {
#krate::cmp::Ordering::Equal #fat_arrow1 {
#rest
@@ -745,34 +775,34 @@ fn ord_expand(
}
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote!();
return quote!(span =>);
}
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
let mut body = quote!(#krate::cmp::Ordering::Equal);
let mut body = quote!(span =>#krate::cmp::Ordering::Equal);
for f in fields.into_iter().rev() {
let t1 = Ident::new(format!("{}_self", f.text), f.span);
let t2 = Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(#t1), quote!(#t2), body);
let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
let fat_arrow = fat_arrow();
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
let fat_arrow = fat_arrow(span);
quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
let fat_arrow = fat_arrow();
let mut body = quote! {
let fat_arrow = fat_arrow(span);
let mut body = quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::cmp::Ordering::Equal
}
};
if matches!(&adt.shape, AdtShape::Enum { .. }) {
let left = quote!(#krate::intrinsics::discriminant_value(self));
let right = quote!(#krate::intrinsics::discriminant_value(other));
body = compare(krate, left, right, body);
let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
body = compare(krate, left, right, body, span);
}
quote! {
quote! {span =>
fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
#body
}
@@ -783,20 +813,22 @@ fn ord_expand(
fn partial_ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialOrd }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
span: SpanData,
) -> tt::Subtree {
let fat_arrow1 = fat_arrow();
let fat_arrow2 = fat_arrow();
quote! {
let fat_arrow1 = fat_arrow(span);
let fat_arrow2 = fat_arrow(span);
quote! {span =>
match #left.partial_cmp(&#right) {
#krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 {
#rest
@@ -807,37 +839,39 @@ fn partial_ord_expand(
}
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote!();
return quote!(span =>);
}
let left = quote!(#krate::intrinsics::discriminant_value(self));
let right = quote!(#krate::intrinsics::discriminant_value(other));
let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
let mut body =
quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
for f in fields.into_iter().rev() {
let t1 = Ident::new(format!("{}_self", f.text), f.span);
let t2 = Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(#t1), quote!(#t2), body);
let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
let fat_arrow = fat_arrow();
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
let fat_arrow = fat_arrow(span);
quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let body = compare(
krate,
left,
right,
quote! {
quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal)
}
},
span,
);
quote! {
quote! {span =>
fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> {
#body
}


@@ -1,17 +1,24 @@
//! Builtin macros.
use base_db::{AnchoredPath, Edition, FileId};
use base_db::{
span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
AnchoredPath, Edition, FileId,
};
use cfg::CfgExpr;
use either::Either;
use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap};
use itertools::Itertools;
use mbe::{parse_exprs_with_sep, parse_to_token_tree};
use syntax::{
ast::{self, AstToken},
SmolStr,
};
use crate::{
db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId,
MacroCallLoc,
db::ExpandDatabase,
hygiene::span_with_def_site_ctxt,
name, quote,
tt::{self, DelimSpan},
ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
};
macro_rules! register_builtin {
@@ -36,7 +43,10 @@ macro_rules! register_builtin {
let expander = match *self {
$( BuiltinFnLikeExpander::$kind => $expand, )*
};
expander(db, id, tt)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, tt, span)
}
}
@@ -44,13 +54,16 @@ macro_rules! register_builtin {
pub fn expand(
&self,
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( EagerExpander::$e_kind => $e_expand, )*
};
expander(db, arg_id, tt)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, tt, span)
}
}
@@ -109,29 +122,44 @@ register_builtin! {
(option_env, OptionEnv) => option_env_expand
}
const DOLLAR_CRATE: tt::Ident =
tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
fn mk_pound(span: SpanData) -> tt::Subtree {
crate::quote::IntoTt::to_subtree(
vec![crate::tt::Leaf::Punct(crate::tt::Punct {
char: '#',
spacing: crate::tt::Spacing::Alone,
span,
})
.into()],
span,
)
}
fn module_path_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Just return a dummy result.
ExpandResult::ok(quote! { "module::path" })
ExpandResult::ok(quote! {span =>
"module::path"
})
}
fn line_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// dummy implementation for type-checking purposes
// Note that `line!` and `column!` will never be implemented properly, as they are by definition
// not incremental
ExpandResult::ok(tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: "0u32".into(),
span: tt::Span::UNSPECIFIED,
span,
}))],
})
}
@@ -140,26 +168,29 @@ fn log_syntax_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(quote! {})
ExpandResult::ok(quote! {span =>})
}
fn trace_macros_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(quote! {})
ExpandResult::ok(quote! {span =>})
}
fn stringify_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let pretty = ::tt::pretty(&tt.token_trees);
let expanded = quote! {
let expanded = quote! {span =>
#pretty
};
@@ -170,27 +201,29 @@ fn assert_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let args = parse_exprs_with_sep(tt, ',');
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let expanded = match &*args {
[cond, panic_args @ ..] => {
let comma = tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
span,
}))],
};
let cond = cond.clone();
let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
quote! {{
quote! {span =>{
if !(#cond) {
#DOLLAR_CRATE::panic!(##panic_args);
#dollar_crate::panic!(##panic_args);
}
}}
}
[] => quote! {{}},
[] => quote! {span =>{}},
};
ExpandResult::ok(expanded)
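A string-level sketch of the expansion shape built above (the real code emits a `tt::Subtree` with `span` attached to every token, and `$crate` is resolved by hygiene):

fn assert_sketch(cond: &str, panic_args: &[&str]) -> String {
    format!("{{ if !({cond}) {{ $crate::panic!({}); }} }}", panic_args.join(", "))
}

// assert_sketch("x == 1", &[r#""x was {}""#, "x"])
//   == r#"{ if !(x == 1) { $crate::panic!("x was {}", x); } }"#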
@@ -200,12 +233,13 @@ fn file_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// FIXME: RA purposefully lacks knowledge of absolute file names
// so just return "".
let file_name = "";
let expanded = quote! {
let expanded = quote! {span =>
#file_name
};
@@ -216,16 +250,18 @@ fn format_args_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "")
format_args_expand_general(db, id, tt, "", span)
}
fn format_args_nl_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "\\n")
format_args_expand_general(db, id, tt, "\\n", span)
}
fn format_args_expand_general(
@@ -234,11 +270,12 @@ fn format_args_expand_general(
tt: &tt::Subtree,
// FIXME: Make use of this so that mir interpretation works properly
_end_string: &str,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let pound = quote! {@PUNCT '#'};
let pound = mk_pound(span);
let mut tt = tt.clone();
tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
return ExpandResult::ok(quote! {
return ExpandResult::ok(quote! {span =>
builtin #pound format_args #tt
});
}
@@ -247,25 +284,25 @@ fn asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// We expand all assembly snippets to `format_args!` invocations to get format syntax
// highlighting for them.
let mut literals = Vec::new();
for tt in tt.token_trees.chunks(2) {
match tt {
[tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
| [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] =>
{
let krate = DOLLAR_CRATE.clone();
literals.push(quote!(#krate::format_args!(#lit);));
let dollar_krate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
literals.push(quote!(span=>#dollar_krate::format_args!(#lit);));
}
_ => break,
}
}
let pound = quote! {@PUNCT '#'};
let expanded = quote! {
let pound = mk_pound(span);
let expanded = quote! {span =>
builtin #pound asm (
{##literals}
)
@@ -277,20 +314,22 @@ fn global_asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Expand to nothing (at item-level)
ExpandResult::ok(quote! {})
ExpandResult::ok(quote! {span =>})
}
fn cfg_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let expr = CfgExpr::parse(tt);
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
let expanded = if enabled { quote!(true) } else { quote!(false) };
let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) };
ExpandResult::ok(expanded)
}
@@ -298,13 +337,15 @@ fn panic_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
// Expand to a macro call `$crate::panic::panic_{edition}`
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
quote!(#DOLLAR_CRATE::panic::panic_2021!)
quote!(span =>#dollar_crate::panic::panic_2021!)
} else {
quote!(#DOLLAR_CRATE::panic::panic_2015!)
quote!(span =>#dollar_crate::panic::panic_2015!)
};
// Pass the original arguments
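Sketched over strings, the edition dispatch above amounts to the following (illustrative only; the real code builds the call as a token tree and appends the original arguments):

fn panic_macro_path(edition_2021_or_later: bool) -> &'static str {
    if edition_2021_or_later {
        "$crate::panic::panic_2021!"
    } else {
        "$crate::panic::panic_2015!"
    }
}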
@@ -316,13 +357,15 @@ fn unreachable_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
// Expand to a macro call `$crate::panic::unreachable_{edition}`
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
quote!(#DOLLAR_CRATE::panic::unreachable_2021!)
quote!(span =>#dollar_crate::panic::unreachable_2021!)
} else {
quote!(#DOLLAR_CRATE::panic::unreachable_2015!)
quote!(span =>#dollar_crate::panic::unreachable_2015!)
};
// Pass the original arguments
@@ -352,6 +395,7 @@ fn compile_error_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
@@ -361,13 +405,14 @@ fn compile_error_expand(
_ => ExpandError::other("`compile_error!` argument must be a string"),
};
ExpandResult { value: quote! {}, err: Some(err) }
ExpandResult { value: quote! {span =>}, err: Some(err) }
}
fn concat_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut text = String::new();
@@ -407,13 +452,14 @@ fn concat_expand(
}
}
}
ExpandResult { value: quote!(#text), err }
ExpandResult { value: quote!(span =>#text), err }
}
fn concat_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut bytes = Vec::new();
let mut err = None;
@@ -446,8 +492,25 @@ fn concat_bytes_expand(
}
}
}
let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
ExpandResult { value: quote!([#ident]), err }
let value = tt::Subtree {
delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
token_trees: {
Itertools::intersperse_with(
bytes.into_iter().map(|it| {
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span }))
}),
|| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
span,
}))
},
)
.collect()
},
};
ExpandResult { value, err }
}
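The construction above renders each byte as its own literal, comma-separated inside a bracket delimiter, so `concat_bytes!(b"ab", b'c')` surfaces roughly as `[97, 98, 99]`. A self-contained sketch of the interspersing, using `itertools` as the real code does:

use itertools::Itertools;

fn concat_bytes_sketch(bytes: &[u8]) -> String {
    let body: String =
        Itertools::intersperse_with(bytes.iter().map(|b| b.to_string()), || ", ".to_string())
            .collect();
    format!("[{body}]")
}

// concat_bytes_sketch(&[97, 98, 99]) == "[97, 98, 99]"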
fn concat_bytes_expand_subtree(
@@ -480,6 +543,7 @@ fn concat_idents_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut ident = String::new();
@@ -494,8 +558,9 @@ fn concat_idents_expand(
}
}
}
let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
ExpandResult { value: quote!(#ident), err }
// FIXME merge spans
let ident = tt::Ident { text: ident.into(), span };
ExpandResult { value: quote!(span =>#ident), err }
}
fn relative_file(
@@ -530,45 +595,48 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
fn include_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
_tt: &tt::Subtree,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
match db.include_expand(arg_id) {
Ok((res, _)) => ExpandResult::ok(res.0.clone()),
Err(e) => ExpandResult::new(tt::Subtree::empty(), e),
let file_id = match include_input_to_file_id(db, arg_id, tt) {
Ok(it) => it,
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
match parse_to_token_tree(
SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
SyntaxContextId::ROOT,
&db.file_text(file_id),
) {
Some(it) => ExpandResult::ok(it),
None => ExpandResult::new(
tt::Subtree::empty(DelimSpan { open: span, close: span }),
ExpandError::other("failed to parse included file"),
),
}
}
pub(crate) fn include_arg_to_tt(
pub fn include_input_to_file_id(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
let loc = db.lookup_intern_macro_call(arg_id);
let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
};
let path = parse_string(&arg.0)?;
let file_id = relative_file(db, *arg_id, &path, false)?;
let (subtree, map) =
parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
Ok((triomphe::Arc::new((subtree, map)), file_id))
arg: &tt::Subtree,
) -> Result<FileId, ExpandError> {
relative_file(db, arg_id, &parse_string(arg)?, false)
}
fn include_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
if let Err(e) = parse_string(tt) {
return ExpandResult::new(tt::Subtree::empty(), e);
}
// FIXME: actually read the file here if the user asked for macro expansion
let res = tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: r#"b"""#.into(),
span: tt::TokenId::unspecified(),
span,
}))],
};
ExpandResult::ok(res)
@@ -578,10 +646,13 @@ fn include_str_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let path = match parse_string(tt) {
Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
// FIXME: we're not able to read excluded files (which is most of them because
@@ -591,14 +662,14 @@ fn include_str_expand(
let file_id = match relative_file(db, arg_id, &path, true) {
Ok(file_id) => file_id,
Err(_) => {
return ExpandResult::ok(quote!(""));
return ExpandResult::ok(quote!(span =>""));
}
};
let text = db.file_text(file_id);
let text = &*text;
ExpandResult::ok(quote!(#text))
ExpandResult::ok(quote!(span =>#text))
}
fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
@@ -610,10 +681,13 @@ fn env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
let mut err = None;
@@ -630,7 +704,7 @@ fn env_expand(
// `include!("foo.rs")`, which might cause an infinite loop
"UNRESOLVED_ENV_VAR".to_string()
});
let expanded = quote! { #s };
let expanded = quote! {span => #s };
ExpandResult { value: expanded, err }
}
@@ -639,15 +713,18 @@ fn option_env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
// FIXME: Use `DOLLAR_CRATE` when that works in eager macros.
let expanded = match get_env_inner(db, arg_id, &key) {
None => quote! { ::core::option::Option::None::<&str> },
Some(s) => quote! { ::core::option::Option::Some(#s) },
None => quote! {span => ::core::option::Option::None::<&str> },
Some(s) => quote! {span => ::core::option::Option::Some(#s) },
};
ExpandResult::ok(expanded)
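A string-level sketch of the two arms above (illustrative; the real code emits token trees):

fn option_env_sketch(value: Option<&str>) -> String {
    match value {
        None => "::core::option::Option::None::<&str>".to_owned(),
        Some(s) => format!("::core::option::Option::Some({s:?})"),
    }
}

// option_env_sketch(Some("1")) == r#"::core::option::Option::Some("1")"#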


@@ -1,22 +1,31 @@
//! Defines database & queries for macro expansion.
use base_db::{salsa, CrateId, Edition, SourceDatabase};
use base_db::{
salsa::{self, debug::DebugQueryTable},
span::SyntaxContextId,
CrateId, Edition, FileId, SourceDatabase,
};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, HasAttrs, HasDocComments},
AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use triomphe::Arc;
use crate::{
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind, MacroFile, ProcMacroExpander,
ast_id_map::AstIdMap,
attrs::RawAttrs,
builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander,
fixup::{self, SyntaxFixupUndoInfo},
hygiene::{apply_mark, SyntaxContextData, Transparency},
span::{RealSpanMap, SpanMap, SpanMapRef},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
@@ -30,32 +39,43 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
/// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander {
pub mac: mbe::DeclarativeMacro,
pub def_site_token_map: mbe::TokenMap,
pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
pub transparency: Transparency,
}
impl DeclarativeMacroExpander {
pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
pub fn expand(
&self,
db: &dyn ExpandDatabase,
tt: tt::Subtree,
call_id: MacroCallId,
) -> ExpandResult<tt::Subtree> {
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(),
tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(tt).map_err(Into::into),
None => self
.mac
.expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency))
.map_err(Into::into),
}
}
pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
self.mac.map_id_down(token_id)
}
pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
self.mac.map_id_up(token_id)
pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(&tt, |_| ()).map_err(Into::into),
}
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
/// Old-style `macro_rules` or the new macros 2.0
DeclarativeMacro(Arc<DeclarativeMacroExpander>),
/// Stuff like `line!` and `file!`.
BuiltIn(BuiltinFnLikeExpander),
@@ -69,31 +89,6 @@ pub enum TokenExpander {
ProcMacro(ProcMacroExpander),
}
// FIXME: Get rid of these methods
impl TokenExpander {
pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
match self {
TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
TokenExpander::BuiltIn(..)
| TokenExpander::BuiltInEager(..)
| TokenExpander::BuiltInAttr(..)
| TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => id,
}
}
pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
match self {
TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
TokenExpander::BuiltIn(..)
| TokenExpander::BuiltInEager(..)
| TokenExpander::BuiltInAttr(..)
| TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
}
}
}
#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
@@ -108,8 +103,12 @@ pub trait ExpandDatabase: SourceDatabase {
// This query is LRU cached
fn parse_macro_expansion(
&self,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
#[salsa::transparent]
fn span_map(&self, file_id: HirFileId) -> SpanMap;
fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
/// reason why we use salsa at all.
@@ -118,23 +117,21 @@ pub trait ExpandDatabase: SourceDatabase {
/// to be incremental.
#[salsa::interned]
fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
#[salsa::interned]
fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
/// Lowers syntactic macro call to a token tree representation.
#[salsa::transparent]
fn setup_syntax_context_root(&self) -> ();
#[salsa::transparent]
fn dump_syntax_contexts(&self) -> String;
/// Lowers syntactic macro call to a token tree representation. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
fn macro_arg(
&self,
id: MacroCallId,
) -> ValueResult<
Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
Arc<Box<[SyntaxError]>>,
>;
/// Extracts syntax node, corresponding to a macro call. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
fn macro_arg_node(
&self,
id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
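// Sketch of the "firewall" property claimed above (hypothetical scenario):
// editing the file re-runs `macro_arg` for calls whose input may have changed,
// but when the recomputed token subtree compares equal to the cached one,
// salsa's early-cutoff lets dependent queries such as `parse_macro_expansion`
// keep their memoized results instead of re-expanding.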
/// Fetches the expander for this macro.
#[salsa::transparent]
fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
@ -144,18 +141,6 @@ pub trait ExpandDatabase: SourceDatabase {
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander>;
/// Expand macro call to a token tree.
// This query is LRU cached
fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
#[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
fn include_expand(
&self,
arg_id: MacroCallId,
) -> Result<
(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
ExpandError,
>;
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way.
@ -166,8 +151,20 @@ pub trait ExpandDatabase: SourceDatabase {
&self,
macro_call: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>>;
}
fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
#[inline]
pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
HirFileIdRepr::MacroFile(m) => {
SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
}
}
}
pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
Arc::new(RealSpanMap::from_file(db, file_id))
}
/// This expands the given macro call, but with different arguments. This is
@ -181,21 +178,36 @@ pub fn expand_speculative(
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
let token_range = token_to_map.text_range();
let span_map = RealSpanMap::absolute(FileId::BOGUS);
let span_map = SpanMapRef::RealSpanMap(&span_map);
// Build the subtree and token mapping for the speculative args
let censor = censor_for_macro_input(&loc, speculative_args);
let mut fixups = fixup::fixup_syntax(speculative_args);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
speculative_args,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => {
(mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
}
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
let censor = censor_for_macro_input(&loc, speculative_args);
let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true,
});
fixups.remove.extend(censor);
(
mbe::syntax_node_to_token_tree_modified(
speculative_args,
span_map,
fixups.append,
fixups.remove,
),
fixups.undo_info,
)
}
};
let (attr_arg, token_id) = match loc.kind {
let attr_arg = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
let attr = if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
@ -210,59 +222,45 @@ pub fn expand_speculative(
}?;
match attr.token_tree() {
Some(token_tree) => {
let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
tree.delimiter = tt::Delimiter::unspecified();
let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map);
tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
let shift = mbe::Shift::new(&tt);
shift.shift_all(&mut tree);
let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
let attr_input_start =
token_tree.left_delimiter_token()?.text_range().start();
let range = token_range.checked_sub(attr_input_start)?;
let token_id = shift.shift(map.token_by_range(range)?);
Some(token_id)
} else {
None
};
(Some(tree), token_id)
Some(tree)
}
_ => (None, None),
}
}
_ => (None, None),
};
let token_id = match token_id {
Some(token_id) => token_id,
// token wasn't inside an attribute input so it has to be in the general macro input
None => {
let range = token_range.checked_sub(speculative_args.text_range().start())?;
let token_id = spec_args_tmap.token_by_range(range)?;
match loc.def.kind {
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
}
_ => token_id,
_ => None,
}
}
_ => None,
};
// Do the actual expansion; we need to directly expand the proc macro due to the attribute args.
// Otherwise the expand query would fetch the non-speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
tt.delimiter = tt::Delimiter::unspecified();
expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
let call_site = loc.span(db);
expander.expand(
db,
loc.def.krate,
loc.krate,
&tt,
attr_arg.as_ref(),
call_site,
call_site,
call_site,
)
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site)
}
MacroDefKind::BuiltInDerive(expander, ..) => {
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
expander.expand(db, actual_macro_call, &adt, span_map)
}
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
}
MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@ -270,13 +268,14 @@ pub fn expand_speculative(
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
};
let expand_to = macro_expand_to(db, actual_macro_call);
fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
let expand_to = loc.expand_to();
fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let syntax_node = node.syntax_node();
let token = rev_tmap
.ranges_by_token(token_id, token_to_map.kind())
.ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
.filter_map(|range| syntax_node.covering_element(range).into_token())
.min_by_key(|t| {
// prefer tokens of the same kind and text
@ -293,7 +292,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
@ -312,17 +311,16 @@ fn parse_or_expand_with_err(
}
}
// FIXME: We should verify that the parsed node is one of the many macro node variants we expect
// instead of having it be untyped
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = profile::span("parse_macro_expansion");
let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);
let expand_to = macro_expand_to(db, macro_file.macro_call_id);
tracing::debug!("expanded = {}", tt.as_debug_string());
tracing::debug!("kind = {:?}", expand_to);
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let expand_to = loc.expand_to();
let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
@ -333,51 +331,129 @@ fn parse_macro_expansion_error(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>> {
db.parse_macro_expansion(MacroFile { macro_call_id })
db.parse_macro_expansion(MacroFileId { macro_call_id })
.map(|it| it.0.errors().to_vec().into_boxed_slice())
}
fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
}
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse, SpanMap::ExpansionSpanMap(map))
}
}
}
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> ValueResult<
Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
Arc<Box<[SyntaxError]>>,
> {
let loc = db.lookup_intern_macro_call(id);
if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
}
let ValueResult { value, err } = db.macro_arg_node(id);
let Some(arg) = value else {
return ValueResult { value: None, err };
// FIXME: consider the following by putting fixup info into eager call info args
// ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> {
) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
let mismatched_delimiters = |arg: &SyntaxNode| {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
Some(Arc::new(Box::new([SyntaxError::new(
"unbalanced token tree".to_owned(),
arg.text_range(),
)]) as Box<[_]>))
} else {
None
}
};
let loc = db.lookup_intern_macro_call(id);
if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
.then(|| loc.eager.as_deref())
.flatten()
{
ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE)))
} else {
let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
&node,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
let syntax = match loc.kind {
MacroCallKind::FnLike { ast_id, .. } => {
let node = &ast_id.to_ptr(db).to_node(&root);
let offset = node.syntax().text_range().start();
match node.token_tree() {
Some(tt) => {
let tt = tt.syntax();
if let Some(e) = mismatched_delimiters(tt) {
return ValueResult::only_err(e);
}
tt.clone()
}
None => {
return ValueResult::only_err(Arc::new(Box::new([
SyntaxError::new_at_offset("missing token tree".to_owned(), offset),
])));
}
}
}
MacroCallKind::Derive { ast_id, .. } => {
ast_id.to_ptr(db).to_node(&root).syntax().clone()
}
MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
};
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => {
(mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
}
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
let censor = censor_for_macro_input(&loc, &syntax);
let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true,
});
fixups.remove.extend(censor);
(
mbe::syntax_node_to_token_tree_modified(
&syntax,
map,
fixups.append,
fixups.remove,
),
fixups.undo_info,
)
}
};
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses; MBEs expect them with the parentheses included
tt.delimiter = tt::Delimiter::unspecified();
}
let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
match err {
Some(err) => ValueResult::new(val, err),
None => ValueResult::ok(val),
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses; MBEs expect them with the parentheses included
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
}
if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
match parse.errors() {
[] => ValueResult::ok(Some((Arc::new(tt), undo_info))),
errors => ValueResult::new(
Some((Arc::new(tt), undo_info)),
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(errors.to_vec().into_boxed_slice()),
),
}
} else {
ValueResult::ok(Some((Arc::new(tt), undo_info)))
}
}
}
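// A standalone sketch of the well-formedness rule enforced by
// `mismatched_delimiters` in `macro_arg` above: a fn-like macro call body is
// only expanded when its first and last tokens form a matching delimiter
// pair. The function name and the `char` representation are illustrative
// assumptions, not rust-analyzer APIs.
fn is_well_formed_tt(first: char, last: char) -> bool {
    matches!((first, last), ('(', ')') | ('[', ']') | ('{', '}'))
}
fn demo_is_well_formed_tt() {
    assert!(is_well_formed_tt('(', ')'));
    assert!(!is_well_formed_tt('(', '}')); // unbalanced: expansion is refused
}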
// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// - attributes expect the invoking attribute to be stripped
@ -417,103 +493,67 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default()
}
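// Illustrative example of the censoring described above (hypothetical input):
// when expanding the `Bar` derive in
//
//     #[derive(Foo)] #[derive(Bar)] #[derive(Baz)] struct S;
//
// every derive attribute up to and including the current one is censored, so
// the expander effectively sees `#[derive(Baz)] struct S;`.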
fn macro_arg_node(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
let err = || -> Arc<Box<[_]>> {
Arc::new(Box::new([SyntaxError::new_at_offset(
"invalid macro call".to_owned(),
syntax::TextSize::from(0),
)]))
};
let loc = db.lookup_intern_macro_call(id);
let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
} else {
loc.kind
.arg(db)
.and_then(|arg| ast::TokenTree::cast(arg.value))
.map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
};
match res {
Some(res) if res.errors().is_empty() => res.syntax_node(),
Some(res) => {
return ValueResult::new(
Some(res.syntax_node().green().into()),
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(res.errors().to_vec().into_boxed_slice()),
);
}
None => return ValueResult::only_err(err()),
}
} else {
match loc.kind.arg(db) {
Some(res) => res.value,
None => return ValueResult::only_err(err()),
}
};
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
"unbalanced token tree".to_owned(),
arg.text_range(),
)])));
}
}
ValueResult::ok(Some(arg.green().into()))
}
fn decl_macro_expander(
db: &dyn ExpandDatabase,
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> {
let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
let (mac, def_site_token_map) = match id.to_node(db) {
ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
Some(arg) => {
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
(mac, def_site_token_map)
}
None => (
mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
Default::default(),
),
},
ast::Macro::MacroDef(macro_def) => match macro_def.body() {
Some(arg) => {
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
(mac, def_site_token_map)
}
None => (
mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
Default::default(),
),
},
let (root, map) = parse_with_map(db, id.file_id);
let root = root.syntax_node();
let transparency = |node| {
// ... would be nice to have the item tree here
let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
match &*attrs
.iter()
.find(|it| {
it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
})?
.token_tree_value()?
.token_trees
{
[tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
"transparent" => Some(Transparency::Transparent),
"semitransparent" => Some(Transparency::SemiTransparent),
"opaque" => Some(Transparency::Opaque),
_ => None,
},
_ => None,
}
};
Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
ast::Macro::MacroRules(macro_rules) => (
match macro_rules.token_tree() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
),
ast::Macro::MacroDef(macro_def) => (
match macro_def.body() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_def).unwrap_or(Transparency::Opaque),
),
};
Arc::new(DeclarativeMacroExpander { mac, transparency })
}
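// A standalone sketch of the attribute-to-transparency mapping implemented by
// the `transparency` closure in `decl_macro_expander` above; the enum and
// parsing function are simplified assumptions.
#[derive(Debug, PartialEq)]
enum SketchTransparency {
    Transparent,
    SemiTransparent,
    Opaque,
}
fn parse_transparency(ident: &str) -> Option<SketchTransparency> {
    // Mirrors the match on the first identifier of `rustc_macro_transparency`.
    match ident {
        "transparent" => Some(SketchTransparency::Transparent),
        "semitransparent" => Some(SketchTransparency::SemiTransparent),
        "opaque" => Some(SketchTransparency::Opaque),
        _ => None,
    }
}
fn demo_parse_transparency() {
    assert_eq!(parse_transparency("opaque"), Some(SketchTransparency::Opaque));
    assert_eq!(parse_transparency("bogus"), None); // falls back to the default
}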
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
@ -529,39 +569,31 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
}
}
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
fn macro_expand(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
loc: MacroCallLoc,
) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
let loc = db.lookup_intern_macro_call(id);
let ExpandResult { value: tt, mut err } = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
MacroDefKind::BuiltInDerive(expander, ..) => {
let arg = db.macro_arg_node(id).value.unwrap();
let (root, map) = parse_with_map(db, loc.kind.file_id());
let root = root.syntax_node();
let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
let node = ast_id.to_ptr(db).to_node(&root);
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
&node,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(node).unwrap();
let mut res = expander.expand(db, id, &adt, &tmap);
fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
res
// FIXME: Use censoring
let _censor = censor_for_macro_input(&loc, node.syntax());
expander.expand(db, macro_call_id, &node, map.as_ref())
}
_ => {
let ValueResult { value, err } = db.macro_arg(id);
let Some(macro_arg) = value else {
let ValueResult { value, err } = db.macro_arg(macro_call_id);
let Some((macro_arg, undo_info)) = value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@ -570,12 +602,14 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
};
};
let (arg, arg_tm, undo_info) = &*macro_arg;
let mut res = match loc.def.kind {
let arg = &*macro_arg;
match loc.def.kind {
MacroDefKind::Declarative(id) => {
db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
}
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, macro_call_id, &arg).map_err(Into::into)
}
MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macro's input, though, which goes through
@ -583,11 +617,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
// will end up going through here again, whereas we just want to inspect the raw input.
// As such we just return the input subtree here.
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
let mut arg = arg.clone();
fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
return ExpandResult {
value: Arc::new(arg),
value: macro_arg.clone(),
err: err.map(|err| {
let mut buf = String::new();
for err in &**err {
@ -600,12 +631,16 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}),
};
}
MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, macro_call_id, &arg).map_err(Into::into)
}
MacroDefKind::BuiltInAttr(it, _) => {
let mut res = it.expand(db, macro_call_id, &arg);
fixup::reverse_fixups(&mut res.value, &undo_info);
res
}
_ => unreachable!(),
};
fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
res
}
}
};
@ -627,10 +662,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id);
let Some(macro_arg) = db.macro_arg(id).value else {
let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@ -639,47 +674,44 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
};
};
let (arg_tt, arg_tm, undo_info) = &*macro_arg;
let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(),
};
let attr_arg = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => {
let mut attr_args = attr_args.0.clone();
mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
Some(attr_args)
}
MacroCallKind::Attr { attr_args: Some(attr_args), .. } => Some(&**attr_args),
_ => None,
};
let ExpandResult { value: mut tt, err } =
expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
let call_site = loc.span(db);
let ExpandResult { value: mut tt, err } = expander.expand(
db,
loc.def.krate,
loc.krate,
&macro_arg,
attr_arg,
// FIXME
call_site,
call_site,
// FIXME
call_site,
);
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
return value;
}
fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
fixup::reverse_fixups(&mut tt, &undo_info);
ExpandResult { value: Arc::new(tt), err }
}
fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
Arc::new(HygieneFrame::new(db, file_id))
}
fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
db.lookup_intern_macro_call(id).expand_to()
}
fn token_tree_to_syntax_node(
tt: &tt::Subtree,
expand_to: ExpandTo,
) -> (Parse<SyntaxNode>, mbe::TokenMap) {
) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
@ -695,7 +727,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
if TOKEN_LIMIT.check(count).is_err() {
Err(ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: vec![],
}),
err: Some(ExpandError::other(format!(
@ -708,3 +740,44 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
Ok(())
}
}
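// A standalone sketch of the recursive token counting behind `check_tt_count`
// above: leaves count as one, subtrees count themselves plus their children.
// The types are illustrative assumptions, not the real `tt` crate.
enum SketchTt {
    Leaf,
    Subtree(Vec<SketchTt>),
}
fn count_tokens(tts: &[SketchTt]) -> usize {
    tts.iter()
        .map(|tt| match tt {
            SketchTt::Leaf => 1,
            SketchTt::Subtree(children) => 1 + count_tokens(children),
        })
        .sum()
}
fn demo_count_tokens() {
    let tts = vec![SketchTt::Leaf, SketchTt::Subtree(vec![SketchTt::Leaf, SketchTt::Leaf])];
    assert_eq!(count_tokens(&tts), 4); // exceeding the token limit aborts expansion
}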
fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
db.intern_syntax_context(SyntaxContextData::root());
}
fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
let mut s = String::from("Expansions:");
let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
entries.sort_by_key(|e| e.key);
for e in entries {
let id = e.key;
let expn_data = e.value.as_ref().unwrap();
s.push_str(&format!(
"\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
id,
expn_data.kind.file_id(),
expn_data.call_site,
SyntaxContextId::ROOT, // FIXME expn_data.def_site,
expn_data.kind.descr(),
));
}
s.push_str("\n\nSyntaxContexts:\n");
let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
entries.sort_by_key(|e| e.key);
for e in entries {
struct SyntaxContextDebug<'a>(
&'a dyn ExpandDatabase,
SyntaxContextId,
&'a SyntaxContextData,
);
impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.2.fancy_debug(self.1, self.0, f)
}
}
stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
}
s
}


@ -18,18 +18,17 @@
//!
//!
//! See the full discussion: <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
use base_db::{span::SyntaxContextId, CrateId};
use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use triomphe::Arc;
use crate::{
ast::{self, AstNode},
db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
MacroCallLoc, MacroDefId, MacroDefKind,
span::SpanMapRef,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
};
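// Illustrative example of what "eager expansion" means here (standard library
// macros, hypothetical expansion steps): in
//
//     concat!(env!("CARGO_PKG_NAME"), "-suffix")
//
// the inner `env!` call is expanded before `concat!` runs, so `concat!` only
// ever sees plain string literals as its arguments.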
pub fn expand_eager_macro_input(
@ -37,6 +36,7 @@ pub fn expand_eager_macro_input(
krate: CrateId,
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
call_site: SyntaxContextId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> {
let ast_map = db.ast_id_map(macro_call.file_id);
@ -53,75 +53,44 @@ pub fn expand_eager_macro_input(
krate,
eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
call_site,
});
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file());
// we need this map here as the expansion of the eager input fake file loses whitespace ...
let mut ws_mapping = FxHashMap::default();
if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
}));
}
let mut arg_map = ExpansionSpanMap::empty();
let ExpandResult { value: expanded_eager_input, err } = {
eager_macro_recur(
db,
&Hygiene::new(db, macro_call.file_id),
&arg_exp_map,
&mut arg_map,
TextSize::new(0),
InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
krate,
call_site,
resolver,
)
};
let err = parse_err.or(err);
if cfg!(debug_assertions) {
arg_map.finish();
}
let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
let Some((expanded_eager_input, _mapping)) = expanded_eager_input else {
return ExpandResult { value: None, err };
};
let (mut subtree, expanded_eager_input_token_map) =
mbe::syntax_node_to_token_tree(&expanded_eager_input);
let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map);
let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
let mut ids_used = FxHashSet::default();
let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
// The token map and ids of the subtree point into the expanded syntax node, but that is inaccessible from the outside,
// so we need to remap them to the original input of the eager macro.
subtree.visit_ids(&mut |id| {
// Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
if let Some(range) = expanded_eager_input_token_map
.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
{
// remap from expanded eager input to eager input expansion
if let Some(og_range) = mapping.get(&range) {
// remap from eager input expansion to original eager input
if let Some(&og_range) = ws_mapping.get(og_range) {
if let Some(og_token) = og_tmap.token_by_range(og_range) {
ids_used.insert(og_token);
return og_token;
}
}
}
}
tt::TokenId::UNSPECIFIED
});
og_tmap.filter(|id| ids_used.contains(&id));
og_tmap
} else {
Default::default()
};
subtree.delimiter = crate::tt::Delimiter::unspecified();
subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE;
let loc = MacroCallLoc {
def,
krate,
eager: Some(Box::new(EagerCallInfo {
arg: Arc::new((subtree, og_tmap)),
arg_id,
error: err.clone(),
})),
eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
call_site,
};
ExpandResult { value: Some(db.intern_macro_call(loc)), err }
@ -132,12 +101,13 @@ fn lazy_expand(
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
call_site: SyntaxContextId,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let expand_to = ExpandTo::from_call_site(&macro_call.value);
let ast_id = macro_call.with_value(ast_id);
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site);
let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file)
@ -146,57 +116,59 @@ fn lazy_expand(
fn eager_macro_recur(
db: &dyn ExpandDatabase,
hygiene: &Hygiene,
span_map: &ExpansionSpanMap,
expanded_map: &mut ExpansionSpanMap,
mut offset: TextSize,
curr: InFile<SyntaxNode>,
krate: CrateId,
call_site: SyntaxContextId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> {
) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update();
let mut mapping = FxHashMap::default();
let mut replacements = Vec::new();
// FIXME: We only report a single error inside of eager expansions
let mut error = None;
let mut offset = 0i32;
let apply_offset = |it: TextSize, offset: i32| {
TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
};
let mut children = original.preorder_with_tokens();
// Collect replacement
while let Some(child) = children.next() {
let WalkEvent::Enter(child) = child else { continue };
let call = match child {
syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
WalkEvent::Enter(SyntaxElement::Node(child)) => match ast::MacroCall::cast(child) {
Some(it) => {
children.skip_subtree();
it
}
None => continue,
_ => continue,
},
syntax::NodeOrToken::Token(t) => {
mapping.insert(
TextRange::new(
apply_offset(t.text_range().start(), offset),
apply_offset(t.text_range().end(), offset),
),
t.text_range(),
);
WalkEvent::Enter(_) => continue,
WalkEvent::Leave(child) => {
if let SyntaxElement::Token(t) = child {
let start = t.text_range().start();
offset += t.text_range().len();
expanded_map.push(offset, span_map.span_at(start));
}
continue;
}
};
let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
let def = match call
.path()
.and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(span_map)))
{
Some(path) => match macro_resolver(path.clone()) {
Some(def) => def,
None => {
error =
Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
offset += call.syntax().text_range().len();
continue;
}
},
None => {
error = Some(ExpandError::other("malformed macro invocation"));
offset += call.syntax().text_range().len();
continue;
}
};
@ -207,29 +179,22 @@ fn eager_macro_recur(
krate,
curr.with_value(call.clone()),
def,
call_site,
macro_resolver,
);
match value {
Some(call_id) => {
let ExpandResult { value, err: err2 } =
let ExpandResult { value: (parse, map), err: err2 } =
db.parse_macro_expansion(call_id.as_macro_file());
if let Some(tt) = call.token_tree() {
let call_tt_start = tt.syntax().text_range().start();
let call_start =
apply_offset(call.syntax().text_range().start(), offset);
if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
value
.1
.first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
.map(|r| (r + call_start, range + call_tt_start))
}));
}
}
map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));
let syntax_node = parse.syntax_node();
ExpandResult {
value: Some(value.0.syntax_node().clone_for_update()),
value: Some((
syntax_node.clone_for_update(),
offset + syntax_node.text_range().len(),
)),
err: err.or(err2),
}
}
@ -242,45 +207,23 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
let ExpandResult { value: (parse, tm), err } =
lazy_expand(db, &def, curr.with_value(call.clone()), krate);
let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
Some(db.decl_macro_expander(def.krate, ast_id))
} else {
None
};
lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site);
// replace macro inside
let hygiene = Hygiene::new(db, parse.file_id);
let ExpandResult { value, err: error } = eager_macro_recur(
db,
&hygiene,
&tm,
expanded_map,
offset,
// FIXME: We discard parse errors here
parse.as_ref().map(|it| it.syntax_node()),
krate,
call_site,
macro_resolver,
);
let err = err.or(error);
if let Some(tt) = call.token_tree() {
let call_tt_start = tt.syntax().text_range().start();
let call_start = apply_offset(call.syntax().text_range().start(), offset);
if let Some((_tt, arg_map, _)) = parse
.file_id
.macro_file()
.and_then(|id| db.macro_arg(id.macro_call_id).value)
.as_deref()
{
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
tm.first_range_by_token(
decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
syntax::SyntaxKind::TOMBSTONE,
)
.map(|r| (r + call_start, range + call_tt_start))
}));
}
}
// FIXME: Do we need to re-use _m here?
ExpandResult { value: value.map(|(n, _m)| n), err }
ExpandResult { value, err }
}
};
if err.is_some() {
@ -288,16 +231,18 @@ fn eager_macro_recur(
}
// check if the whole original syntax is replaced
if call.syntax() == &original {
return ExpandResult { value: value.zip(Some(mapping)), err: error };
return ExpandResult { value, err: error };
}
if let Some(insert) = value {
offset += u32::from(insert.text_range().len()) as i32
- u32::from(call.syntax().text_range().len()) as i32;
replacements.push((call, insert));
match value {
Some((insert, new_offset)) => {
replacements.push((call, insert));
offset = new_offset;
}
None => offset += call.syntax().text_range().len(),
}
}
replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
ExpandResult { value: Some((original, mapping)), err: error }
ExpandResult { value: Some((original, offset)), err: error }
}
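// A standalone sketch of the push-style span map that `eager_macro_recur`
// fills via `expanded_map.push(offset, span)` above: entries are
// (end offset, span) pairs in ascending order, and a lookup returns the span
// of the token covering the queried offset. Names and types are illustrative
// assumptions.
struct PushSpanMap<S>(Vec<(u32, S)>);
impl<S: Copy> PushSpanMap<S> {
    fn push(&mut self, end: u32, span: S) {
        debug_assert!(self.0.last().map_or(true, |&(prev, _)| prev < end));
        self.0.push((end, span));
    }
    fn span_at(&self, offset: u32) -> S {
        // First entry whose end offset lies strictly past the queried offset.
        let idx = self.0.partition_point(|&(end, _)| end <= offset);
        self.0[idx].1
    }
}
fn demo_push_span_map() {
    let mut map = PushSpanMap(Vec::new());
    map.push(3, 'a'); // token covering offsets 0..3
    map.push(7, 'b'); // token covering offsets 3..7
    assert_eq!(map.span_at(0), 'a');
    assert_eq!(map.span_at(5), 'b');
}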


@ -0,0 +1,375 @@
//! Things to wrap other things in file ids.
use std::iter;
use base_db::{
span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId},
FileId, FileRange,
};
use either::Either;
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};
use crate::{db, ExpansionInfo, MacroFileIdExt};
/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
///
/// Typical usages are:
///
/// * `InFile<SyntaxNode>` -- syntax node in a file
/// * `InFile<ast::FnDef>` -- ast node in a file
/// * `InFile<TextSize>` -- offset in a file
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct InFileWrapper<FileKind, T> {
pub file_id: FileKind,
pub value: T,
}
pub type InFile<T> = InFileWrapper<HirFileId, T>;
pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
pub type InRealFile<T> = InFileWrapper<FileId, T>;
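// Minimal usage sketch of the wrapper and aliases above (hypothetical values):
//
//     let node: InFile<SyntaxNode> = InFile::new(file_id, syntax_node);
//     let range = node.as_ref().map(|it| it.text_range()); // InFile<TextRange>
//
// i.e. values derived from a node keep travelling together with the file they
// came from.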
impl<FileKind, T> InFileWrapper<FileKind, T> {
pub fn new(file_id: FileKind, value: T) -> Self {
Self { file_id, value }
}
pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFileWrapper<FileKind, U> {
InFileWrapper::new(self.file_id, f(self.value))
}
}
impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
pub fn with_value<U>(&self, value: U) -> InFileWrapper<FileKind, U> {
InFileWrapper::new(self.file_id, value)
}
pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
self.with_value(&self.value)
}
}
impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
pub fn cloned(&self) -> InFileWrapper<FileKind, T> {
self.with_value(self.value.clone())
}
}
impl<T> From<InMacroFile<T>> for InFile<T> {
fn from(InMacroFile { file_id, value }: InMacroFile<T>) -> Self {
InFile { file_id: file_id.into(), value }
}
}
impl<T> From<InRealFile<T>> for InFile<T> {
fn from(InRealFile { file_id, value }: InRealFile<T>) -> Self {
InFile { file_id: file_id.into(), value }
}
}
// region:transpose impls
impl<FileKind, T> InFileWrapper<FileKind, Option<T>> {
pub fn transpose(self) -> Option<InFileWrapper<FileKind, T>> {
Some(InFileWrapper::new(self.file_id, self.value?))
}
}
impl<FileKind, L, R> InFileWrapper<FileKind, Either<L, R>> {
pub fn transpose(self) -> Either<InFileWrapper<FileKind, L>, InFileWrapper<FileKind, R>> {
match self.value {
Either::Left(l) => Either::Left(InFileWrapper::new(self.file_id, l)),
Either::Right(r) => Either::Right(InFileWrapper::new(self.file_id, r)),
}
}
}
// endregion:transpose impls
trait FileIdToSyntax: Copy {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode;
}
impl FileIdToSyntax for FileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse(self).syntax_node()
}
}
impl FileIdToSyntax for MacroFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_macro_expansion(self).value.0.syntax_node()
}
}
impl FileIdToSyntax for HirFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_or_expand(self)
}
}
#[allow(private_bounds)]
impl<FileId: FileIdToSyntax, T> InFileWrapper<FileId, T> {
pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
FileIdToSyntax::file_syntax(self.file_id, db)
}
}
impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
self.with_value(self.value.syntax())
}
}
// region:specific impls
impl InFile<&SyntaxNode> {
/// Skips the attributed item that caused the macro invocation we are climbing up
pub fn ancestors_with_macros_skip_attr_item(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => {
let macro_file_id = node.file_id.macro_file()?;
let parent_node = macro_file_id.call_node(db);
if macro_file_id.is_attr_macro(db) {
// macro call was an attributed item, skip it
// FIXME: does this fail if this is a direct expansion of another macro?
parent_node.map(|node| node.parent()).transpose()
} else {
Some(parent_node)
}
}
};
iter::successors(succ(&self.cloned()), succ)
}
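// Illustrative walk of the strategy above (hypothetical nesting): within a
// macro file, plain syntactic parents are yielded; once the expansion root is
// reached, the iterator hops to the macro call's node in the calling file and
// continues climbing there, skipping the attributed item itself for attribute
// macros.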
/// Falls back to the macro call range if the node cannot be mapped up fully.
///
/// For attributes and derives, this will point back to the attribute only.
/// For the entire item use [`InFile::original_file_range_full`].
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx; this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx; this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range_with_body(db)
}
}
}
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(
self,
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContextId)> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
}
}
}
pub fn original_syntax_node(
self,
db: &dyn db::ExpandDatabase,
) -> Option<InRealFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.clone() })
}
HirFileIdRepr::MacroFile(m) => m,
};
if !file_id.is_attr_macro(db) {
return None;
}
let (FileRange { file_id, range }, ctx) =
ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?;
// FIXME: Figure out an API that makes proper use of ctx; this only exists to
// keep pre-token map rewrite behaviour.
if !ctx.is_root() {
return None;
}
let anc = db.parse(file_id).syntax_node().covering_element(range);
let kind = self.value.kind();
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let value = anc.ancestors().find(|it| it.kind() == kind)?;
Some(InRealFile::new(file_id, value))
}
}
impl InMacroFile<SyntaxToken> {
pub fn upmap_once(
self,
db: &dyn db::ExpandDatabase,
) -> InFile<smallvec::SmallVec<[TextRange; 1]>> {
self.file_id.expansion_info(db).map_range_up_once(db, self.value.text_range())
}
}
impl InFile<SyntaxToken> {
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file)
.span_for_offset(db, self.value.text_range().start());
// FIXME: Figure out an API that makes proper use of ctx; this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return range;
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
}
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some(FileRange { file_id, range: self.value.text_range() })
}
HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file)
.span_for_offset(db, self.value.text_range().start());
// FIXME: Figure out an API that makes proper use of ctx; this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
Some(range)
} else {
None
}
}
}
}
}
impl InMacroFile<TextSize> {
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value)
}
}
impl InFile<TextRange> {
pub fn original_node_file_range(
self,
db: &dyn db::ExpandDatabase,
) -> (FileRange, SyntaxContextId) {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
}
HirFileIdRepr::MacroFile(mac_file) => {
match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
Some(it) => it,
None => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
(loc.kind.original_call_range(db), SyntaxContextId::ROOT)
}
}
}
}
}
pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
Some((it, SyntaxContextId::ROOT)) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
}
}
}
pub fn original_node_file_range_opt(
self,
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContextId)> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value)
}
}
}
}
impl<N: AstNode> InFile<N> {
pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value })
}
HirFileIdRepr::MacroFile(m) => m,
};
if !file_id.is_attr_macro(db) {
return None;
}
let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id)
.map_node_range_up(db, self.value.syntax().text_range())?;
// FIXME: Figure out an API that makes proper use of ctx; this only exists to
// keep pre-token map rewrite behaviour.
if !ctx.is_root() {
return None;
}
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let anc = db.parse(file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?;
Some(InRealFile::new(file_id, value))
}
}


@ -1,111 +1,124 @@
//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
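//!
//! For example (hypothetical input), while the user is still typing
//!
//!     fn foo() { a. }
//!
//! the incomplete field access is patched to `a.__ra_fixup` before the token
//! tree is handed to the macro, and `reverse_fixups` undoes the patch in the
//! expansion afterwards.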
use std::mem;
use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
use rustc_hash::FxHashMap;
use base_db::{
span::{ErasedFileAstId, SpanAnchor, SpanData},
FileId,
};
use la_arena::RawIdx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use syntax::{
ast::{self, AstNode, HasLoopBody},
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use triomphe::Arc;
use tt::Spacing;
use crate::{
span::SpanMapRef,
tt::{Ident, Leaf, Punct, Subtree},
};
use tt::token_id::Subtree;
/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map.
#[derive(Debug, Default)]
pub(crate) struct SyntaxFixups {
pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
pub(crate) remove: FxHashSet<SyntaxNode>,
pub(crate) undo_info: SyntaxFixupUndoInfo,
pub(crate) token_map: TokenMap,
pub(crate) next_id: u32,
}
/// This is the information needed to reverse the fixups.
#[derive(Debug, Default, PartialEq, Eq)]
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
original: Box<[Subtree]>,
// FIXME: ThinArc<[Subtree]>
original: Option<Arc<Box<[Subtree]>>>,
}
const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
impl SyntaxFixupUndoInfo {
pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
}
pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
// censoring -> just don't convert the node
// replacement -> censor + append
// append -> insert a fake node; here we need to assemble a dummy span that we
// can figure out how to remove later
pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default();
let mut replace = FxHashMap::<SyntaxElement, _>::default();
let mut remove = FxHashSet::<SyntaxNode>::default();
let mut preorder = node.preorder();
let mut original = Vec::new();
let mut token_map = TokenMap::default();
let mut next_id = 0;
let dummy_range = TextRange::empty(TextSize::new(0));
// we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
// the index into the replacement vec, but only if the end points to !0
let dummy_anchor = SpanAnchor {
file_id: FileId::from_raw(!0),
ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)),
};
let fake_span = |range| SpanData {
range: dummy_range,
anchor: dummy_anchor,
ctx: span_map.span_for_range(range).ctx,
};
while let Some(event) = preorder.next() {
let node = match event {
syntax::WalkEvent::Enter(node) => node,
syntax::WalkEvent::Leave(_) => continue,
};
let syntax::WalkEvent::Enter(node) = event else { continue };
let node_range = node.text_range();
if can_handle_error(&node) && has_error_to_handle(&node) {
remove.insert(node.clone().into());
// the node contains an error node, we have to completely replace it by something valid
let (original_tree, new_tmap, new_next_id) =
mbe::syntax_node_to_token_tree_with_modifications(
&node,
mem::take(&mut token_map),
next_id,
Default::default(),
Default::default(),
);
token_map = new_tmap;
next_id = new_next_id;
let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
let idx = original.len() as u32;
original.push(original_tree);
let replacement = SyntheticToken {
kind: SyntaxKind::IDENT,
let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: node.text_range(),
id: SyntheticTokenId(idx),
};
replace.insert(node.clone().into(), vec![replacement]);
span: SpanData {
range: TextRange::new(TextSize::new(idx), TextSize::new(!0)),
anchor: dummy_anchor,
ctx: span_map.span_for_range(node_range).ctx,
},
});
append.insert(node.clone().into(), vec![replacement]);
preorder.skip_subtree();
continue;
}
// In some other situations, we can fix things by just appending some tokens.
let end_range = TextRange::empty(node.text_range().end());
match_ast! {
match node {
ast::FieldExpr(it) => {
if it.name_ref().is_none() {
// incomplete field access: some_expr.|
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID,
},
span: fake_span(node_range),
}),
]);
}
},
ast::ExprStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::SEMICOLON,
text: ";".into(),
range: end_range,
id: EMPTY_ID,
},
Leaf::Punct(Punct {
char: ';',
spacing: Spacing::Alone,
span: fake_span(node_range),
}),
]);
}
},
ast::LetStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::SEMICOLON,
text: ";".into(),
range: end_range,
id: EMPTY_ID,
},
Leaf::Punct(Punct {
char: ';',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@ -117,28 +130,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(if_token.into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID,
},
span: fake_span(node_range)
}),
]);
}
if it.then_branch().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@ -150,46 +160,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(while_token.into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID,
},
span: fake_span(node_range)
}),
]);
}
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
ast::LoopExpr(it) => {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@ -201,29 +207,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue
};
append.insert(match_token.into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID
},
span: fake_span(node_range)
}),
]);
}
if it.match_arm_list().is_none() {
// No match arms
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@ -234,10 +237,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
};
let [pat, in_token, iter] = [
(SyntaxKind::UNDERSCORE, "_"),
(SyntaxKind::IN_KW, "in"),
(SyntaxKind::IDENT, "__ra_fixup")
].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID});
"_",
"in",
"__ra_fixup"
].map(|text|
Leaf::Ident(Ident {
text: text.into(),
span: fake_span(node_range)
}),
);
if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
append.insert(for_token.into(), vec![pat, in_token, iter]);
@ -248,18 +256,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@ -267,12 +274,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
}
}
}
let needs_fixups = !append.is_empty() || !original.is_empty();
SyntaxFixups {
append,
replace,
token_map,
next_id,
undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
remove,
undo_info: SyntaxFixupUndoInfo {
original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())),
},
}
}
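A note on the scheme at work here: instead of the old side table of synthetic token IDs, fixup tokens now carry a deliberately bogus span whose anchor file id is `!0`, and the start of that span's range doubles as an index into the saved `original` subtrees; that is what `reverse_fixups` below keys on. A minimal std-only sketch of the recovery half, with illustrative stand-in types rather than the crate's:

const FAKE_FILE_ID: u32 = !0; // stands in for FileId::from_raw(!0)

#[derive(Clone, Copy)]
struct FakeSpan {
    file_id: u32,
    range_start: u32, // index into the saved originals
}

fn main() {
    // Subtrees that fixup replaced, in insertion order:
    let originals = vec!["if cond".to_string(), "loop body".to_string()];

    // A fixup leaf pointing at originals[1]:
    let leaf_span = FakeSpan { file_id: FAKE_FILE_ID, range_start: 1 };
    if leaf_span.file_id == FAKE_FILE_ID {
        let original = &originals[leaf_span.range_start as usize];
        assert_eq!(original, "loop body");
    }
}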
@ -288,30 +296,32 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
}
pub(crate) fn reverse_fixups(
tt: &mut Subtree,
token_map: &TokenMap,
undo_info: &SyntaxFixupUndoInfo,
) {
pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
let Some(undo_info) = undo_info.original.as_deref() else { return };
let undo_info = &**undo_info;
reverse_fixups_(tt, undo_info);
}
fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
let tts = std::mem::take(&mut tt.token_trees);
tt.token_trees = tts
.into_iter()
// delete all fake nodes
.filter(|tt| match tt {
tt::TokenTree::Leaf(leaf) => {
token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID)
}
tt::TokenTree::Subtree(st) => {
token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
let span = leaf.span();
span.anchor.file_id != FileId::from_raw(!0) || span.range.end() == TextSize::new(!0)
}
tt::TokenTree::Subtree(_) => true,
})
.flat_map(|tt| match tt {
tt::TokenTree::Subtree(mut tt) => {
reverse_fixups(&mut tt, token_map, undo_info);
reverse_fixups_(&mut tt, undo_info);
SmallVec::from_const([tt.into()])
}
tt::TokenTree::Leaf(leaf) => {
if let Some(id) = token_map.synthetic_token_id(*leaf.span()) {
let original = undo_info.original[id.0 as usize].clone();
if leaf.span().anchor.file_id == FileId::from_raw(!0) {
let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
if original.delimiter.kind == tt::DelimiterKind::Invisible {
original.token_trees.into()
} else {
@ -327,11 +337,15 @@ pub(crate) fn reverse_fixups(
#[cfg(test)]
mod tests {
use base_db::FileId;
use expect_test::{expect, Expect};
use triomphe::Arc;
use crate::tt;
use super::reverse_fixups;
use crate::{
fixup::reverse_fixups,
span::{RealSpanMap, SpanMap},
tt,
};
// The following three functions are only meant to check partial structural equivalence of
// `TokenTree`s, see the last assertion in `check()`.
@ -361,13 +375,13 @@ mod tests {
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture);
let fixups = super::fixup_syntax(&parsed.syntax_node());
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
let mut tt = mbe::syntax_node_to_token_tree_modified(
&parsed.syntax_node(),
fixups.token_map,
fixups.next_id,
fixups.replace,
span_map.as_ref(),
fixups.append,
fixups.remove,
);
let actual = format!("{tt}\n");
@ -383,14 +397,15 @@ mod tests {
parse.syntax_node()
);
reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
reverse_fixups(&mut tt, &fixups.undo_info);
// the fixed-up + reversed version should be equivalent to the original input
// modulo token IDs and `Punct`s' spacing.
let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
let original_as_tt =
mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
assert!(
check_subtree_eq(&tt, &original_as_tt),
"different token tree: {tt:?},\n{original_as_tt:?}"
"different token tree:\n{tt:?}\n\n{original_as_tt:?}"
);
}
@ -403,7 +418,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {for _ in __ra_fixup {}}
fn foo () {for _ in __ra_fixup { }}
"#]],
)
}
@ -431,7 +446,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {for bar in qux {}}
fn foo () {for bar in qux { }}
"#]],
)
}
@ -462,7 +477,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {match __ra_fixup {}}
fn foo () {match __ra_fixup { }}
"#]],
)
}
@ -494,7 +509,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {match __ra_fixup {}}
fn foo () {match __ra_fixup { }}
"#]],
)
}
@ -609,7 +624,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {if a {}}
fn foo () {if a { }}
"#]],
)
}
@ -623,7 +638,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {if __ra_fixup {}}
fn foo () {if __ra_fixup { }}
"#]],
)
}
@ -637,7 +652,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {if __ra_fixup {} {}}
fn foo () {if __ra_fixup {} { }}
"#]],
)
}
@ -651,7 +666,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {while __ra_fixup {}}
fn foo () {while __ra_fixup { }}
"#]],
)
}
@ -665,7 +680,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {while foo {}}
fn foo () {while foo { }}
"#]],
)
}
@ -692,7 +707,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {loop {}}
fn foo () {loop { }}
"#]],
)
}

View file

@ -2,252 +2,247 @@
//!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`.
use base_db::CrateId;
use db::TokenExpander;
use either::Either;
use mbe::Origin;
use syntax::{
ast::{self, HasDocComments},
AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use triomphe::Arc;
use std::iter;
use crate::{
db::{self, ExpandDatabase},
fixup,
name::{AsName, Name},
HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
};
use base_db::span::{MacroCallId, SpanData, SyntaxContextId};
#[derive(Clone, Debug)]
pub struct Hygiene {
frames: Option<HygieneFrames>,
use crate::db::ExpandDatabase;
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub struct SyntaxContextData {
pub outer_expn: Option<MacroCallId>,
pub outer_transparency: Transparency,
pub parent: SyntaxContextId,
/// This context, but with all transparent and semi-transparent expansions filtered away.
pub opaque: SyntaxContextId,
/// This context, but with all transparent expansions filtered away.
pub opaque_and_semitransparent: SyntaxContextId,
}
impl Hygiene {
pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene {
Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
impl std::fmt::Debug for SyntaxContextData {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("SyntaxContextData")
.field("outer_expn", &self.outer_expn)
.field("outer_transparency", &self.outer_transparency)
.field("parent", &self.parent)
.field("opaque", &self.opaque)
.field("opaque_and_semitransparent", &self.opaque_and_semitransparent)
.finish()
}
}
impl SyntaxContextData {
pub fn root() -> Self {
SyntaxContextData {
outer_expn: None,
outer_transparency: Transparency::Opaque,
parent: SyntaxContextId::ROOT,
opaque: SyntaxContextId::ROOT,
opaque_and_semitransparent: SyntaxContextId::ROOT,
}
}
pub fn new_unhygienic() -> Hygiene {
Hygiene { frames: None }
}
// FIXME: this should just return name
pub fn name_ref_to_name(
&self,
pub fn fancy_debug(
self,
self_id: SyntaxContextId,
db: &dyn ExpandDatabase,
name_ref: ast::NameRef,
) -> Either<Name, CrateId> {
if let Some(frames) = &self.frames {
if name_ref.text() == "$crate" {
if let Some(krate) = frames.root_crate(db, name_ref.syntax()) {
return Either::Right(krate);
}
f: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
write!(f, "#{self_id} parent: #{}, outer_mark: (", self.parent)?;
match self.outer_expn {
Some(id) => {
write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
}
None => write!(f, "root")?,
}
Either::Left(name_ref.as_name())
}
pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
let mut token = path.syntax().first_token()?.text_range();
let frames = self.frames.as_ref()?;
let mut current = &frames.0;
loop {
let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
if origin == Origin::Def {
return if current.local_inner {
frames.root_crate(db, path.syntax())
} else {
None
};
}
current = current.call_site.as_ref()?;
token = mapped.value;
}
write!(f, ", {:?})", self.outer_transparency)
}
}
#[derive(Clone, Debug)]
struct HygieneFrames(Arc<HygieneFrame>);
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct HygieneFrame {
expansion: Option<HygieneInfo>,
// Indicate this is a local inner macro
local_inner: bool,
krate: Option<CrateId>,
call_site: Option<Arc<HygieneFrame>>,
def_site: Option<Arc<HygieneFrame>>,
/// A property of a macro expansion that determines how identifiers
/// produced by that expansion are resolved.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
pub enum Transparency {
/// Identifier produced by a transparent expansion is always resolved at call-site.
/// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
Transparent,
/// Identifier produced by a semi-transparent expansion may be resolved
/// either at call-site or at definition-site.
/// If it's a local variable, label or `$crate` then it's resolved at def-site.
/// Otherwise it's resolved at call-site.
/// `macro_rules` macros behave like this, built-in macros currently behave like this too,
/// but that's an implementation detail.
SemiTransparent,
/// Identifier produced by an opaque expansion is always resolved at definition-site.
/// Def-site spans in procedural macros, identifiers from `macro` by default use this.
Opaque,
}
impl HygieneFrames {
fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
// Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
// usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
}
fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
let mut token = node.first_token()?.text_range();
let mut result = self.0.krate;
let mut current = self.0.clone();
while let Some((mapped, origin)) =
current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
{
result = current.krate;
let site = match origin {
Origin::Def => &current.def_site,
Origin::Call => &current.call_site,
};
let site = match site {
None => break,
Some(it) => it,
};
current = site.clone();
token = mapped.value;
}
result
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
struct HygieneInfo {
file: MacroFile,
/// The start offset of the `macro_rules!` arguments or attribute input.
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
macro_def: TokenExpander,
macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
macro_arg_shift: mbe::Shift,
exp_map: Arc<mbe::TokenMap>,
}
impl HygieneInfo {
fn map_ident_up(
&self,
db: &dyn ExpandDatabase,
token: TextRange,
) -> Option<(InFile<TextRange>, Origin)> {
let token_id = self.exp_map.token_by_range(token)?;
let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
let (token_map, tt) = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
Some(unshifted) => {
token_id = unshifted;
(&attr_args.1, self.attr_input_or_mac_def_start?)
}
None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
},
_ => match origin {
mbe::Origin::Call => {
(&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
}
mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
(TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
(&expander.def_site_token_map, *tt)
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},
},
};
let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
Some((tt.with_value(range + tt.value), origin))
}
}
fn make_hygiene_info(
pub fn span_with_def_site_ctxt(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
loc: &MacroCallLoc,
) -> HygieneInfo {
let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
ast::Macro::MacroRules(mac) => mac.token_tree()?,
ast::Macro::MacroDef(mac) => mac.body()?,
};
Some(InFile::new(id.file_id, def_tt))
});
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let tt = ast_id
.to_node(db)
.doc_comments_and_attrs()
.nth(invoc_attr_index.ast_index())
.and_then(Either::left)?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque)
}
let macro_def = db.macro_expander(loc.def);
let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
Arc::new((
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
Default::default(),
Default::default(),
))
});
pub fn span_with_call_site_ctxt(
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent)
}
HygieneInfo {
file: macro_file,
attr_input_or_mac_def_start: attr_input_or_mac_def
.map(|it| it.map(|tt| tt.syntax().text_range().start())),
macro_arg_shift: mbe::Shift::new(&macro_arg.0),
macro_arg,
macro_def,
exp_map,
pub fn span_with_mixed_site_ctxt(
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent)
}
fn span_with_ctxt_from_mark(
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
transparency: Transparency,
) -> SpanData {
SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
}
pub(super) fn apply_mark(
db: &dyn ExpandDatabase,
ctxt: SyntaxContextId,
call_id: MacroCallId,
transparency: Transparency,
) -> SyntaxContextId {
if transparency == Transparency::Opaque {
return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db)
} else {
call_site_ctxt.normalize_to_macro_rules(db)
};
if call_site_ctxt.is_root() {
return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
// Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
// macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition.
//
// In this case, the tokens from the macros 1.0 definition inherit the hygiene
// at their invocation. That is, we pretend that the macros 1.0 definition
// was defined at its invocation (i.e., inside the macros 2.0 definition)
// so that the macros 2.0 definition remains hygienic.
//
// See the example at `test/ui/hygiene/legacy_interaction.rs`.
for (call_id, transparency) in ctxt.marks(db) {
call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
}
apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency)
}
fn apply_mark_internal(
db: &dyn ExpandDatabase,
ctxt: SyntaxContextId,
call_id: Option<MacroCallId>,
transparency: Transparency,
) -> SyntaxContextId {
let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
let mut opaque = syntax_context_data.opaque;
let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
if transparency >= Transparency::Opaque {
let parent = opaque;
let new_opaque = SyntaxContextId::SELF_REF;
// But we can't just grab the ID that is about to be allocated either, as that
// would not deduplicate things!
// So we need a new salsa store type here ...
opaque = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
});
}
if transparency >= Transparency::SemiTransparent {
let parent = opaque_and_semitransparent;
let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
});
}
let parent = ctxt;
db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent,
})
}
pub trait SyntaxContextExt {
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)>;
}
#[inline(always)]
fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId {
match n {
SyntaxContextId::SELF_REF => p,
_ => n,
}
}
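`SELF_REF` here is a sentinel: as the comment in `apply_mark_internal` above notes, a context cannot store its own not-yet-allocated interned ID in `opaque`/`opaque_and_semitransparent`, so the sentinel is stored instead and `handle_self_ref` patches it back to the owning context on lookup. A std-only sketch of the pattern (ids are illustrative):

const SELF_REF: u32 = u32::MAX; // "this field refers to the owner itself"

fn handle_self_ref(owner: u32, stored: u32) -> u32 {
    if stored == SELF_REF { owner } else { stored }
}

fn main() {
    // A context whose opaque form is itself stores the sentinel:
    assert_eq!(handle_self_ref(7, SELF_REF), 7);
    // Otherwise the stored id is used as-is:
    assert_eq!(handle_self_ref(7, 3), 3);
}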
impl HygieneFrame {
pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
let (info, krate, local_inner) = match file_id.macro_file() {
None => (None, None, false),
Some(macro_file) => {
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
match loc.def.kind {
MacroDefKind::Declarative(_) => {
(info, Some(loc.def.krate), loc.def.local_inner)
}
MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
MacroDefKind::BuiltInAttr(..) => (info, None, false),
MacroDefKind::BuiltInDerive(..) => (info, None, false),
MacroDefKind::BuiltInEager(..) => (info, None, false),
MacroDefKind::ProcMacro(..) => (info, None, false),
}
}
};
let Some((info, calling_file)) = info else {
return HygieneFrame {
expansion: None,
local_inner,
krate,
call_site: None,
def_site: None,
};
};
let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
let call_site = Some(db.hygiene_frame(calling_file));
HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
impl SyntaxContextExt for SyntaxContextId {
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent)
}
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque)
}
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
db.lookup_intern_syntax_context(self).parent
}
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
let data = db.lookup_intern_syntax_context(self);
(data.outer_expn, data.outer_transparency)
}
fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
let data = db.lookup_intern_syntax_context(*self);
*self = data.parent;
(data.outer_expn, data.outer_transparency)
}
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)> {
let mut marks = marks_rev(self, db).collect::<Vec<_>>();
marks.reverse();
marks
}
}
// FIXME: Make this a SyntaxContextExt method once we have RPIT
pub fn marks_rev(
ctxt: SyntaxContextId,
db: &dyn ExpandDatabase,
) -> impl Iterator<Item = (Option<MacroCallId>, Transparency)> + '_ {
iter::successors(Some(ctxt), move |&mark| {
Some(mark.parent_ctxt(db)).filter(|&it| it != SyntaxContextId::ROOT)
})
.map(|ctx| ctx.outer_mark(db))
}
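The `iter::successors` chain yields the context itself first and then its parents, stopping before the root would be yielded. The same idiom over a plain parent table, std-only and with illustrative ids:

use std::iter;

fn main() {
    // parent[i] is the parent context of context i; 0 is the root.
    let parent = [0u32, 0, 1, 2];
    let chain: Vec<u32> = iter::successors(Some(3u32), |&ctx| {
        Some(parent[ctx as usize]).filter(|&p| p != 0)
    })
    .collect();
    assert_eq!(chain, vec![3, 2, 1]); // self, then ancestors, root excluded
}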

File diff suppressed because it is too large

View file

@ -7,11 +7,11 @@ use std::{
use crate::{
db::ExpandDatabase,
hygiene::Hygiene,
name::{known, Name},
hygiene::{marks_rev, SyntaxContextExt, Transparency},
name::{known, AsName, Name},
span::SpanMapRef,
};
use base_db::CrateId;
use either::Either;
use base_db::{span::SyntaxContextId, CrateId};
use smallvec::SmallVec;
use syntax::{ast, AstNode};
@ -38,6 +38,7 @@ pub enum PathKind {
Crate,
/// Absolute path (::foo)
Abs,
// FIXME: Remove this
/// `$crate` from macro expansion
DollarCrate(CrateId),
}
@ -46,9 +47,9 @@ impl ModPath {
pub fn from_src(
db: &dyn ExpandDatabase,
path: ast::Path,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
) -> Option<ModPath> {
convert_path(db, None, path, hygiene)
convert_path(db, None, path, span_map)
}
pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
@ -193,33 +194,36 @@ fn convert_path(
db: &dyn ExpandDatabase,
prefix: Option<ModPath>,
path: ast::Path,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
) -> Option<ModPath> {
let prefix = match path.qualifier() {
Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
Some(qual) => Some(convert_path(db, prefix, qual, span_map)?),
None => prefix,
};
let segment = path.segment()?;
let mut mod_path = match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
match hygiene.name_ref_to_name(db, name_ref) {
Either::Left(name) => {
// no type args in use
let mut res = prefix.unwrap_or_else(|| {
ModPath::from_kind(
segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
)
});
res.segments.push(name);
res
}
Either::Right(crate_id) => {
return Some(ModPath::from_segments(
PathKind::DollarCrate(crate_id),
iter::empty(),
))
if name_ref.text() == "$crate" {
if prefix.is_some() {
return None;
}
ModPath::from_kind(
resolve_crate_root(
db,
span_map.span_for_range(name_ref.syntax().text_range()).ctx,
)
.map(PathKind::DollarCrate)
.unwrap_or(PathKind::Crate),
)
} else {
let mut res = prefix.unwrap_or_else(|| {
ModPath::from_kind(
segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
)
});
res.segments.push(name_ref.as_name());
res
}
}
ast::PathSegmentKind::SelfTypeKw => {
@ -261,8 +265,14 @@ fn convert_path(
// We follow what it did anyway :)
if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
mod_path.kind = PathKind::DollarCrate(crate_id);
let syn_ctx = span_map.span_for_range(segment.syntax().text_range()).ctx;
if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
mod_path.kind = match resolve_crate_root(db, syn_ctx) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate,
}
}
}
}
}
@ -270,6 +280,29 @@ fn convert_path(
Some(mod_path)
}
pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
// When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
// we don't want to pretend that the `macro_rules!` definition is in the `macro`
// as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
// FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
// definitions actually produced by `macro` and `macro` definitions produced by
// `macro_rules!`, but at least such configurations are not stable yet.
ctxt = ctxt.normalize_to_macro_rules(db);
let mut iter = marks_rev(ctxt, db).peekable();
let mut result_mark = None;
// Find the last opaque mark from the end if it exists.
while let Some(&(mark, Transparency::Opaque)) = iter.peek() {
result_mark = Some(mark);
iter.next();
}
// Then find the last semi-transparent mark from the end if it exists.
while let Some((mark, Transparency::SemiTransparent)) = iter.next() {
result_mark = Some(mark);
}
result_mark.flatten().map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
}
pub use crate::name as __name;
#[macro_export]

View file

@ -470,6 +470,7 @@ pub mod known {
pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");
pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
pub const DOLLAR_CRATE: super::Name = super::Name::new_inline("$crate");
#[macro_export]
macro_rules! name {

View file

@ -1,6 +1,6 @@
//! Proc Macro Expander stub
use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use stdx::never;
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
@ -33,11 +33,15 @@ impl ProcMacroExpander {
calling_crate: CrateId,
tt: &tt::Subtree,
attr_arg: Option<&tt::Subtree>,
def_site: SpanData,
call_site: SpanData,
mixed_site: SpanData,
) -> ExpandResult<tt::Subtree> {
match self.proc_macro_id {
ProcMacroId(DUMMY_ID) => {
ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate))
}
ProcMacroId(DUMMY_ID) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::UnresolvedProcMacro(def_crate),
),
ProcMacroId(id) => {
let proc_macros = db.proc_macros();
let proc_macros = match proc_macros.get(&def_crate) {
@ -45,7 +49,7 @@ impl ProcMacroExpander {
Some(Err(_)) | None => {
never!("Non-dummy expander even though there are no proc macros");
return ExpandResult::new(
tt::Subtree::empty(),
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other("Internal error"),
);
}
@ -59,7 +63,7 @@ impl ProcMacroExpander {
id
);
return ExpandResult::new(
tt::Subtree::empty(),
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other("Internal error"),
);
}
@ -68,7 +72,8 @@ impl ProcMacroExpander {
let krate_graph = db.crate_graph();
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
match proc_macro.expander.expand(tt, attr_arg, env) {
match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site)
{
Ok(t) => ExpandResult::ok(t),
Err(err) => match err {
// Don't discard the item in case something unexpected happened while expanding attributes
@ -78,9 +83,10 @@ impl ProcMacroExpander {
ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
}
ProcMacroExpansionError::System(text)
| ProcMacroExpansionError::Panic(text) => {
ExpandResult::new(tt::Subtree::empty(), ExpandError::other(text))
}
| ProcMacroExpansionError::Panic(text) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::ProcMacroPanic(Box::new(text.into_boxed_str())),
),
},
}
}

View file

@ -1,5 +1,7 @@
//! A simplified version of quote-crate like quasi quote macro
use base_db::span::SpanData;
// A helper macro for quasi-quoting
// FIXME:
// 1. Not all puncts are handled
@ -8,109 +10,109 @@
#[doc(hidden)]
#[macro_export]
macro_rules! __quote {
() => {
($span:ident) => {
Vec::<crate::tt::TokenTree>::new()
};
( @SUBTREE $delim:ident $($tt:tt)* ) => {
( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => {
{
let children = $crate::__quote!($($tt)*);
let children = $crate::__quote!($span $($tt)*);
crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::$delim,
open: crate::tt::TokenId::unspecified(),
close: crate::tt::TokenId::unspecified(),
open: $span,
close: $span,
},
token_trees: $crate::quote::IntoTt::to_tokens(children),
}
}
};
( @PUNCT $first:literal ) => {
( @PUNCT($span:ident) $first:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Alone,
span: crate::tt::TokenId::unspecified(),
span: $span,
}).into()
]
}
};
( @PUNCT $first:literal, $sec:literal ) => {
( @PUNCT($span:ident) $first:literal, $sec:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Joint,
span: crate::tt::TokenId::unspecified(),
span: $span,
}).into(),
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $sec,
spacing: crate::tt::Spacing::Alone,
span: crate::tt::TokenId::unspecified(),
span: $span,
}).into()
]
}
};
// hash variable
( # $first:ident $($tail:tt)* ) => {
($span:ident # $first:ident $($tail:tt)* ) => {
{
let token = $crate::quote::ToTokenTree::to_token($first);
let token = $crate::quote::ToTokenTree::to_token($first, $span);
let mut tokens = vec![token.into()];
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};
( ## $first:ident $($tail:tt)* ) => {
($span:ident ## $first:ident $($tail:tt)* ) => {
{
let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<crate::tt::TokenTree>>();
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
let mut tokens = $first.into_iter().map(|it| $crate::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>();
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};
// Brace
( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
($span:ident { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE($span) Brace $($tt)*) };
// Bracket
( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
($span:ident [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE($span) Bracket $($tt)*) };
// Parenthesis
( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };
($span:ident ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE($span) Parenthesis $($tt)*) };
// Literal
( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
($span:ident $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt, $span).into()] };
// Ident
( $tt:ident ) => {
($span:ident $tt:ident ) => {
vec![ {
crate::tt::Leaf::Ident(crate::tt::Ident {
text: stringify!($tt).into(),
span: crate::tt::TokenId::unspecified(),
span: $span,
}).into()
}]
};
// Puncts
// FIXME: Not all puncts are handled
( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
( & ) => {$crate::__quote!(@PUNCT '&')};
( , ) => {$crate::__quote!(@PUNCT ',')};
( : ) => {$crate::__quote!(@PUNCT ':')};
( ; ) => {$crate::__quote!(@PUNCT ';')};
( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
( . ) => {$crate::__quote!(@PUNCT '.')};
( < ) => {$crate::__quote!(@PUNCT '<')};
( > ) => {$crate::__quote!(@PUNCT '>')};
( ! ) => {$crate::__quote!(@PUNCT '!')};
($span:ident -> ) => {$crate::__quote!(@PUNCT($span) '-', '>')};
($span:ident & ) => {$crate::__quote!(@PUNCT($span) '&')};
($span:ident , ) => {$crate::__quote!(@PUNCT($span) ',')};
($span:ident : ) => {$crate::__quote!(@PUNCT($span) ':')};
($span:ident ; ) => {$crate::__quote!(@PUNCT($span) ';')};
($span:ident :: ) => {$crate::__quote!(@PUNCT($span) ':', ':')};
($span:ident . ) => {$crate::__quote!(@PUNCT($span) '.')};
($span:ident < ) => {$crate::__quote!(@PUNCT($span) '<')};
($span:ident > ) => {$crate::__quote!(@PUNCT($span) '>')};
($span:ident ! ) => {$crate::__quote!(@PUNCT($span) '!')};
( $first:tt $($tail:tt)+ ) => {
($span:ident $first:tt $($tail:tt)+ ) => {
{
let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $first ));
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
@ -122,19 +124,22 @@ macro_rules! __quote {
/// It should probably be implemented in proc-macro
#[macro_export]
macro_rules! quote {
( $($tt:tt)* ) => {
$crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
($span:ident=> $($tt:tt)* ) => {
$crate::quote::IntoTt::to_subtree($crate::__quote!($span $($tt)*), $span)
}
}
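The rewrite threads a `$span` identifier through every rule of the tt-muncher so each produced token carries a span. A toy std-only macro showing the same leading-context-argument pattern (this is not the crate's `quote!`, just the shape):

macro_rules! idents_with_ctx {
    ($ctx:ident) => { Vec::<(&'static str, u32)>::new() };
    ($ctx:ident $first:ident $($tail:ident)*) => {{
        // Every token produced at this step carries the threaded context.
        let mut v = vec![(stringify!($first), $ctx)];
        v.extend(idents_with_ctx!($ctx $($tail)*));
        v
    }};
}

fn main() {
    let span = 42u32; // the context each token should carry
    assert_eq!(idents_with_ctx!(span foo bar), vec![("foo", 42), ("bar", 42)]);
}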
pub(crate) trait IntoTt {
fn to_subtree(self) -> crate::tt::Subtree;
fn to_subtree(self, span: SpanData) -> crate::tt::Subtree;
fn to_tokens(self) -> Vec<crate::tt::TokenTree>;
}
impl IntoTt for Vec<crate::tt::TokenTree> {
fn to_subtree(self) -> crate::tt::Subtree {
crate::tt::Subtree { delimiter: crate::tt::Delimiter::unspecified(), token_trees: self }
fn to_subtree(self, span: SpanData) -> crate::tt::Subtree {
crate::tt::Subtree {
delimiter: crate::tt::Delimiter::invisible_spanned(span),
token_trees: self,
}
}
fn to_tokens(self) -> Vec<crate::tt::TokenTree> {
@ -143,7 +148,7 @@ impl IntoTt for Vec<crate::tt::TokenTree> {
}
impl IntoTt for crate::tt::Subtree {
fn to_subtree(self) -> crate::tt::Subtree {
fn to_subtree(self, _: SpanData) -> crate::tt::Subtree {
self
}
@ -153,39 +158,39 @@ impl IntoTt for crate::tt::Subtree {
}
pub(crate) trait ToTokenTree {
fn to_token(self) -> crate::tt::TokenTree;
fn to_token(self, span: SpanData) -> crate::tt::TokenTree;
}
impl ToTokenTree for crate::tt::TokenTree {
fn to_token(self) -> crate::tt::TokenTree {
fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self
}
}
impl ToTokenTree for &crate::tt::TokenTree {
fn to_token(self) -> crate::tt::TokenTree {
fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self.clone()
}
}
impl ToTokenTree for crate::tt::Subtree {
fn to_token(self) -> crate::tt::TokenTree {
fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self.into()
}
}
macro_rules! impl_to_to_tokentrees {
($($ty:ty => $this:ident $im:block);*) => {
($($span:ident: $ty:ty => $this:ident $im:block);*) => {
$(
impl ToTokenTree for $ty {
fn to_token($this) -> crate::tt::TokenTree {
fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.into();
leaf.into()
}
}
impl ToTokenTree for &$ty {
fn to_token($this) -> crate::tt::TokenTree {
fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.clone().into();
leaf.into()
}
@ -195,60 +200,76 @@ macro_rules! impl_to_to_tokentrees {
}
impl_to_to_tokentrees! {
u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
crate::tt::Leaf => self { self };
crate::tt::Literal => self { self };
crate::tt::Ident => self { self };
crate::tt::Punct => self { self };
&str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}};
String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}
span: u32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
span: usize => self { crate::tt::Literal{text: self.to_string().into(), span} };
span: i32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
span: bool => self { crate::tt::Ident{text: self.to_string().into(), span} };
_span: crate::tt::Leaf => self { self };
_span: crate::tt::Literal => self { self };
_span: crate::tt::Ident => self { self };
_span: crate::tt::Punct => self { self };
span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}};
span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}
}
#[cfg(test)]
mod tests {
use crate::tt;
use base_db::{
span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use expect_test::expect;
use syntax::{TextRange, TextSize};
const DUMMY: tt::SpanData = tt::SpanData {
range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::ROOT,
};
#[test]
fn test_quote_delimiters() {
assert_eq!(quote!({}).to_string(), "{}");
assert_eq!(quote!(()).to_string(), "()");
assert_eq!(quote!([]).to_string(), "[]");
assert_eq!(quote!(DUMMY =>{}).to_string(), "{}");
assert_eq!(quote!(DUMMY =>()).to_string(), "()");
assert_eq!(quote!(DUMMY =>[]).to_string(), "[]");
}
#[test]
fn test_quote_idents() {
assert_eq!(quote!(32).to_string(), "32");
assert_eq!(quote!(struct).to_string(), "struct");
assert_eq!(quote!(DUMMY =>32).to_string(), "32");
assert_eq!(quote!(DUMMY =>struct).to_string(), "struct");
}
#[test]
fn test_quote_hash_simple_literal() {
let a = 20;
assert_eq!(quote!(#a).to_string(), "20");
assert_eq!(quote!(DUMMY =>#a).to_string(), "20");
let s: String = "hello".into();
assert_eq!(quote!(#s).to_string(), "\"hello\"");
assert_eq!(quote!(DUMMY =>#s).to_string(), "\"hello\"");
}
fn mk_ident(name: &str) -> crate::tt::Ident {
crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() }
crate::tt::Ident { text: name.into(), span: DUMMY }
}
#[test]
fn test_quote_hash_token_tree() {
let a = mk_ident("hello");
let quoted = quote!(#a);
let quoted = quote!(DUMMY =>#a);
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:?}");
assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n IDENT hello 4294967295");
expect![[r#"
SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }
IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
}
#[test]
fn test_quote_simple_derive_copy() {
let name = mk_ident("Foo");
let quoted = quote! {
let quoted = quote! {DUMMY =>
impl Clone for #name {
fn clone(&self) -> Self {
Self {}
@ -268,18 +289,19 @@ mod tests {
// }
let struct_name = mk_ident("Foo");
let fields = [mk_ident("name"), mk_ident("id")];
let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees);
let fields =
fields.iter().flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees);
let list = crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::Brace,
open: crate::tt::TokenId::unspecified(),
close: crate::tt::TokenId::unspecified(),
open: DUMMY,
close: DUMMY,
},
token_trees: fields.collect(),
};
let quoted = quote! {
let quoted = quote! {DUMMY =>
impl Clone for #struct_name {
fn clone(&self) -> Self {
Self #list

View file

@ -0,0 +1,111 @@
//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well
//! as associating spans with text ranges in a particular file.
use base_db::{
span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize};
use triomphe::Arc;
use crate::db::ExpandDatabase;
pub type ExpansionSpanMap = mbe::SpanMap<SpanData>;
/// Spanmap for a macro file or a real file
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SpanMap {
/// Spanmap for a macro file
ExpansionSpanMap(Arc<ExpansionSpanMap>),
/// Spanmap for a real file
RealSpanMap(Arc<RealSpanMap>),
}
#[derive(Copy, Clone)]
pub enum SpanMapRef<'a> {
/// Spanmap for a macro file
ExpansionSpanMap(&'a ExpansionSpanMap),
/// Spanmap for a real file
RealSpanMap(&'a RealSpanMap),
}
impl mbe::SpanMapper<SpanData> for SpanMap {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl mbe::SpanMapper<SpanData> for SpanMapRef<'_> {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl mbe::SpanMapper<SpanData> for RealSpanMap {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl SpanMap {
pub fn span_for_range(&self, range: TextRange) -> SpanData {
match self {
Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
Self::RealSpanMap(span_map) => span_map.span_for_range(range),
}
}
pub fn as_ref(&self) -> SpanMapRef<'_> {
match self {
Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map),
Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
}
}
}
impl SpanMapRef<'_> {
pub fn span_for_range(self, range: TextRange) -> SpanData {
match self {
Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
Self::RealSpanMap(span_map) => span_map.span_for_range(range),
}
}
}
#[derive(PartialEq, Eq, Hash, Debug)]
pub struct RealSpanMap {
file_id: FileId,
/// Invariant: Sorted vec over TextSize
// FIXME: SortedVec<(TextSize, ErasedFileAstId)>?
pairs: Box<[(TextSize, ErasedFileAstId)]>,
}
impl RealSpanMap {
/// Creates a real file span map that returns absolute ranges (i.e., ranges relative to the root ast id).
pub fn absolute(file_id: FileId) -> Self {
RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]) }
}
pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self {
let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(file_id.into());
pairs.extend(
db.parse(file_id)
.tree()
.items()
.map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
);
RealSpanMap { file_id, pairs: pairs.into_boxed_slice() }
}
pub fn span_for_range(&self, range: TextRange) -> SpanData {
let start = range.start();
let idx = self
.pairs
.binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less))
.unwrap_err();
let (offset, ast_id) = self.pairs[idx - 1];
SpanData {
range: range - offset,
anchor: SpanAnchor { file_id: self.file_id, ast_id },
ctx: SyntaxContextId::ROOT,
}
}
}
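A subtlety in `span_for_range`: the comparator maps `Equal` to `Less`, so `binary_search_by` can never return `Ok`; `unwrap_err` is the partition point, and `idx - 1` is the last anchor at or before `range.start()`. A std-only sketch with illustrative data:

fn main() {
    let start = 25u32;
    // Sorted (offset, ast_id) pairs, like `RealSpanMap::pairs`.
    let pairs = [(0u32, "root"), (10, "item1"), (25, "item2"), (40, "item3")];
    let idx = pairs
        .binary_search_by(|&(offset, _)| offset.cmp(&start).then(std::cmp::Ordering::Less))
        .unwrap_err();
    // idx counts the pairs with offset <= start, so idx - 1 is the anchor.
    assert_eq!(pairs[idx - 1], (25, "item2"));
}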

View file

@ -1,9 +1,10 @@
//! Constant evaluation details
use base_db::CrateId;
use base_db::{salsa::Cycle, CrateId};
use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex};
use hir_def::{
hir::Expr,
body::Body,
hir::{Expr, ExprId},
path::Path,
resolver::{Resolver, ValueNs},
type_ref::LiteralConstRef,
@ -136,7 +137,7 @@ pub fn intern_const_ref(
ty: Ty,
krate: CrateId,
) -> Const {
let layout = db.layout_of_ty(ty.clone(), Arc::new(TraitEnvironment::empty(krate)));
let layout = db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate));
let bytes = match value {
LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
@ -184,7 +185,7 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
_: &[String],
_: &Cycle,
_: &GeneralConstId,
_: &Substitution,
_: &Option<Arc<TraitEnvironment>>,
@ -194,7 +195,7 @@ pub(crate) fn const_eval_recover(
pub(crate) fn const_eval_static_recover(
_: &dyn HirDatabase,
_: &[String],
_: &Cycle,
_: &StaticId,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
@ -202,7 +203,7 @@ pub(crate) fn const_eval_static_recover(
pub(crate) fn const_eval_discriminant_recover(
_: &dyn HirDatabase,
_: &[String],
_: &Cycle,
_: &EnumVariantId,
) -> Result<i128, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
@ -280,7 +281,7 @@ pub(crate) fn const_eval_discriminant_variant(
// get an `InferenceResult` instead of an `InferenceContext`. And we should remove `ctx.clone().resolve_all()` here
// and make this function private. See the fixme comment on `InferenceContext::resolve_all`.
pub(crate) fn eval_to_const(
expr: Idx<Expr>,
expr: ExprId,
mode: ParamLoweringMode,
ctx: &mut InferenceContext<'_>,
args: impl FnOnce() -> Generics,
@ -288,13 +289,24 @@ pub(crate) fn eval_to_const(
) -> Const {
let db = ctx.db;
let infer = ctx.clone().resolve_all();
fn has_closure(body: &Body, expr: ExprId) -> bool {
if matches!(body[expr], Expr::Closure { .. }) {
return true;
}
let mut r = false;
body[expr].walk_child_exprs(|idx| r |= has_closure(body, idx));
r
}
if has_closure(&ctx.body, expr) {
// Type checking closures needs an isolated body (see the FIXME above). Bail out early to prevent a panic.
return unknown_const(infer[expr].clone());
}
if let Expr::Path(p) = &ctx.body.exprs[expr] {
let resolver = &ctx.resolver;
if let Some(c) = path_to_const(db, resolver, p, mode, args, debruijn, infer[expr].clone()) {
return c;
}
}
let infer = ctx.clone().resolve_all();
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
if let Ok(result) = interpret_mir(db, Arc::new(mir_body), true, None).0 {
return result;

View file

@ -24,7 +24,7 @@ use hir_def::{
};
use hir_expand::{
name::{AsName, Name},
HirFileId,
HirFileId, MacroFileIdExt,
};
use stdx::{always, never};
use syntax::{
@ -196,7 +196,7 @@ impl<'a> DeclValidator<'a> {
AttrDefId::GenericParamId(_) => None,
}
.map_or(false, |file_id| {
file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast())
matches!(file_id.macro_file(), Some(file_id) if file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast()))
})
};

View file

@ -23,7 +23,7 @@ use hir_def::{
EnumVariantId, HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId,
TraitId,
};
use hir_expand::{hygiene::Hygiene, name::Name};
use hir_expand::name::Name;
use intern::{Internable, Interned};
use itertools::Itertools;
use la_arena::ArenaMap;
@ -448,9 +448,8 @@ fn render_const_scalar(
) -> Result<(), HirDisplayError> {
// FIXME: We need to get krate from the final callers of the hir display
// infrastructure and have it here as a field on `f`.
let trait_env = Arc::new(TraitEnvironment::empty(
*f.db.crate_graph().crates_in_topological_order().last().unwrap(),
));
let trait_env =
TraitEnvironment::empty(*f.db.crate_graph().crates_in_topological_order().last().unwrap());
match ty.kind(Interner) {
TyKind::Scalar(s) => match s {
Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
@ -1732,13 +1731,13 @@ impl HirDisplay for TypeRef {
f.write_joined(bounds, " + ")?;
}
TypeRef::Macro(macro_call) => {
let macro_call = macro_call.to_node(f.db.upcast());
let ctx = hir_def::lower::LowerCtx::with_hygiene(
let ctx = hir_def::lower::LowerCtx::with_span_map(
f.db.upcast(),
&Hygiene::new_unhygienic(),
f.db.span_map(macro_call.file_id),
);
let macro_call = macro_call.to_node(f.db.upcast());
match macro_call.path() {
Some(path) => match Path::from_src(path, &ctx) {
Some(path) => match Path::from_src(&ctx, path) {
Some(path) => path.hir_fmt(f)?,
None => write!(f, "{{macro}}")?,
},

View file

@ -18,7 +18,6 @@ use hir_def::{
use hir_expand::name::{name, Name};
use stdx::always;
use syntax::ast::RangeOp;
use triomphe::Arc;
use crate::{
autoderef::{builtin_deref, deref_by_trait, Autoderef},
@ -40,7 +39,8 @@ use crate::{
traits::FnTrait,
utils::{generics, Generics},
Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnPointer, FnSig, FnSubst,
Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt,
TyKind,
};
use super::{
@ -579,7 +579,7 @@ impl InferenceContext<'_> {
}
ty
}
Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name),
Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name, expected),
Expr::Await { expr } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
@ -1291,7 +1291,7 @@ impl InferenceContext<'_> {
let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
let prev_env = block_id.map(|block_id| {
let prev_env = self.table.trait_env.clone();
Arc::make_mut(&mut self.table.trait_env).block = Some(block_id);
TraitEnvironment::with_block(&mut self.table.trait_env, block_id);
prev_env
});
@ -1456,7 +1456,13 @@ impl InferenceContext<'_> {
})
}
fn infer_field_access(&mut self, tgt_expr: ExprId, receiver: ExprId, name: &Name) -> Ty {
fn infer_field_access(
&mut self,
tgt_expr: ExprId,
receiver: ExprId,
name: &Name,
expected: &Expectation,
) -> Ty {
let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
if name.is_missing() {
@ -1482,28 +1488,42 @@ impl InferenceContext<'_> {
ty
}
None => {
// no field found,
let method_with_same_name_exists = {
self.get_traits_in_scope();
let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
method_resolution::lookup_method(
self.db,
&canonicalized_receiver.value,
self.table.trait_env.clone(),
self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()),
name,
)
.is_some()
};
// no field found, let's attempt to resolve it like a function so that IDE things
// work out while people are typing
let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
let resolved = method_resolution::lookup_method(
self.db,
&canonicalized_receiver.value,
self.table.trait_env.clone(),
self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()),
name,
);
self.result.diagnostics.push(InferenceDiagnostic::UnresolvedField {
expr: tgt_expr,
receiver: receiver_ty,
receiver: receiver_ty.clone(),
name: name.clone(),
method_with_same_name_exists,
method_with_same_name_exists: resolved.is_some(),
});
self.err_ty()
match resolved {
Some((adjust, func, _)) => {
let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
let generics = generics(self.db.upcast(), func.into());
let substs = self.substs_for_method_call(generics, None);
self.write_expr_adj(receiver, adjustments);
self.write_method_resolution(tgt_expr, func, substs.clone());
self.check_method_call(
tgt_expr,
&[],
self.db.value_ty(func.into()),
substs,
ty,
expected,
)
}
None => self.err_ty(),
}
}
}
}
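The shape of the new fallback: field resolution fails, method resolution is attempted with the same name, the `UnresolvedField` diagnostic is reported either way, and a found method is still type-checked as a call so types keep flowing while the user is mid-edit. A std-only sketch of that control flow (all names illustrative):

#[derive(Debug, PartialEq)]
enum Ty {
    Err,
    Known(&'static str),
}

fn infer_field_access(fields: &[&str], methods: &[&str], name: &str) -> Ty {
    if fields.contains(&name) {
        return Ty::Known("field_ty");
    }
    // Here the real code pushes UnresolvedField with
    // `method_with_same_name_exists = methods.contains(&name)`.
    if methods.contains(&name) {
        Ty::Known("method_return_ty") // checked as if it were a method call
    } else {
        Ty::Err
    }
}

fn main() {
    assert_eq!(infer_field_access(&[], &["len"], "len"), Ty::Known("method_return_ty"));
    assert_eq!(infer_field_access(&["x"], &[], "x"), Ty::Known("field_ty"));
    assert_eq!(infer_field_access(&[], &[], "nope"), Ty::Err);
}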
@ -1517,7 +1537,7 @@ impl InferenceContext<'_> {
generic_args: Option<&GenericArgs>,
expected: &Expectation,
) -> Ty {
let receiver_ty = self.infer_expr(receiver, &Expectation::none());
let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
let resolved = method_resolution::lookup_method(
@ -1568,23 +1588,32 @@ impl InferenceContext<'_> {
)
}
};
self.check_method_call(tgt_expr, args, method_ty, substs, receiver_ty, expected)
}
fn check_method_call(
&mut self,
tgt_expr: ExprId,
args: &[ExprId],
method_ty: Binders<Ty>,
substs: Substitution,
receiver_ty: Ty,
expected: &Expectation,
) -> Ty {
let method_ty = method_ty.substitute(Interner, &substs);
self.register_obligations_for_call(&method_ty);
let (formal_receiver_ty, param_tys, ret_ty, is_varargs) =
let ((formal_receiver_ty, param_tys), ret_ty, is_varargs) =
match method_ty.callable_sig(self.db) {
Some(sig) => {
Some(sig) => (
if !sig.params().is_empty() {
(
sig.params()[0].clone(),
sig.params()[1..].to_vec(),
sig.ret().clone(),
sig.is_varargs,
)
(sig.params()[0].clone(), sig.params()[1..].to_vec())
} else {
(self.err_ty(), Vec::new(), sig.ret().clone(), sig.is_varargs)
}
}
None => (self.err_ty(), Vec::new(), self.err_ty(), true),
(self.err_ty(), Vec::new())
},
sig.ret().clone(),
sig.is_varargs,
),
None => ((self.err_ty(), Vec::new()), self.err_ty(), true),
};
self.unify(&formal_receiver_ty, &receiver_ty);

View file

@ -390,6 +390,7 @@ impl InferenceContext<'_> {
}
}
#[derive(Debug)]
enum ValuePathResolution {
// It's awkward to wrap a single ID in two enums, but we need both and this saves fallible
// conversion between them + `unwrap()`.

View file

@ -2,6 +2,7 @@
use std::fmt;
use base_db::salsa::Cycle;
use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
use hir_def::{
layout::{
@ -431,7 +432,7 @@ pub fn layout_of_ty_query(
pub fn layout_of_ty_recover(
_: &dyn HirDatabase,
_: &[String],
_: &Cycle,
_: &Ty,
_: &Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {

View file

@ -2,6 +2,7 @@
use std::{cmp, ops::Bound};
use base_db::salsa::Cycle;
use hir_def::{
data::adt::VariantData,
layout::{Integer, LayoutCalculator, ReprOptions, TargetDataLayout},
@ -140,7 +141,7 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
pub fn layout_of_adt_recover(
_: &dyn HirDatabase,
_: &[String],
_: &Cycle,
_: &AdtId,
_: &Substitution,
_: &Arc<TraitEnvironment>,

View file

@ -1,6 +1,6 @@
//! The type system. We currently use this to infer types for completion, hover
//! information and various assists.
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![warn(rust_2018_idioms, unused_lifetimes)]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#[allow(unused)]
@ -73,8 +73,8 @@ pub use infer::{
};
pub use interner::Interner;
pub use lower::{
associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, TyDefId,
TyLoweringContext, ValueTyDefId,
associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, ParamLoweringMode,
TyDefId, TyLoweringContext, ValueTyDefId,
};
pub use mapping::{
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
@ -122,7 +122,7 @@ pub type TyKind = chalk_ir::TyKind<Interner>;
pub type TypeFlags = chalk_ir::TypeFlags;
pub type DynTy = chalk_ir::DynTy<Interner>;
pub type FnPointer = chalk_ir::FnPointer<Interner>;
// pub type FnSubst = chalk_ir::FnSubst<Interner>;
// pub type FnSubst = chalk_ir::FnSubst<Interner>; // a re-export so we don't lose the tuple constructor
pub use chalk_ir::FnSubst;
pub type ProjectionTy = chalk_ir::ProjectionTy<Interner>;
pub type AliasTy = chalk_ir::AliasTy<Interner>;
@ -322,8 +322,7 @@ impl CallableSig {
pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig {
CallableSig {
// FIXME: what to do about lifetime params? -> return PolyFnSig
// FIXME: use `Arc::from_iter` when it becomes available
params_and_return: Arc::from(
params_and_return: Arc::from_iter(
fn_ptr
.substitution
.clone()
@ -332,8 +331,7 @@ impl CallableSig {
.0
.as_slice(Interner)
.iter()
.map(|arg| arg.assert_ty_ref(Interner).clone())
.collect::<Vec<_>>(),
.map(|arg| arg.assert_ty_ref(Interner).clone()),
),
is_varargs: fn_ptr.sig.variadic,
safety: fn_ptr.sig.safety,

View file

@ -10,7 +10,7 @@ use std::{
iter,
};
use base_db::CrateId;
use base_db::{salsa::Cycle, CrateId};
use chalk_ir::{
cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
};
@ -407,11 +407,7 @@ impl<'a> TyLoweringContext<'a> {
drop(expander);
let ty = self.lower_ty(&type_ref);
self.expander
.borrow_mut()
.as_mut()
.unwrap()
.exit(self.db.upcast(), mark);
self.expander.borrow_mut().as_mut().unwrap().exit(mark);
Some(ty)
}
_ => {
@ -1458,13 +1454,12 @@ pub(crate) fn generic_predicates_for_param_query(
pub(crate) fn generic_predicates_for_param_recover(
_db: &dyn HirDatabase,
_cycle: &[String],
_cycle: &Cycle,
_def: &GenericDefId,
_param_id: &TypeOrConstParamId,
_assoc_name: &Option<Name>,
) -> Arc<[Binders<QuantifiedWhereClause>]> {
// FIXME: use `Arc::from_iter` when it becomes available
Arc::from(vec![])
Arc::from_iter(None)
}
pub(crate) fn trait_environment_for_body_query(
@ -1473,7 +1468,7 @@ pub(crate) fn trait_environment_for_body_query(
) -> Arc<TraitEnvironment> {
let Some(def) = def.as_generic_def_id() else {
let krate = def.module(db.upcast()).krate();
return Arc::new(TraitEnvironment::empty(krate));
return TraitEnvironment::empty(krate);
};
db.trait_environment(def)
}
@ -1533,12 +1528,7 @@ pub(crate) fn trait_environment_query(
let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
Arc::new(TraitEnvironment {
krate,
block: None,
traits_from_clauses: traits_in_scope.into_boxed_slice(),
env,
})
TraitEnvironment::new(krate, None, traits_in_scope.into_boxed_slice(), env)
}
/// Resolve the where clause(s) of an item with generics.
@ -1607,69 +1597,54 @@ pub(crate) fn generic_defaults_query(
let generic_params = generics(db.upcast(), def);
let parent_start_idx = generic_params.len_self();
let defaults = Arc::from(
generic_params
.iter()
.enumerate()
.map(|(idx, (id, p))| {
match p {
TypeOrConstParamData::TypeParamData(p) => {
let mut ty = p
.default
.as_ref()
.map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
// Each default can only refer to previous parameters.
// Type variable default referring to parameter coming
// after it is forbidden (FIXME: report diagnostic)
ty = fallback_bound_vars(ty, idx, parent_start_idx);
crate::make_binders(db, &generic_params, ty.cast(Interner))
}
TypeOrConstParamData::ConstParamData(p) => {
let mut val = p.default.as_ref().map_or_else(
|| {
unknown_const_as_generic(
db.const_param_ty(ConstParamId::from_unchecked(id)),
)
},
|c| {
let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
c.cast(Interner)
},
);
// Each default can only refer to previous parameters, see above.
val = fallback_bound_vars(val, idx, parent_start_idx);
make_binders(db, &generic_params, val)
}
}
})
// FIXME: use `Arc::from_iter` when it becomes available
.collect::<Vec<_>>(),
);
let defaults = Arc::from_iter(generic_params.iter().enumerate().map(|(idx, (id, p))| {
match p {
TypeOrConstParamData::TypeParamData(p) => {
let mut ty =
p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
// Each default can only refer to previous parameters.
// Type variable default referring to parameter coming
// after it is forbidden (FIXME: report diagnostic)
ty = fallback_bound_vars(ty, idx, parent_start_idx);
crate::make_binders(db, &generic_params, ty.cast(Interner))
}
TypeOrConstParamData::ConstParamData(p) => {
let mut val = p.default.as_ref().map_or_else(
|| {
unknown_const_as_generic(
db.const_param_ty(ConstParamId::from_unchecked(id)),
)
},
|c| {
let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
c.cast(Interner)
},
);
// Each default can only refer to previous parameters, see above.
val = fallback_bound_vars(val, idx, parent_start_idx);
make_binders(db, &generic_params, val)
}
}
}));
defaults
}
pub(crate) fn generic_defaults_recover(
db: &dyn HirDatabase,
_cycle: &[String],
_cycle: &Cycle,
def: &GenericDefId,
) -> Arc<[Binders<crate::GenericArg>]> {
let generic_params = generics(db.upcast(), *def);
// FIXME: this code is not covered in tests.
// we still need one default per parameter
let defaults = Arc::from(
generic_params
.iter_id()
.map(|id| {
let val = match id {
Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner),
Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
};
crate::make_binders(db, &generic_params, val)
})
// FIXME: use `Arc::from_iter` when it becomes available
.collect::<Vec<_>>(),
);
let defaults = Arc::from_iter(generic_params.iter_id().map(|id| {
let val = match id {
Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner),
Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
};
crate::make_binders(db, &generic_params, val)
}));
defaults
}
@ -1885,7 +1860,7 @@ pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
}
}
pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &Cycle, def: &TyDefId) -> Binders<Ty> {
let generics = match *def {
TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
@ -1935,7 +1910,7 @@ pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> T
pub(crate) fn impl_self_ty_recover(
db: &dyn HirDatabase,
_cycle: &[String],
_cycle: &Cycle,
impl_id: &ImplId,
) -> Binders<Ty> {
let generics = generics(db.upcast(), (*impl_id).into());


@ -168,12 +168,9 @@ impl TraitImpls {
) -> Arc<[Arc<Self>]> {
let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
let crate_graph = db.crate_graph();
// FIXME: use `Arc::from_iter` when it becomes available
Arc::from(
crate_graph
.transitive_deps(krate)
.map(|krate| db.trait_impls_in_crate(krate))
.collect::<Vec<_>>(),
Arc::from_iter(
crate_graph.transitive_deps(krate).map(|krate| db.trait_impls_in_crate(krate)),
)
}


@ -40,7 +40,6 @@ pub use monomorphization::{
use rustc_hash::FxHashMap;
use smallvec::{smallvec, SmallVec};
use stdx::{impl_from, never};
use triomphe::Arc;
use super::consteval::{intern_const_scalar, try_const_usize};
@ -147,7 +146,7 @@ impl<V, T> ProjectionElem<V, T> {
base = normalize(
db,
// FIXME: we should get this from caller
Arc::new(TraitEnvironment::empty(krate)),
TraitEnvironment::empty(krate),
base,
);
}


@ -21,7 +21,7 @@ use hir_def::{
AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
StaticId, VariantId,
};
use hir_expand::{mod_path::ModPath, InFile};
use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile};
use intern::Interned;
use la_arena::ArenaMap;
use rustc_hash::{FxHashMap, FxHashSet};


@ -2,7 +2,7 @@
use std::{fmt::Write, iter, mem};
use base_db::FileId;
use base_db::{salsa::Cycle, FileId};
use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
use hir_def::{
body::Body,
@ -2110,7 +2110,7 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
pub fn mir_body_recover(
_db: &dyn HirDatabase,
_cycle: &[String],
_cycle: &Cycle,
_def: &DefWithBodyId,
) -> Result<Arc<MirBody>> {
Err(MirLowerError::Loop)


@ -9,6 +9,7 @@
use std::mem;
use base_db::salsa::Cycle;
use chalk_ir::{
fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
ConstData, DebruijnIndex,
@ -300,7 +301,7 @@ pub fn monomorphized_mir_body_query(
pub fn monomorphized_mir_body_recover(
_: &dyn HirDatabase,
_: &[String],
_: &Cycle,
_: &DefWithBodyId,
_: &Substitution,
_: &Arc<crate::TraitEnvironment>,


@ -30,6 +30,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}


@ -2000,3 +2000,15 @@ fn test() {
"#,
);
}
#[test]
fn rustc_test_issue_52437() {
check_types(
r#"
fn main() {
let x = [(); &(&'static: loop { |x| {}; }) as *const _ as usize]
//^ [(); _]
}
"#,
);
}


@ -48,18 +48,32 @@ pub struct TraitEnvironment {
pub krate: CrateId,
pub block: Option<BlockId>,
// FIXME make this a BTreeMap
pub(crate) traits_from_clauses: Box<[(Ty, TraitId)]>,
traits_from_clauses: Box<[(Ty, TraitId)]>,
pub env: chalk_ir::Environment<Interner>,
}
impl TraitEnvironment {
pub fn empty(krate: CrateId) -> Self {
TraitEnvironment {
pub fn empty(krate: CrateId) -> Arc<Self> {
Arc::new(TraitEnvironment {
krate,
block: None,
traits_from_clauses: Box::default(),
env: chalk_ir::Environment::new(Interner),
}
})
}
pub fn new(
krate: CrateId,
block: Option<BlockId>,
traits_from_clauses: Box<[(Ty, TraitId)]>,
env: chalk_ir::Environment<Interner>,
) -> Arc<Self> {
Arc::new(TraitEnvironment { krate, block, traits_from_clauses, env })
}
pub fn with_block(this: &mut Arc<Self>, block: BlockId) {
Arc::make_mut(this).block = Some(block);
}
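
`TraitEnvironment::empty` and the new `::new` now hand out `Arc<Self>` directly, and `with_block` mutates through the shared handle with `Arc::make_mut`, cloning only if the environment is still aliased. A standalone sketch with `std::sync::Arc` (assuming `triomphe::Arc::make_mut` behaves the same way):

    use std::sync::Arc;

    #[derive(Clone)]
    struct Env { block: Option<u32> }

    // Copy-on-write update through a shared handle: `Arc::make_mut` clones
    // the inner value only if another `Arc` still points at it.
    fn with_block(this: &mut Arc<Env>, block: u32) {
        Arc::make_mut(this).block = Some(block);
    }

    fn main() {
        let mut env = Arc::new(Env { block: None });
        let snapshot = env.clone(); // a second owner forces the clone below
        with_block(&mut env, 7);
        assert_eq!(env.block, Some(7));
        assert!(snapshot.block.is_none()); // the shared copy is untouched
    }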
pub fn traits_in_scope_from_clauses(&self, ty: Ty) -> impl Iterator<Item = TraitId> + '_ {


@ -1,5 +1,6 @@
//! Attributes & documentation for hir types.
use base_db::FileId;
use hir_def::{
attr::AttrsWithOwner,
item_scope::ItemInNs,
@ -8,7 +9,10 @@ use hir_def::{
resolver::{HasResolver, Resolver, TypeNs},
AssocItemId, AttrDefId, ModuleDefId,
};
use hir_expand::{hygiene::Hygiene, name::Name};
use hir_expand::{
name::Name,
span::{RealSpanMap, SpanMapRef},
};
use hir_ty::db::HirDatabase;
use syntax::{ast, AstNode};
@ -234,7 +238,11 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
if ast_path.syntax().text() != link {
return None;
}
ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())
ModPath::from_src(
db.upcast(),
ast_path,
SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::BOGUS)),
)
};
let full = try_get_modpath(link);


@ -3,10 +3,27 @@
//! we didn't do that.
//!
//! But we need this for at least LRU caching at the query level.
pub use hir_def::db::*;
pub use hir_def::db::{
AttrsQuery, BlockDefMapQuery, BlockItemTreeQueryQuery, BodyQuery, BodyWithSourceMapQuery,
ConstDataQuery, ConstVisibilityQuery, CrateDefMapQueryQuery, CrateLangItemsQuery,
CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery,
EnumDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery,
FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery,
FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataQuery,
ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery,
InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery,
InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery,
InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery,
InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery,
InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangAttrQuery, LangItemQuery,
Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataQuery,
StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataQuery,
TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataQuery,
UnionDataWithDiagnosticsQuery, VariantsAttrsQuery, VariantsAttrsSourceMapQuery,
};
pub use hir_expand::db::{
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery,
MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery,
};
pub use hir_ty::db::*;


@ -12,7 +12,7 @@ use hir_def::path::ModPath;
use hir_expand::{name::Name, HirFileId, InFile};
use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
use crate::{AssocItem, Field, Local, MacroKind, Type};
use crate::{AssocItem, Field, Local, MacroKind, Trait, Type};
macro_rules! diagnostics {
($($diag:ident,)*) => {
@ -55,6 +55,7 @@ diagnostics![
ReplaceFilterMapNextWithFindMap,
TraitImplIncorrectSafety,
TraitImplMissingAssocItems,
TraitImplRedundantAssocItems,
TraitImplOrphan,
TypedHole,
TypeMismatch,
@ -310,3 +311,11 @@ pub struct TraitImplMissingAssocItems {
pub impl_: AstPtr<ast::Impl>,
pub missing: Vec<(Name, AssocItem)>,
}
#[derive(Debug, PartialEq, Eq)]
pub struct TraitImplRedundantAssocItems {
pub file_id: HirFileId,
pub trait_: Trait,
pub impl_: AstPtr<ast::Impl>,
pub assoc_item: (Name, AssocItem),
}


@ -17,7 +17,7 @@
//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
//! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![warn(rust_2018_idioms, unused_lifetimes)]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![recursion_limit = "512"]
@ -59,7 +59,7 @@ use hir_def::{
Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId,
TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
};
use hir_expand::{name::name, MacroCallKind};
use hir_expand::{attrs::collect_attrs, name::name, MacroCallKind};
use hir_ty::{
all_super_traits, autoderef, check_orphan_rules,
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
@ -81,7 +81,7 @@ use once_cell::unsync::Lazy;
use rustc_hash::FxHashSet;
use stdx::{impl_from, never};
use syntax::{
ast::{self, HasAttrs as _, HasDocComments, HasName},
ast::{self, HasAttrs as _, HasName},
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, T,
};
use triomphe::Arc;
@ -92,7 +92,9 @@ pub use crate::{
attrs::{resolve_doc_path_on, HasAttrs},
diagnostics::*,
has_source::HasSource,
semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
semantics::{
DescendPreference, PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits,
},
};
// Be careful with these re-exports.
@ -123,8 +125,10 @@ pub use {
},
hir_expand::{
attrs::{Attr, AttrId},
hygiene::{marks_rev, SyntaxContextExt},
name::{known, Name},
ExpandResult, HirFileId, InFile, MacroFile, Origin,
tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
MacroFileIdExt,
},
hir_ty::{
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
@ -140,7 +144,10 @@ pub use {
#[allow(unused)]
use {
hir_def::path::Path,
hir_expand::{hygiene::Hygiene, name::AsName},
hir_expand::{
name::AsName,
span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
},
};
/// hir::Crate describes a single crate. It's the main interface with which
@ -601,7 +608,7 @@ impl Module {
let tree = loc.id.item_tree(db.upcast());
let node = &tree[loc.id.value];
let file_id = loc.id.file_id();
if file_id.is_builtin_derive(db.upcast()) {
if file_id.macro_file().map_or(false, |it| it.is_builtin_derive(db.upcast())) {
// these expansions come from us; diagnosing them is a waste of resources
// FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
continue;
@ -664,7 +671,8 @@ impl Module {
_ => (),
};
if let Some(trait_) = trait_ {
// Negative impls can't have items; don't emit a missing-items diagnostic for them
if let (false, Some(trait_)) = (impl_is_negative, trait_) {
let items = &db.trait_data(trait_.into()).items;
let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
AssocItemId::FunctionId(it) => !db.function_data(it).has_body(),
@ -686,6 +694,26 @@ impl Module {
},
));
let redundant = impl_assoc_items_scratch
.iter()
.filter(|(id, name)| {
!items.iter().any(|(impl_name, impl_item)| {
discriminant(impl_item) == discriminant(id) && impl_name == name
})
})
.map(|(item, name)| (name.clone(), AssocItem::from(*item)));
for (name, assoc_item) in redundant {
acc.push(
TraitImplRedundantAssocItems {
trait_,
file_id,
impl_: ast_id_map.get(node.ast_id()),
assoc_item: (name, assoc_item),
}
.into(),
)
}
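
The redundant-items check above pairs an impl item with the trait's declarations by `std::mem::discriminant` plus name, i.e. "same kind of associated item", deliberately ignoring the payload. A small illustration of what `discriminant` compares:

    use std::mem::discriminant;

    #[derive(Debug)]
    enum AssocItem {
        Function(&'static str),
        Const(&'static str),
    }

    fn main() {
        // `discriminant` compares which *variant* two values are, ignoring
        // the payload: "is this the same kind of item as the trait's?".
        let in_trait = AssocItem::Function("len");
        let wrong_kind = AssocItem::Const("len");
        let right_kind = AssocItem::Function("push");
        assert_ne!(discriminant(&in_trait), discriminant(&wrong_kind));
        assert_eq!(discriminant(&in_trait), discriminant(&right_kind));
    }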
let missing: Vec<_> = required_items
.filter(|(name, id)| {
!impl_assoc_items_scratch.iter().any(|(impl_item, impl_name)| {
@ -947,10 +975,9 @@ fn precise_macro_call_location(
// Compute the precise location of the macro name's token in the derive
// list.
let token = (|| {
let derive_attr = node
.doc_comments_and_attrs()
let derive_attr = collect_attrs(&node)
.nth(derive_attr_index.ast_index())
.and_then(Either::left)?;
.and_then(|x| Either::left(x.1))?;
let token_tree = derive_attr.meta()?.token_tree()?;
let group_by = token_tree
.syntax()
@ -975,10 +1002,9 @@ fn precise_macro_call_location(
}
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let node = ast_id.to_node(db.upcast());
let attr = node
.doc_comments_and_attrs()
let attr = collect_attrs(&node)
.nth(invoc_attr_index.ast_index())
.and_then(Either::left)
.and_then(|x| Either::left(x.1))
.unwrap_or_else(|| {
panic!("cannot find attribute #{}", invoc_attr_index.ast_index())
});
@ -3490,9 +3516,34 @@ impl Impl {
self.id.lookup(db.upcast()).container.into()
}
pub fn as_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option<InMacroFile<ast::Path>> {
let src = self.source(db)?;
src.file_id.as_builtin_derive_attr_node(db.upcast())
let macro_file = src.file_id.macro_file()?;
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let (derive_attr, derive_index) = match loc.kind {
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
let module_id = self.id.lookup(db.upcast()).container;
(
db.crate_def_map(module_id.krate())[module_id.local_id]
.scope
.derive_macro_invoc(ast_id, derive_attr_index)?,
derive_index,
)
}
_ => return None,
};
let file_id = MacroFileId { macro_call_id: derive_attr };
let path = db
.parse_macro_expansion(file_id)
.value
.0
.syntax_node()
.children()
.nth(derive_index as usize)
.and_then(<ast::Attr as AstNode>::cast)
.and_then(|it| it.path())?;
Some(InMacroFile { file_id, value: path })
}
pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {
@ -3512,10 +3563,9 @@ impl TraitRef {
resolver: &Resolver,
trait_ref: hir_ty::TraitRef,
) -> TraitRef {
let env = resolver.generic_def().map_or_else(
|| Arc::new(TraitEnvironment::empty(resolver.krate())),
|d| db.trait_environment(d),
);
let env = resolver
.generic_def()
.map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
TraitRef { env, trait_ref }
}
@ -3655,15 +3705,14 @@ impl Type {
resolver: &Resolver,
ty: Ty,
) -> Type {
let environment = resolver.generic_def().map_or_else(
|| Arc::new(TraitEnvironment::empty(resolver.krate())),
|d| db.trait_environment(d),
);
let environment = resolver
.generic_def()
.map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
Type { env: environment, ty }
}
pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
Type { env: Arc::new(TraitEnvironment::empty(krate)), ty }
Type { env: TraitEnvironment::empty(krate), ty }
}
pub fn reference(inner: &Type, m: Mutability) -> Type {
@ -3679,10 +3728,9 @@ impl Type {
fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
let resolver = lexical_env.resolver(db.upcast());
let environment = resolver.generic_def().map_or_else(
|| Arc::new(TraitEnvironment::empty(resolver.krate())),
|d| db.trait_environment(d),
);
let environment = resolver
.generic_def()
.map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
Type { env: environment, ty }
}
@ -4252,10 +4300,10 @@ impl Type {
let canonical = hir_ty::replace_errors_with_variables(&self.ty);
let krate = scope.krate();
let environment = scope.resolver().generic_def().map_or_else(
|| Arc::new(TraitEnvironment::empty(krate.id)),
|d| db.trait_environment(d),
);
let environment = scope
.resolver()
.generic_def()
.map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
method_resolution::iterate_method_candidates_dyn(
&canonical,
@ -4309,10 +4357,10 @@ impl Type {
let canonical = hir_ty::replace_errors_with_variables(&self.ty);
let krate = scope.krate();
let environment = scope.resolver().generic_def().map_or_else(
|| Arc::new(TraitEnvironment::empty(krate.id)),
|d| db.trait_environment(d),
);
let environment = scope
.resolver()
.generic_def()
.map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
method_resolution::iterate_path_candidates(
&canonical,


@ -2,7 +2,11 @@
mod source_to_def;
use std::{cell::RefCell, fmt, iter, mem, ops};
use std::{
cell::RefCell,
fmt, iter, mem,
ops::{self, ControlFlow, Not},
};
use base_db::{FileId, FileRange};
use either::Either;
@ -13,16 +17,21 @@ use hir_def::{
nameres::MacroSubNs,
resolver::{self, HasResolver, Resolver, TypeNs},
type_ref::Mutability,
AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
};
use hir_expand::{
db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, InMacroFile, MacroCallId,
MacroFileId, MacroFileIdExt,
};
use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
ast::{self, HasAttrs as _, HasDocComments, HasGenericParams, HasLoopBody, IsString as _},
match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
TextRange, TextSize,
};
use crate::{
@ -35,7 +44,13 @@ use crate::{
TypeAlias, TypeParam, VariantDef,
};
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum DescendPreference {
SameText,
SameKind,
None,
}
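
Callers of the descend API now state a `DescendPreference` up front; internally it is lowered once into a borrowed `Dp<'t>` holding the probe token's text or kind, so the per-candidate comparison does no repeated lookups. A toy version of that lower-once-then-match pattern (all names here are illustrative, not rust-analyzer's):

    #[derive(Clone, Copy)]
    enum Preference { SameText, SameLen, Any }

    // Borrowed internal mirror of the public enum: the comparison key is
    // resolved once from the probe, then reused for every candidate.
    #[derive(Clone, Copy)]
    enum Resolved<'t> {
        SameText(&'t str),
        SameLen(usize),
        Any,
    }

    fn matching<'a>(probe: &'a str, mode: Preference, candidates: &[&'a str]) -> Vec<&'a str> {
        let mode = match mode {
            Preference::SameText => Resolved::SameText(probe),
            Preference::SameLen => Resolved::SameLen(probe.len()),
            Preference::Any => Resolved::Any,
        };
        candidates
            .iter()
            .copied()
            .filter(|c| match mode {
                Resolved::SameText(t) => *c == t,
                Resolved::SameLen(n) => c.len() == n,
                Resolved::Any => true,
            })
            .collect()
    }

    fn main() {
        assert_eq!(matching("foo", Preference::SameText, &["foo", "bar"]), ["foo"]);
        assert_eq!(matching("foo", Preference::SameLen, &["bar", "quux"]), ["bar"]);
        assert_eq!(matching("foo", Preference::Any, &["x"]), ["x"]);
    }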
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution {
/// An item
Def(ModuleDef),
@ -114,11 +129,12 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
// Rootnode to HirFileId cache
/// Rootnode to HirFileId cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// MacroCall to its expansion's HirFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
}
impl<DB> fmt::Debug for Semantics<'_, DB> {
@ -182,20 +198,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
}
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
self.imp.resolve_method_call(call).map(Function::from)
}
/// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
pub fn resolve_method_call_field_fallback(
&self,
call: &ast::MethodCallExpr,
) -> Option<Either<Function, Field>> {
self.imp
.resolve_method_call_fallback(call)
.map(|it| it.map_left(Function::from).map_right(Field::from))
}
pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
self.imp.resolve_await_to_poll(await_expr).map(Function::from)
}
@ -255,7 +257,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?;
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
let node = self.parse_or_expand(file_id);
let node = self.parse_or_expand(file_id.into());
Some(node)
}
@ -388,11 +390,72 @@ impl<'db> SemanticsImpl<'db> {
)
}
pub fn as_format_args_parts(
&self,
string: &ast::String,
) -> Option<Vec<(TextRange, Option<PathResolution>)>> {
if let Some(quote) = string.open_quote_text_range() {
return self
.descend_into_macros(DescendPreference::SameText, string.syntax().clone())
.into_iter()
.find_map(|token| {
let string = ast::String::cast(token)?;
let literal =
string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
let format_args = self.wrap_node_infile(format_args);
let res = source_analyzer
.as_format_args_parts(self.db, format_args.as_ref())?
.map(|(range, res)| (range + quote.end(), res))
.collect();
Some(res)
});
}
None
}
pub fn check_for_format_args_template(
&self,
original_token: SyntaxToken,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
if let Some(original_string) = ast::String::cast(original_token.clone()) {
if let Some(quote) = original_string.open_quote_text_range() {
return self
.descend_into_macros(DescendPreference::SameText, original_token.clone())
.into_iter()
.find_map(|token| {
self.resolve_offset_in_format_args(
ast::String::cast(token)?,
offset - quote.end(),
)
})
.map(|(range, res)| (range + quote.end(), res));
}
}
None
}
fn resolve_offset_in_format_args(
&self,
string: ast::String,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
debug_assert!(offset <= string.syntax().text_range().len());
let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
let source_analyzer = &self.analyze_no_infer(format_args.syntax())?;
let format_args = self.wrap_node_infile(format_args);
source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset)
}
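
These helpers map an offset inside a string literal back to the implicit format-args capture it names, then resolve that name as an ordinary value path. For reference, the language feature being resolved is just this (nothing rust-analyzer-specific):

    fn main() {
        let user = "ferris";
        // `{user}` is an implicit format-args capture: the identifier inside
        // the braces refers to a binding in scope, which is what
        // `resolve_offset_in_format_args` maps back to a definition.
        let greeting = format!("hello, {user}!");
        assert_eq!(greeting, "hello, ferris!");
    }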
/// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
// This might not be the correct way to do this, but it works for now
let mut res = smallvec![];
let tokens = (|| {
// FIXME: the trivia skipping should not be necessary
let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
Some((first, last))
@ -403,24 +466,28 @@ impl<'db> SemanticsImpl<'db> {
};
if first == last {
self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| {
if let Some(node) = value.parent_ancestors().find_map(N::cast) {
// node is just the token, so descend the token
self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
if let Some(node) = value
.parent_ancestors()
.take_while(|it| it.text_range() == value.text_range())
.find_map(N::cast)
{
res.push(node)
}
false
ControlFlow::Continue(())
});
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
self.descend_into_macros_impl(first, 0.into(), &mut |token| {
self.descend_into_macros_impl(first, &mut |token| {
scratch.push(token);
false
ControlFlow::Continue(())
});
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
last,
0.into(),
&mut |InFile { value: last, file_id: last_fid }| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
@ -437,7 +504,7 @@ impl<'db> SemanticsImpl<'db> {
}
}
}
false
ControlFlow::Continue(())
},
);
}
@ -449,32 +516,42 @@ impl<'db> SemanticsImpl<'db> {
/// be considered for the mapping in case of inline format args.
pub fn descend_into_macros(
&self,
mode: DescendPreference,
token: SyntaxToken,
offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
enum Dp<'t> {
SameText(&'t str),
SameKind(SyntaxKind),
None,
}
let fetch_kind = |token: &SyntaxToken| match token.parent() {
Some(node) => match node.kind() {
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
_ => token.kind(),
},
None => token.kind(),
};
let mode = match mode {
DescendPreference::SameText => Dp::SameText(token.text()),
DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
DescendPreference::None => Dp::None,
};
let mut res = smallvec![];
self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
res.push(value);
false
});
res
}
/// Descend the token into macro calls to all its mapped counterparts that have the same text as the input token.
///
/// Returns the original non-descended token if none of the mapped counterparts have the same text.
pub fn descend_into_macros_with_same_text(
&self,
token: SyntaxToken,
offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
let text = token.text();
let mut res = smallvec![];
self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
if value.text() == text {
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
let is_a_match = match mode {
Dp::SameText(text) => value.text() == text,
Dp::SameKind(preferred_kind) => {
let kind = fetch_kind(&value);
kind == preferred_kind
// special case for derive macros
|| (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
}
Dp::None => true,
};
if is_a_match {
res.push(value);
}
false
ControlFlow::Continue(())
});
if res.is_empty() {
res.push(token);
@ -482,44 +559,46 @@ impl<'db> SemanticsImpl<'db> {
res
}
pub fn descend_into_macros_with_kind_preference(
pub fn descend_into_macros_single(
&self,
mode: DescendPreference,
token: SyntaxToken,
offset: TextSize,
) -> SyntaxToken {
enum Dp<'t> {
SameText(&'t str),
SameKind(SyntaxKind),
None,
}
let fetch_kind = |token: &SyntaxToken| match token.parent() {
Some(node) => match node.kind() {
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
node.parent().map_or(kind, |it| it.kind())
}
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
_ => token.kind(),
},
None => token.kind(),
};
let preferred_kind = fetch_kind(&token);
let mut res = None;
self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
if fetch_kind(&value) == preferred_kind {
res = Some(value);
true
} else {
if let None = res {
res = Some(value)
}
false
}
});
res.unwrap_or(token)
}
/// Descend the token into its macro call if it is part of one, returning the token in the
/// expansion that it is associated with. If `offset` points into the token's range, it will
/// be considered for the mapping in case of inline format args.
pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
let mode = match mode {
DescendPreference::SameText => Dp::SameText(token.text()),
DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
DescendPreference::None => Dp::None,
};
let mut res = token.clone();
self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
let is_a_match = match mode {
Dp::SameText(text) => value.text() == text,
Dp::SameKind(preferred_kind) => {
let kind = fetch_kind(&value);
kind == preferred_kind
// special case for derive macros
|| (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
}
Dp::None => true,
};
res = value;
true
if is_a_match {
ControlFlow::Break(())
} else {
ControlFlow::Continue(())
}
});
res
}
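
The descent callbacks now return `std::ops::ControlFlow<()>` instead of a bare `bool`, making "stop after the first hit" explicit at the call site: `descend_into_macros_single` breaks on the first match, while the multi-token variant always continues. A self-contained sketch of that callback shape:

    use std::ops::ControlFlow;

    // A visitor that signals stop/keep-going via `ControlFlow` rather than
    // an easily-misread bool return.
    fn for_each_token(tokens: &[&str], f: &mut dyn FnMut(&str) -> ControlFlow<()>) {
        for t in tokens {
            if f(t).is_break() {
                break;
            }
        }
    }

    fn main() {
        let mut seen = Vec::new();
        for_each_token(&["a", "b", "c"], &mut |t| {
            seen.push(t.to_string());
            if t == "b" {
                ControlFlow::Break(()) // first match wins
            } else {
                ControlFlow::Continue(())
            }
        });
        assert_eq!(seen, ["a", "b"]);
    }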
@ -527,177 +606,193 @@ impl<'db> SemanticsImpl<'db> {
fn descend_into_macros_impl(
&self,
token: SyntaxToken,
// FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
// mapping, specifically for node downmapping
offset: TextSize,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
let _p = profile::span("descend_into_macros");
let relative_token_offset = token.text_range().start().checked_sub(offset);
let parent = match token.parent() {
let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(it) => it,
None => return,
};
let sa = match self.analyze_no_infer(&parent) {
Some(it) => it,
None => return,
};
let def_map = sa.resolver.def_map();
let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
let span = match sa.file_id.file_id() {
Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
None => {
stdx::never!();
return;
}
};
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
let def_map = sa.resolver.def_map();
let mut process_expansion_for_token =
|stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
let expansion_info = cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
.as_ref()?;
let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
let expansion_info = cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
{
let InFile { file_id, value } = expansion_info.expanded();
self.cache(value, file_id);
}
{
let InMacroFile { file_id, value } = expansion_info.expanded();
self.cache(value, file_id.into());
}
let mapped_tokens = expansion_info.map_token_down(
self.db.upcast(),
item,
token,
relative_token_offset,
)?;
let len = stack.len();
let InMacroFile { file_id, value: mapped_tokens } =
expansion_info.map_range_down(span)?;
let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
// requeue the tokens we got from mapping our current token down
stack.extend(mapped_tokens);
// if the length changed we have found a mapping for the token
(stack.len() != len).then_some(())
};
// if the length changed we have found a mapping for the token
let res = mapped_tokens.is_empty().not().then_some(());
// requeue the tokens we got from mapping our current token down
stack.push((HirFileId::from(file_id), mapped_tokens));
res
};
// Remap the next token in the queue into the macro call it's in; if it is not being remapped,
// either because it's not in a macro call or because it's unused, push it into the result vec;
// otherwise push the remapped tokens back into the queue, as they can potentially be remapped again.
while let Some(token) = stack.pop() {
self.db.unwind_if_cancelled();
let was_not_remapped = (|| {
// First expand into attribute invocations
let containing_attribute_macro_call = self.with_ctx(|ctx| {
token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
if item.attrs().next().is_none() {
// Don't force populate the dyn cache for items that don't have an attribute anyways
return None;
}
Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
})
});
if let Some((call_id, item)) = containing_attribute_macro_call {
let file_id = call_id.as_file();
return process_expansion_for_token(
&mut stack,
file_id,
Some(item),
token.as_ref(),
);
}
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])];
// Then check for token trees, that means we are either in a function-like macro or
// secondary attribute inputs
let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
let parent = tt.syntax().parent()?;
if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
return None;
}
if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
return None;
}
if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
let mcall = token.with_value(macro_call);
let file_id = match mcache.get(&mcall) {
Some(&it) => it,
None => {
let it = sa.expand(self.db, mcall.as_ref())?;
mcache.insert(mcall, it);
it
}
};
process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
} else if let Some(meta) = ast::Meta::cast(parent) {
// attribute we failed expansion for earlier, this might be a derive invocation
// or derive helper attribute
let attr = meta.parent_attr()?;
let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
// this might be a derive, or a derive helper on an ADT
let derive_call = self.with_ctx(|ctx| {
// so try downmapping the token into the pseudo derive expansion
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
ctx.attr_to_derive_macro_call(
token.with_value(&adt),
token.with_value(attr.clone()),
)
.map(|(_, call_id, _)| call_id)
});
match derive_call {
Some(call_id) => {
// resolved to a derive
let file_id = call_id.as_file();
return process_expansion_for_token(
&mut stack,
file_id,
Some(adt.into()),
token.as_ref(),
);
while let Some((file_id, mut tokens)) = stack.pop() {
while let Some(token) = tokens.pop() {
let was_not_remapped = (|| {
// First expand into attribute invocations
let containing_attribute_macro_call = self.with_ctx(|ctx| {
token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
if item.attrs().next().is_none() {
// Don't force populate the dyn cache for items that don't have an attribute anyways
return None;
}
None => Some(adt),
}
} else {
// Otherwise this could be a derive helper on a variant or field
if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
{
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
} else if let Some(field) =
attr.syntax().parent().and_then(ast::TupleField::cast)
{
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
} else if let Some(variant) =
attr.syntax().parent().and_then(ast::Variant::cast)
{
variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
} else {
None
}
}?;
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
Some((
ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
item,
))
})
});
if let Some((call_id, item)) = containing_attribute_macro_call {
let file_id = call_id.as_macro_file();
let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
invoc_attr_index.ast_index()
}
_ => 0,
};
let text_range = item.syntax().text_range();
let start = item
.doc_comments_and_attrs()
.nth(attr_id)
.map(|attr| match attr {
Either::Left(it) => it.syntax().text_range().start(),
Either::Right(it) => it.syntax().text_range().start(),
})
.unwrap_or_else(|| text_range.start());
let text_range = TextRange::new(start, text_range.end());
// remove any other tokens in this macro input; all their mappings are the
// same as this one
tokens.retain(|t| !text_range.contains_range(t.text_range()));
return process_expansion_for_token(&mut stack, file_id);
}
// Then check for token trees, that means we are either in a function-like macro or
// secondary attribute inputs
let tt = token.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
let parent = tt.syntax().parent()?;
if tt.left_delimiter_token().map_or(false, |it| it == token) {
return None;
}
// Not an attribute, nor a derive, so it's either a builtin or a derive helper
// Try to resolve to a derive helper and downmap
let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
let helpers =
def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
let item = Some(adt.into());
let mut res = None;
for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
res = res.or(process_expansion_for_token(
&mut stack,
derive.as_file(),
item.clone(),
token.as_ref(),
));
if tt.right_delimiter_token().map_or(false, |it| it == token) {
return None;
}
res
} else {
None
}
})()
.is_none();
if was_not_remapped && f(token) {
break;
if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
InFile::new(file_id, macro_call);
let file_id = match mcache.get(&mcall) {
Some(&it) => it,
None => {
let it = sa.expand(self.db, mcall.as_ref())?;
mcache.insert(mcall, it);
it
}
};
let text_range = tt.syntax().text_range();
// remove any other tokens in this macro input; all their mappings are the
// same as this one
tokens.retain(|t| !text_range.contains_range(t.text_range()));
process_expansion_for_token(&mut stack, file_id)
} else if let Some(meta) = ast::Meta::cast(parent) {
// attribute we failed expansion for earlier, this might be a derive invocation
// or derive helper attribute
let attr = meta.parent_attr()?;
let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast)
{
// this might be a derive, or a derive helper on an ADT
let derive_call = self.with_ctx(|ctx| {
// so try downmapping the token into the pseudo derive expansion
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
ctx.attr_to_derive_macro_call(
InFile::new(file_id, &adt),
InFile::new(file_id, attr.clone()),
)
.map(|(_, call_id, _)| call_id)
});
match derive_call {
Some(call_id) => {
// resolved to a derive
let file_id = call_id.as_macro_file();
let text_range = attr.syntax().text_range();
// remove any other tokens in this macro input; all their mappings are the
// same as this one
tokens.retain(|t| !text_range.contains_range(t.text_range()));
return process_expansion_for_token(&mut stack, file_id);
}
None => Some(adt),
}
} else {
// Otherwise this could be a derive helper on a variant or field
if let Some(field) =
attr.syntax().parent().and_then(ast::RecordField::cast)
{
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
} else if let Some(field) =
attr.syntax().parent().and_then(ast::TupleField::cast)
{
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
} else if let Some(variant) =
attr.syntax().parent().and_then(ast::Variant::cast)
{
variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
} else {
None
}
}?;
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) {
return None;
}
// Not an attribute, nor a derive, so it's either a builtin or a derive helper
// Try to resolve to a derive helper and downmap
let attr_name =
attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
let id = self.db.ast_id_map(file_id).ast_id(&adt);
let helpers = def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
let mut res = None;
for (.., derive) in
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
{
res = res.or(process_expansion_for_token(
&mut stack,
derive.as_macro_file(),
));
}
res
} else {
None
}
})()
.is_none();
if was_not_remapped && f(InFile::new(file_id, token)).is_break() {
break;
}
}
}
}
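
One behavioral addition in this rewrite: tokens are now queued per file, and once one token of a macro input remaps, every queued sibling inside the same input range is dropped (the `tokens.retain(..)` calls) because it would map identically. A reduced model of that pruning:

    use std::ops::Range;

    // Drop queued offsets that fall inside an already-remapped input range;
    // their mapping would duplicate the one just processed.
    fn prune_covered(queue: &mut Vec<usize>, covered: Range<usize>) {
        queue.retain(|offset| !covered.contains(offset));
    }

    fn main() {
        let mut queue = vec![3, 8, 15, 42];
        prune_covered(&mut queue, 0..16); // the whole `foo!(..)` input
        assert_eq!(queue, [42]);
    }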
@ -712,7 +807,7 @@ impl<'db> SemanticsImpl<'db> {
offset: TextSize,
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
node.token_at_offset(offset)
.map(move |token| self.descend_into_macros(token, offset))
.map(move |token| self.descend_into_macros(DescendPreference::None, token))
.map(|descendants| {
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
})
@ -737,14 +832,16 @@ impl<'db> SemanticsImpl<'db> {
pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
let node = self.find_file(node);
node.original_file_range_opt(self.db.upcast())
.filter(|(_, ctx)| ctx.is_root())
.map(TupleExt::head)
}
/// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
|InFile { file_id, value }| {
self.cache(find_root(value.syntax()), file_id);
|InRealFile { file_id, value }| {
self.cache(find_root(value.syntax()), file_id.into());
value
},
)
@ -755,8 +852,8 @@ impl<'db> SemanticsImpl<'db> {
pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
let InFile { file_id, .. } = self.find_file(node);
InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
|InFile { file_id, value }| {
self.cache(find_root(&value), file_id);
|InRealFile { file_id, value }| {
self.cache(find_root(&value), file_id.into());
value
},
)
@ -787,7 +884,7 @@ impl<'db> SemanticsImpl<'db> {
Some(parent) => Some(InFile::new(file_id, parent)),
None => {
self.cache(value.clone(), file_id);
file_id.call_node(db)
Some(file_id.macro_file()?.call_node(db))
}
}
})
@ -851,9 +948,9 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
let analyze = self.analyze(path.syntax())?;
let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
let hir_path = Path::from_src(path.clone(), &ctx)?;
let span_map = self.db.span_map(analyze.file_id);
let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map);
let hir_path = Path::from_src(&ctx, path.clone())?;
match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
TypeNs::TraitId(id) => Some(Trait { id }),
_ => None,
@ -937,14 +1034,15 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
}
fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
self.analyze(call.syntax())?.resolve_method_call(self.db, call)
}
fn resolve_method_call_fallback(
/// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
pub fn resolve_method_call_fallback(
&self,
call: &ast::MethodCallExpr,
) -> Option<Either<FunctionId, FieldId>> {
) -> Option<Either<Function, Field>> {
self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
}
@ -976,6 +1074,13 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(field.syntax())?.resolve_field(self.db, field)
}
pub fn resolve_field_fallback(
&self,
field: &ast::FieldExpr,
) -> Option<Either<Field, Function>> {
self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
}
pub fn resolve_record_field(
&self,
field: &ast::RecordExprField,
@ -1037,7 +1142,7 @@ impl<'db> SemanticsImpl<'db> {
fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
let mut cache = self.s2d_cache.borrow_mut();
let mut ctx = SourceToDefCtx { db: self.db, cache: &mut cache };
let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache };
f(&mut ctx)
}
@ -1187,7 +1292,7 @@ impl<'db> SemanticsImpl<'db> {
return None;
}
let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
let func = self.resolve_method_call(method_call_expr)?;
let res = match func.self_param(self.db)?.access(self.db) {
Access::Shared | Access::Exclusive => true,
Access::Owned => false,
@ -1451,7 +1556,7 @@ impl SemanticsScope<'_> {
/// necessarily a heuristic, as it doesn't take hygiene into account.
pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
let ctx = LowerCtx::with_file_id(self.db.upcast(), self.file_id);
let path = Path::from_src(path.clone(), &ctx)?;
let path = Path::from_src(&ctx, path.clone())?;
resolve_hir_path(self.db, &self.resolver, &path)
}
@ -1478,6 +1583,10 @@ impl SemanticsScope<'_> {
pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
self.resolver.extern_crate_decls_in_scope(self.db.upcast())
}
pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
self.resolver.impl_def() == other.resolver.impl_def()
}
}
#[derive(Debug)]


@ -97,7 +97,7 @@ use hir_def::{
FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId,
StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
};
use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use stdx::{impl_from, never};
@ -112,7 +112,7 @@ pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap
pub(super) struct SourceToDefCtx<'a, 'b> {
pub(super) db: &'b dyn HirDatabase,
pub(super) cache: &'a mut SourceToDefCache,
pub(super) dynmap_cache: &'a mut SourceToDefCache,
}
impl SourceToDefCtx<'_, '_> {
@ -300,7 +300,7 @@ impl SourceToDefCtx<'_, '_> {
fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap {
let db = self.db;
self.cache
self.dynmap_cache
.entry((container, file_id))
.or_insert_with(|| container.child_by_source(db, file_id))
}


@ -26,11 +26,10 @@ use hir_def::{
};
use hir_expand::{
builtin_fn_macro::BuiltinFnLikeExpander,
hygiene::Hygiene,
mod_path::path,
name,
name::{AsName, Name},
HirFileId, InFile,
HirFileId, InFile, MacroFileId, MacroFileIdExt,
};
use hir_ty::{
diagnostics::{
@ -281,25 +280,49 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
call: &ast::MethodCallExpr,
) -> Option<FunctionId> {
) -> Option<Function> {
let expr_id = self.expr_id(db, &call.clone().into())?;
let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))
Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into())
}
pub(crate) fn resolve_method_call_fallback(
&self,
db: &dyn HirDatabase,
call: &ast::MethodCallExpr,
) -> Option<Either<FunctionId, FieldId>> {
) -> Option<Either<Function, Field>> {
let expr_id = self.expr_id(db, &call.clone().into())?;
let inference_result = self.infer.as_ref()?;
match inference_result.method_resolution(expr_id) {
Some((f_in_trait, substs)) => {
Some(Either::Left(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs)))
}
None => inference_result.field_resolution(expr_id).map(Either::Right),
Some((f_in_trait, substs)) => Some(Either::Left(
self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into(),
)),
None => inference_result.field_resolution(expr_id).map(Into::into).map(Either::Right),
}
}
pub(crate) fn resolve_field(
&self,
db: &dyn HirDatabase,
field: &ast::FieldExpr,
) -> Option<Field> {
let expr_id = self.expr_id(db, &field.clone().into())?;
self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
}
pub(crate) fn resolve_field_fallback(
&self,
db: &dyn HirDatabase,
field: &ast::FieldExpr,
) -> Option<Either<Field, Function>> {
let expr_id = self.expr_id(db, &field.clone().into())?;
let inference_result = self.infer.as_ref()?;
match inference_result.field_resolution(expr_id) {
Some(field) => Some(Either::Left(field.into())),
None => inference_result.method_resolution(expr_id).map(|(f, substs)| {
Either::Right(self.resolve_impl_method_or_trait_def(db, f, substs).into())
}),
}
}
@ -418,15 +441,6 @@ impl SourceAnalyzer {
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
pub(crate) fn resolve_field(
&self,
db: &dyn HirDatabase,
field: &ast::FieldExpr,
) -> Option<Field> {
let expr_id = self.expr_id(db, &field.clone().into())?;
self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
}
pub(crate) fn resolve_record_field(
&self,
db: &dyn HirDatabase,
@ -484,7 +498,7 @@ impl SourceAnalyzer {
macro_call: InFile<&ast::MacroCall>,
) -> Option<Macro> {
let ctx = LowerCtx::with_file_id(db.upcast(), macro_call.file_id);
let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?;
self.resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
.map(|(it, _)| it.into())
@ -596,9 +610,8 @@ impl SourceAnalyzer {
}
// This must be a normal source file rather than macro file.
let hygiene = Hygiene::new(db.upcast(), self.file_id);
let ctx = LowerCtx::with_hygiene(db.upcast(), &hygiene);
let hir_path = Path::from_src(path.clone(), &ctx)?;
let ctx = LowerCtx::with_span_map(db.upcast(), db.span_map(self.file_id));
let hir_path = Path::from_src(&ctx, path.clone())?;
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
// trying to resolve foo::bar.
@ -755,14 +768,15 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>,
) -> Option<HirFileId> {
) -> Option<MacroFileId> {
let krate = self.resolver.krate();
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
self.resolver
.resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang))
.map(|(it, _)| macro_id_to_def_id(db.upcast(), it))
})?;
Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
// why the 64?
Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
}
pub(crate) fn resolve_variant(
@ -821,6 +835,52 @@ impl SourceAnalyzer {
false
}
pub(crate) fn resolve_offset_in_format_args(
&self,
db: &dyn HirDatabase,
format_args: InFile<&ast::FormatArgsExpr>,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
let implicits = self.body_source_map()?.implicit_format_args(format_args)?;
implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| {
(
*range,
resolve_hir_value_path(
db,
&self.resolver,
self.resolver.body_owner(),
&Path::from_known_path_with_no_generic(ModPath::from_segments(
PathKind::Plain,
Some(name.clone()),
)),
),
)
})
}
pub(crate) fn as_format_args_parts<'a>(
&'a self,
db: &'a dyn HirDatabase,
format_args: InFile<&ast::FormatArgsExpr>,
) -> Option<impl Iterator<Item = (TextRange, Option<PathResolution>)> + 'a> {
Some(self.body_source_map()?.implicit_format_args(format_args)?.iter().map(
move |(range, name)| {
(
*range,
resolve_hir_value_path(
db,
&self.resolver,
self.resolver.body_owner(),
&Path::from_known_path_with_no_generic(ModPath::from_segments(
PathKind::Plain,
Some(name.clone()),
)),
),
)
},
))
}
fn resolve_impl_method_or_trait_def(
&self,
db: &dyn HirDatabase,
@ -894,11 +954,12 @@ fn scope_for_offset(
}
// FIXME handle attribute expansion
let source = iter::successors(file_id.call_node(db.upcast()), |it| {
it.file_id.call_node(db.upcast())
})
.find(|it| it.file_id == from_file)
.filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
let source =
iter::successors(file_id.macro_file().map(|it| it.call_node(db.upcast())), |it| {
Some(it.file_id.macro_file()?.call_node(db.upcast()))
})
.find(|it| it.file_id == from_file)
.filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
Some((source.value.text_range(), scope))
})
.filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
@ -1039,24 +1100,7 @@ fn resolve_hir_path_(
};
let body_owner = resolver.body_owner();
let values = || {
resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(binding_id) => {
let var = Local { parent: body_owner?, binding_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
};
Some(res)
})
};
let values = || resolve_hir_value_path(db, resolver, body_owner, path);
let items = || {
resolver
@ -1076,6 +1120,30 @@ fn resolve_hir_path_(
.or_else(macros)
}
fn resolve_hir_value_path(
db: &dyn HirDatabase,
resolver: &Resolver,
body_owner: Option<DefWithBodyId>,
path: &Path,
) -> Option<PathResolution> {
resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(binding_id) => {
let var = Local { parent: body_owner?, binding_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
};
Some(res)
})
}
/// Resolves a path where we know it is a qualifier of another path.
///
/// For example, if we have:


@ -9,7 +9,7 @@ use hir_def::{
};
use hir_expand::{HirFileId, InFile};
use hir_ty::db::HirDatabase;
use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr};
use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr};
use crate::{Module, ModuleDef, Semantics};
@ -32,7 +32,7 @@ pub struct DeclarationLocation {
/// This points to the whole syntax node of the declaration.
pub ptr: SyntaxNodePtr,
/// This points to the [`syntax::ast::Name`] identifier of the declaration.
pub name_ptr: SyntaxNodePtr,
pub name_ptr: AstPtr<syntax::ast::Name>,
}
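
`name_ptr` switches from the untyped `SyntaxNodePtr` to `AstPtr<ast::Name>`, moving the node kind into the type. A toy sketch of that wrapper pattern (the types below are invented for illustration):

    use std::marker::PhantomData;

    // Untyped position, like `SyntaxNodePtr`.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct RawPtr { offset: usize }

    // Typed wrapper, like `AstPtr<N>`: same data, but the node kind lives
    // in the type, so a `Name` pointer can't be passed where another node
    // kind is expected.
    struct TypedPtr<N> { raw: RawPtr, _ty: PhantomData<fn() -> N> }

    impl<N> TypedPtr<N> {
        fn new(raw: RawPtr) -> Self {
            TypedPtr { raw, _ty: PhantomData }
        }
    }

    struct Name; // stand-in for `syntax::ast::Name`

    fn main() {
        let name_ptr: TypedPtr<Name> = TypedPtr::new(RawPtr { offset: 42 });
        assert_eq!(name_ptr.raw, RawPtr { offset: 42 });
    }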
impl DeclarationLocation {
@ -49,15 +49,6 @@ impl DeclarationLocation {
let node = resolve_node(db, self.hir_file_id, &self.ptr);
node.as_ref().original_file_range(db.upcast())
}
pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
if let Some(file_id) = self.hir_file_id.file_id() {
// fast path to prevent parsing
return Some(FileRange { file_id, range: self.name_ptr.text_range() });
}
let node = resolve_node(db, self.hir_file_id, &self.name_ptr);
node.as_ref().original_file_range_opt(db.upcast())
}
}
fn resolve_node(
@ -190,7 +181,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
name_ptr: SyntaxNodePtr::new(name.syntax()),
name_ptr: AstPtr::new(&name),
};
self.symbols.push(FileSymbol {
@ -294,7 +285,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(source.value.syntax()),
name_ptr: SyntaxNodePtr::new(name_node.syntax()),
name_ptr: AstPtr::new(&name_node),
};
if let Some(attrs) = def.attrs(self.db) {
@ -327,7 +318,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: declaration.file_id,
ptr: SyntaxNodePtr::new(module.syntax()),
name_ptr: SyntaxNodePtr::new(name_node.syntax()),
name_ptr: AstPtr::new(&name_node),
};
let def = ModuleDef::Module(module_id.into());


@ -328,6 +328,7 @@ fn augment_references_with_imports(
references
.iter()
.filter_map(|FileReference { range, name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module()))
})
.map(|(range, name, ref_module)| {
@ -455,6 +456,7 @@ fn add_enum_def(
.iter()
.flat_map(|(_, refs)| refs)
.filter_map(|FileReference { name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| scope.module())
})
.any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);


@ -186,6 +186,7 @@ fn augment_references_with_imports(
references
.iter()
.filter_map(|FileReference { name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module()))
})
.map(|(name, ref_module)| {
@ -238,6 +239,7 @@ fn add_tuple_struct_def(
.iter()
.flat_map(|(_, refs)| refs)
.filter_map(|FileReference { name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| scope.module())
})
.any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);
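
Both assists gain the same guard: `FileReference::name` is now a broader enum, so each site narrows it with `into_name_like()?` inside `filter_map`, silently dropping references that are not name-like. The `?`-inside-`filter_map` idiom, reduced:

    fn main() {
        let raw = ["12", "x", "7"];
        // Inside a `filter_map` closure, `?` on an `Option` drops the
        // element instead of returning from the enclosing function.
        let doubled: Vec<i32> = raw
            .iter()
            .filter_map(|s| {
                let n: i32 = s.parse().ok()?;
                Some(n * 2)
            })
            .collect();
        assert_eq!(doubled, [24, 14]);
    }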


@ -1,4 +1,5 @@
use crate::{AssistContext, Assists};
use hir::DescendPreference;
use ide_db::{
assists::{AssistId, AssistKind},
syntax_helpers::{
@ -35,7 +36,8 @@ pub(crate) fn extract_expressions_from_format_string(
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
let expanded_t = ast::String::cast(
ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone(), 0.into()),
ctx.sema
.descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone()),
)?;
if !is_format_string(&expanded_t) {
return None;
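
Note: the macro-descent API now takes an explicit `DescendPreference` instead of a method-specific variant carrying an offset; `SameKind` prefers the expansion token with the same `SyntaxKind` as the input, while `None` (used in the `extract_function` hunk below) yields all descendants. A sketch of the new call shape, assuming the `Semantics` methods exactly as spelled in these hunks:

    use hir::{DescendPreference, Semantics};
    use ide_db::RootDatabase;
    use syntax::ast::{self, AstToken};

    // Descend a format-string token into macro expansions, preferring a
    // descendant of the same syntax kind, then re-cast it as a string token.
    fn expanded_format_string(
        sema: &Semantics<'_, RootDatabase>,
        fmt_string: &ast::String,
    ) -> Option<ast::String> {
        let tok = sema
            .descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone());
        ast::String::cast(tok)
    }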

View file

@ -3,8 +3,8 @@ use std::iter;
use ast::make;
use either::Either;
use hir::{
HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics,
TypeInfo, TypeParam,
DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef,
PathResolution, Semantics, TypeInfo, TypeParam,
};
use ide_db::{
defs::{Definition, NameRefClass},
@ -147,7 +147,12 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
_ => format_function(ctx, module, &fun, old_indent, new_indent),
};
if fn_def.contains("ControlFlow") {
// There are external control flows
if fun
.control_flow
.kind
.is_some_and(|kind| matches!(kind, FlowKind::Break(_, _) | FlowKind::Continue(_)))
{
let scope = match scope {
ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
@ -751,7 +756,7 @@ impl FunctionBody {
.descendants_with_tokens()
.filter_map(SyntaxElement::into_token)
.filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
.flat_map(|t| sema.descend_into_macros(t, 0.into()))
.flat_map(|t| sema.descend_into_macros(DescendPreference::None, t))
.for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
}
}
@ -4968,6 +4973,27 @@ pub fn testfn(arg: &mut Foo) {
fn $0fun_name(arg: &mut Foo) {
arg.field = 8;
}
"#,
);
}
#[test]
fn does_not_import_control_flow() {
check_assist(
extract_function,
r#"
//- minicore: try
fn func() {
$0let cf = "I'm ControlFlow";$0
}
"#,
r#"
fn func() {
fun_name();
}
fn $0fun_name() {
let cf = "I'm ControlFlow";
}
"#,
);
}
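
Note: the old guard imported `ControlFlow` whenever the rendered function text contained the substring "ControlFlow"; the new `does_not_import_control_flow` test pins down the false positive (a string literal). The replacement consults the computed control flow of the extracted body instead. Restated minimally, with the assist-internal `FlowKind` assumed as shown above:

    // Import `ControlFlow` only when the extracted body genuinely breaks or
    // continues out of the selection, never because the text mentions it.
    fn needs_control_flow_import(kind: Option<&FlowKind>) -> bool {
        matches!(kind, Some(FlowKind::Break(_, _) | FlowKind::Continue(_)))
    }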

View file

@ -3,7 +3,7 @@ use std::{
iter,
};
use hir::{HasSource, ModuleSource};
use hir::{HasSource, HirFileIdExt, ModuleSource};
use ide_db::{
assists::{AssistId, AssistKind},
base_db::FileId,

View file

@ -1,4 +1,6 @@
use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef};
use hir::{
db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef,
};
use ide_db::base_db::FileId;
use syntax::{
ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},

View file

@ -19,8 +19,19 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// ```
pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let expr = ctx.find_node_at_offset::<BinExpr>()?;
let lhs = expr.lhs()?.syntax().clone();
let rhs = expr.rhs()?.syntax().clone();
let lhs = expr.lhs()?.syntax().clone();
let lhs = if let Some(bin_expr) = BinExpr::cast(lhs.clone()) {
if bin_expr.op_kind() == expr.op_kind() {
bin_expr.rhs()?.syntax().clone()
} else {
lhs
}
} else {
lhs
};
let op_range = expr.op_token()?.text_range();
// The assist should be applied only if the cursor is on the operator
let cursor_in_range = op_range.contains_range(ctx.selection_trimmed());
@ -114,6 +125,24 @@ mod tests {
)
}
#[test]
fn flip_binexpr_works_for_lhs_arith() {
check_assist(
flip_binexpr,
r"fn f() { let res = 1 + (2 - 3) +$0 4 + 5; }",
r"fn f() { let res = 1 + 4 + (2 - 3) + 5; }",
)
}
#[test]
fn flip_binexpr_works_for_lhs_cmp() {
check_assist(
flip_binexpr,
r"fn f() { let res = 1 + (2 - 3) >$0 4 + 5; }",
r"fn f() { let res = 4 + 5 < 1 + (2 - 3); }",
)
}
#[test]
fn flip_binexpr_works_inside_match() {
check_assist(
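
Note: `flip_binexpr` previously swapped the entire lhs subtree, so flipping the second `+` in `1 + (2 - 3) + 4 + 5` would have moved `1 + (2 - 3)` wholesale and reassociated the chain. The new narrowing step flips only the rhs of a nested `BinExpr` when its operator kind matches, as `flip_binexpr_works_for_lhs_arith` shows; comparisons (`flip_binexpr_works_for_lhs_cmp`) keep whole operands and mirror the operator instead. The narrowing step, isolated as a sketch:

    use syntax::{ast::BinExpr, AstNode, SyntaxNode};

    // For `a + b +|cursor| c`, flip `b` (the rhs of the nested `a + b`)
    // rather than the whole `a + b` subtree, so an equal-precedence chain
    // swaps adjacent operands without reassociating.
    fn effective_lhs(expr: &BinExpr) -> Option<SyntaxNode> {
        let lhs = expr.lhs()?.syntax().clone();
        Some(match BinExpr::cast(lhs.clone()) {
            Some(bin) if bin.op_kind() == expr.op_kind() => bin.rhs()?.syntax().clone(),
            _ => lhs,
        })
    }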

View file

@ -1,5 +1,5 @@
use crate::assist_context::{AssistContext, Assists};
use hir::{HasVisibility, HirDisplay, Module};
use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module};
use ide_db::{
assists::{AssistId, AssistKind},
base_db::{FileId, Upcast},

View file

@ -1,6 +1,7 @@
use std::collections::HashSet;
use hir::{self, HasCrate, HasSource, HasVisibility};
use hir::{self, HasCrate, HasVisibility};
use ide_db::path_transform::PathTransform;
use syntax::{
ast::{
self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _,
@ -105,7 +106,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
target,
|edit| {
// Create the function
let method_source = match method.source(ctx.db()) {
let method_source = match ctx.sema.source(method) {
Some(source) => source.value,
None => return,
};
@ -130,7 +131,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
vis,
fn_name,
type_params,
None,
method_source.where_clause(),
params,
body,
ret_type,
@ -183,6 +184,12 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let assoc_items = impl_def.get_or_create_assoc_item_list();
assoc_items.add_item(f.clone().into());
if let Some((target, source)) =
ctx.sema.scope(strukt.syntax()).zip(ctx.sema.scope(method_source.syntax()))
{
PathTransform::generic_transformation(&target, &source).apply(f.syntax());
}
if let Some(cap) = ctx.config.snippet_cap {
edit.add_tabstop_before(cap, f)
}
@ -454,6 +461,209 @@ impl Person {
);
}
#[test]
fn test_preserve_where_clause() {
check_assist(
generate_delegate_methods,
r#"
struct Inner<T>(T);
impl<T> Inner<T> {
fn get(&self) -> T
where
T: Copy,
T: PartialEq,
{
self.0
}
}
struct Struct<T> {
$0field: Inner<T>,
}
"#,
r#"
struct Inner<T>(T);
impl<T> Inner<T> {
fn get(&self) -> T
where
T: Copy,
T: PartialEq,
{
self.0
}
}
struct Struct<T> {
field: Inner<T>,
}
impl<T> Struct<T> {
$0fn get(&self) -> T where
T: Copy,
T: PartialEq, {
self.field.get()
}
}
"#,
);
}
#[test]
fn test_fixes_basic_self_references() {
check_assist(
generate_delegate_methods,
r#"
struct Foo {
field: $0Bar,
}
struct Bar;
impl Bar {
fn bar(&self, other: Self) -> Self {
other
}
}
"#,
r#"
struct Foo {
field: Bar,
}
impl Foo {
$0fn bar(&self, other: Bar) -> Bar {
self.field.bar(other)
}
}
struct Bar;
impl Bar {
fn bar(&self, other: Self) -> Self {
other
}
}
"#,
);
}
#[test]
fn test_fixes_nested_self_references() {
check_assist(
generate_delegate_methods,
r#"
struct Foo {
field: $0Bar,
}
struct Bar;
impl Bar {
fn bar(&mut self, a: (Self, [Self; 4]), b: Vec<Self>) {}
}
"#,
r#"
struct Foo {
field: Bar,
}
impl Foo {
$0fn bar(&mut self, a: (Bar, [Bar; 4]), b: Vec<Bar>) {
self.field.bar(a, b)
}
}
struct Bar;
impl Bar {
fn bar(&mut self, a: (Self, [Self; 4]), b: Vec<Self>) {}
}
"#,
);
}
#[test]
fn test_fixes_self_references_with_lifetimes_and_generics() {
check_assist(
generate_delegate_methods,
r#"
struct Foo<'a, T> {
$0field: Bar<'a, T>,
}
struct Bar<'a, T>(&'a T);
impl<'a, T> Bar<'a, T> {
fn bar(self, mut b: Vec<&'a Self>) -> &'a Self {
b.pop().unwrap()
}
}
"#,
r#"
struct Foo<'a, T> {
field: Bar<'a, T>,
}
impl<'a, T> Foo<'a, T> {
$0fn bar(self, mut b: Vec<&'a Bar<'_, T>>) -> &'a Bar<'_, T> {
self.field.bar(b)
}
}
struct Bar<'a, T>(&'a T);
impl<'a, T> Bar<'a, T> {
fn bar(self, mut b: Vec<&'a Self>) -> &'a Self {
b.pop().unwrap()
}
}
"#,
);
}
#[test]
fn test_fixes_self_references_across_macros() {
check_assist(
generate_delegate_methods,
r#"
//- /bar.rs
macro_rules! test_method {
() => {
pub fn test(self, b: Bar) -> Self {
self
}
};
}
pub struct Bar;
impl Bar {
test_method!();
}
//- /main.rs
mod bar;
struct Foo {
$0bar: bar::Bar,
}
"#,
r#"
mod bar;
struct Foo {
bar: bar::Bar,
}
impl Foo {
$0pub fn test(self,b:bar::Bar) ->bar::Bar {
self.bar.test(b)
}
}
"#,
);
}
#[test]
fn test_generate_delegate_visibility() {
check_assist_not_applicable(
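
Note: three fixes land in this file at once: the method source is fetched through `ctx.sema.source(method)` (so macro-produced sources stay attached to the semantics cache, which `test_fixes_self_references_across_macros` relies on), the `where` clause is carried over instead of dropped, and `PathTransform::generic_transformation` rewrites paths in the copied signature so `Self` resolves in the wrapper's scope. A hedged sketch of the path fix-up in isolation (`sema` and the scope nodes are assumed inputs):

    use ide_db::path_transform::PathTransform;

    // Requalify paths in `copied` (cloned from `source_node`'s scope) so they
    // resolve the same way inside `target_node`'s scope; this is what turns
    // `Self` into the concrete type name in the tests above.
    fn requalify(
        sema: &hir::Semantics<'_, ide_db::RootDatabase>,
        target_node: &syntax::SyntaxNode,
        source_node: &syntax::SyntaxNode,
        copied: &syntax::SyntaxNode,
    ) {
        if let Some((target, source)) = sema.scope(target_node).zip(sema.scope(source_node)) {
            PathTransform::generic_transformation(&target, &source).apply(copied);
        }
    }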

View file

@ -1,4 +1,4 @@
use hir::{HasSource, HirDisplay, InFile};
use hir::{HasSource, HirDisplay, InRealFile};
use ide_db::assists::{AssistId, AssistKind};
use syntax::{
ast::{self, make, HasArgList},
@ -114,14 +114,14 @@ fn add_variant_to_accumulator(
parent: PathParent,
) -> Option<()> {
let db = ctx.db();
let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
acc.add(
AssistId("generate_enum_variant", AssistKind::Generate),
"Generate variant",
target,
|builder| {
builder.edit_file(file_id.original_file(db));
builder.edit_file(file_id);
let node = builder.make_mut(enum_node);
let variant = make_variant(ctx, name_ref, parent);
node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));
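
Note: `original_ast_node` now returns `InRealFile`, whose `file_id` is guaranteed to be an on-disk `FileId`, so the `file_id.original_file(db)` projection before `edit_file` disappears. Roughly the shapes involved, with placeholder ids for illustration only (not the authoritative definitions):

    // Placeholder ids, purely illustrative.
    struct FileId(u32);
    enum HirFileId { Real(FileId), Macro(u32) }

    // `InFile` may point into a macro expansion and must be projected back to
    // a real file before editing; `InRealFile` already is one.
    struct InFile<T> { file_id: HirFileId, value: T }
    struct InRealFile<T> { file_id: FileId, value: T }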

View file

@ -1,5 +1,6 @@
use hir::{
Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, Type, TypeInfo,
Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics, Type,
TypeInfo,
};
use ide_db::{
base_db::FileId,
@ -510,7 +511,7 @@ fn assoc_fn_target_info(
}
fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
match &target {
match target {
GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'),
}

View file

@ -8,7 +8,7 @@ use ide_db::{
defs::Definition,
imports::insert_use::remove_path_if_in_use_stmt,
path_transform::PathTransform,
search::{FileReference, SearchScope},
search::{FileReference, FileReferenceNode, SearchScope},
source_change::SourceChangeBuilder,
syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
RootDatabase,
@ -148,7 +148,7 @@ pub(super) fn split_refs_and_uses<T: ast::AstNode>(
) -> (Vec<T>, Vec<ast::Path>) {
iter.into_iter()
.filter_map(|file_ref| match file_ref.name {
ast::NameLike::NameRef(name_ref) => Some(name_ref),
FileReferenceNode::NameRef(name_ref) => Some(name_ref),
_ => None,
})
.filter_map(|name_ref| match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) {
@ -346,7 +346,7 @@ fn inline(
match param.as_local(sema.db) {
Some(l) => usages_for_locals(l)
.map(|FileReference { name, range, .. }| match name {
ast::NameLike::NameRef(_) => body
FileReferenceNode::NameRef(_) => body
.syntax()
.covering_element(range)
.ancestors()
@ -372,7 +372,7 @@ fn inline(
if let Some(self_local) = params[0].2.as_local(sema.db) {
usages_for_locals(self_local)
.filter_map(|FileReference { name, range, .. }| match name {
ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)),
FileReferenceNode::NameRef(_) => Some(body.syntax().covering_element(range)),
_ => None,
})
.for_each(|usage| {

View file

@ -2,7 +2,7 @@ use hir::{PathResolution, Semantics};
use ide_db::{
base_db::FileId,
defs::Definition,
search::{FileReference, UsageSearchResult},
search::{FileReference, FileReferenceNode, UsageSearchResult},
RootDatabase,
};
use syntax::{
@ -63,7 +63,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
let wrap_in_parens = references
.into_iter()
.filter_map(|FileReference { range, name, .. }| match name {
ast::NameLike::NameRef(name) => Some((range, name)),
FileReferenceNode::NameRef(name) => Some((range, name)),
_ => None,
})
.map(|(range, name_ref)| {

View file

@ -1,6 +1,6 @@
use std::collections::{hash_map::Entry, HashMap};
use hir::{InFile, Module, ModuleSource};
use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
use ide_db::{
base_db::FileRange,
defs::Definition,
@ -167,7 +167,7 @@ fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<Sea
fn module_search_scope(db: &RootDatabase, module: hir::Module) -> Vec<SearchScope> {
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db);
if let Some((file_id, call_source)) = file_id.original_call_node(db) {
if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) {
(file_id, Some(call_source.text_range()))
} else {
(

View file

@ -1,4 +1,4 @@
use hir::{InFile, ModuleDef};
use hir::{InFile, MacroFileIdExt, ModuleDef};
use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
use itertools::Itertools;
use syntax::{
@ -43,12 +43,12 @@ pub(crate) fn replace_derive_with_manual_impl(
) -> Option<()> {
let attr = ctx.find_node_at_offset_with_descend::<ast::Attr>()?;
let path = attr.path()?;
let hir_file = ctx.sema.hir_file_for(attr.syntax());
if !hir_file.is_derive_attr_pseudo_expansion(ctx.db()) {
let macro_file = ctx.sema.hir_file_for(attr.syntax()).macro_file()?;
if !macro_file.is_derive_attr_pseudo_expansion(ctx.db()) {
return None;
}
let InFile { file_id, value } = hir_file.call_node(ctx.db())?;
let InFile { file_id, value } = macro_file.call_node(ctx.db());
if file_id.is_macro() {
// FIXME: make this work in macro files
return None;
@ -56,7 +56,7 @@ pub(crate) fn replace_derive_with_manual_impl(
// collect the derive paths from the #[derive] expansion
let current_derives = ctx
.sema
.parse_or_expand(hir_file)
.parse_or_expand(macro_file.into())
.descendants()
.filter_map(ast::Attr::cast)
.filter_map(|attr| attr.path())
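
Note: `hir_file_for` returns a `HirFileId` that may or may not be a macro expansion; the new `macro_file()?` projection (via `MacroFileIdExt`) narrows it to a macro file up front, after which `is_derive_attr_pseudo_expansion` and `call_node` are available directly and `call_node` no longer returns an `Option`. The guard, sketched with the names used in the hunk (treat the exact types as assumptions):

    use hir::MacroFileIdExt;
    use syntax::{ast, AstNode};

    // Return the enclosing macro file only if the attribute sits inside a
    // derive pseudo-expansion; otherwise the assist bails out.
    fn derive_macro_file(ctx: &AssistContext<'_>, attr: &ast::Attr) -> Option<hir::MacroFileId> {
        let macro_file = ctx.sema.hir_file_for(attr.syntax()).macro_file()?;
        macro_file.is_derive_attr_pseudo_expansion(ctx.db()).then_some(macro_file)
    }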

View file

@ -59,7 +59,10 @@ pub(crate) fn replace_named_generic_with_impl(
let mut path_types_to_replace = Vec::new();
for (_a, refs) in usage_refs.iter() {
for usage_ref in refs {
let param_node = find_path_type(&ctx.sema, &type_param_name, &usage_ref.name)?;
let Some(name_like) = usage_ref.name.clone().into_name_like() else {
continue;
};
let param_node = find_path_type(&ctx.sema, &type_param_name, &name_like)?;
path_types_to_replace.push(param_node);
}
}

Some files were not shown because too many files have changed in this diff.