Merge commit '5ecace48f693afaa6adf8cb23086b651db3aec96' into sync-from-ra

Laurențiu Nicola 2024-03-17 11:04:52 +02:00
parent ce642071d8
commit 5a95a53a39
153 changed files with 3382 additions and 1925 deletions

View file

@@ -36,7 +36,6 @@ jobs:
         - os: ubuntu-20.04
           target: x86_64-unknown-linux-gnu
           code-target: linux-x64
-          container: ubuntu:18.04
         - os: ubuntu-20.04
           target: aarch64-unknown-linux-gnu
           code-target: linux-arm64
@@ -63,14 +62,6 @@ jobs:
       with:
         fetch-depth: ${{ env.FETCH_DEPTH }}
 
-    - name: Install toolchain dependencies
-      if: matrix.container == 'ubuntu:18.04'
-      shell: bash
-      run: |
-        apt-get update && apt-get install -y build-essential curl
-        curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --profile minimal --default-toolchain none -y
-        echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
-
     - name: Install Rust toolchain
       run: |
         rustup update --no-self-update stable

Cargo.lock (generated)
View file

@@ -71,6 +71,7 @@ version = "0.0.0"
 dependencies = [
  "cfg",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lz4_flex",
  "rustc-hash",
  "salsa",
  "semver",
@@ -134,9 +135,9 @@ dependencies = [
 [[package]]
 name = "cc"
-version = "1.0.89"
+version = "1.0.90"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0ba8f7aaa012f30d5b2861462f6708eccd49c3c39863fe083a308035f63d723"
+checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5"
 
 [[package]]
 name = "cfg"
@@ -874,9 +875,9 @@ checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
 [[package]]
 name = "libloading"
-version = "0.8.2"
+version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2caa5afb8bf9f3a2652760ce7d4f62d21c4d5a423e68466fca30df82f2330164"
+checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19"
 dependencies = [
  "cfg-if",
  "windows-targets 0.52.4",
@@ -992,6 +993,12 @@ dependencies = [
  "url",
 ]
 
+[[package]]
+name = "lz4_flex"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "912b45c753ff5f7f5208307e8ace7d2a2e30d024e26d3509f3dce546c044ce15"
+
 [[package]]
 name = "mbe"
 version = "0.0.0"
@@ -1597,6 +1604,7 @@ dependencies = [
  "rayon",
  "rustc-hash",
  "scip",
+ "semver",
  "serde",
  "serde_json",
  "sourcegen",

View file

@@ -105,6 +105,10 @@ anyhow = "1.0.75"
 arrayvec = "0.7.4"
 bitflags = "2.4.1"
 cargo_metadata = "0.18.1"
+chalk-solve = { version = "0.96.0", default-features = false }
+chalk-ir = "0.96.0"
+chalk-recursive = { version = "0.96.0", default-features = false }
+chalk-derive = "0.96.0"
 command-group = "2.0.1"
 crossbeam-channel = "0.5.8"
 dissimilar = "1.0.7"

View file

@@ -12,6 +12,8 @@ rust-version.workspace = true
 doctest = false
 
 [dependencies]
+lz4_flex = { version = "0.11", default-features = false }
+
 la-arena.workspace = true
 salsa.workspace = true
 rustc-hash.workspace = true

View file

@@ -7,13 +7,13 @@ use salsa::Durability;
 use triomphe::Arc;
 use vfs::FileId;
 
-use crate::{CrateGraph, SourceDatabaseExt, SourceRoot, SourceRootId};
+use crate::{CrateGraph, SourceDatabaseExt, SourceDatabaseExt2, SourceRoot, SourceRootId};
 
 /// Encapsulate a bunch of raw `.set` calls on the database.
 #[derive(Default)]
 pub struct FileChange {
     pub roots: Option<Vec<SourceRoot>>,
-    pub files_changed: Vec<(FileId, Option<Arc<str>>)>,
+    pub files_changed: Vec<(FileId, Option<String>)>,
     pub crate_graph: Option<CrateGraph>,
 }
 
@@ -42,7 +42,7 @@ impl FileChange {
         self.roots = Some(roots);
     }
 
-    pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) {
+    pub fn change_file(&mut self, file_id: FileId, new_text: Option<String>) {
         self.files_changed.push((file_id, new_text))
     }
 
@@ -68,8 +68,8 @@ impl FileChange {
             let source_root = db.source_root(source_root_id);
             let durability = durability(&source_root);
             // XXX: can't actually remove the file, just reset the text
-            let text = text.unwrap_or_else(|| Arc::from(""));
-            db.set_file_text_with_durability(file_id, text, durability)
+            let text = text.unwrap_or_default();
+            db.set_file_text_with_durability(file_id, &text, durability)
         }
         if let Some(crate_graph) = self.crate_graph {
            db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH);

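For context: `set_file_text_with_durability` now takes a plain `&str`, and the compression happens behind the `SourceDatabaseExt2` helper trait introduced in the next file. A minimal standalone sketch of that trait shape, with a toy `Durability` standing in for the salsa type (not rust-analyzer's own code):

    // Sketch: a helper trait whose provided method supplies the common
    // default, delegating to the one required low-level setter.
    #[derive(Clone, Copy)]
    enum Durability {
        Low,
        High,
    }

    trait SetFileText {
        // Required: implemented once per database.
        fn set_file_text_with_durability(&mut self, text: &str, durability: Durability);

        // Provided: the common case defaults to low durability.
        fn set_file_text(&mut self, text: &str) {
            self.set_file_text_with_durability(text, Durability::Low);
        }
    }

The design choice here is that callers hand over borrowed text and the database decides how to store it, so the representation (compressed or not) stays an implementation detail.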
View file

@@ -7,6 +7,7 @@ mod input;
 
 use std::panic;
 
+use salsa::Durability;
 use syntax::{ast, Parse, SourceFile};
 use triomphe::Arc;
 
@@ -42,6 +43,7 @@ pub trait Upcast<T: ?Sized> {
     fn upcast(&self) -> &T;
 }
 
+pub const DEFAULT_FILE_TEXT_LRU_CAP: usize = 16;
 pub const DEFAULT_PARSE_LRU_CAP: usize = 128;
 pub const DEFAULT_BORROWCK_LRU_CAP: usize = 1024;
 
@@ -89,7 +91,10 @@ fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
 #[salsa::query_group(SourceDatabaseExtStorage)]
 pub trait SourceDatabaseExt: SourceDatabase {
     #[salsa::input]
+    fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;
+
     fn file_text(&self, file_id: FileId) -> Arc<str>;
+
     /// Path to a file, relative to the root of its source root.
     /// Source root of the file.
     #[salsa::input]
@@ -101,6 +106,44 @@ pub trait SourceDatabaseExt: SourceDatabase {
     fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
 }
 
+fn file_text(db: &dyn SourceDatabaseExt, file_id: FileId) -> Arc<str> {
+    let bytes = db.compressed_file_text(file_id);
+    let bytes =
+        lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
+    let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
+    Arc::from(text)
+}
+
+pub trait SourceDatabaseExt2 {
+    fn set_file_text(&mut self, file_id: FileId, text: &str) {
+        self.set_file_text_with_durability(file_id, text, Durability::LOW);
+    }
+
+    fn set_file_text_with_durability(
+        &mut self,
+        file_id: FileId,
+        text: &str,
+        durability: Durability,
+    );
+}
+
+impl<Db: ?Sized + SourceDatabaseExt> SourceDatabaseExt2 for Db {
+    fn set_file_text_with_durability(
+        &mut self,
+        file_id: FileId,
+        text: &str,
+        durability: Durability,
+    ) {
+        let bytes = text.as_bytes();
+        let compressed = lz4_flex::compress_prepend_size(bytes);
+        self.set_compressed_file_text_with_durability(
+            file_id,
+            Arc::from(compressed.as_slice()),
+            durability,
+        )
+    }
+}
+
 fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[CrateId]> {
     let graph = db.crate_graph();
     let mut crates = graph

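For context: file texts are now stored lz4-compressed in the salsa input and decompressed on demand by the derived `file_text` query. A minimal standalone sketch of that round-trip, using the same two `lz4_flex` calls as the code above (illustrative, not rust-analyzer code):

    fn main() {
        let text = "fn main() { println!(\"hello\"); }";

        // What the setter stores: compressed bytes with the uncompressed
        // length prepended, so decompression can pre-allocate exactly.
        let compressed = lz4_flex::compress_prepend_size(text.as_bytes());

        // What the derived query does on read.
        let bytes = lz4_flex::decompress_size_prepended(&compressed)
            .expect("lz4 decompression should not fail");
        assert_eq!(std::str::from_utf8(&bytes).unwrap(), text);
    }

The trade-off, presumably, is memory for CPU: most file texts sit untouched in the database for a long time, so paying a cheap lz4 decompression on the occasional read shrinks the resident set.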
View file

@@ -31,4 +31,4 @@ mbe.workspace = true
 syntax.workspace = true
 
 [lints]
 workspace = true

View file

@@ -47,6 +47,7 @@ impl CfgExpr {
     pub fn parse<S>(tt: &tt::Subtree<S>) -> CfgExpr {
         next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid)
     }
+
     /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
     pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
         match self {
@@ -62,7 +63,6 @@ impl CfgExpr {
             }
         }
     }
-
 fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> {
     let name = match it.next() {
         None => return None,

View file

@@ -28,19 +28,20 @@ pub enum CargoTestMessage {
     },
     Suite,
     Finished,
+    Custom {
+        text: String,
+    },
 }
 
 impl ParseFromLine for CargoTestMessage {
-    fn from_line(line: &str, error: &mut String) -> Option<Self> {
+    fn from_line(line: &str, _: &mut String) -> Option<Self> {
         let mut deserializer = serde_json::Deserializer::from_str(line);
         deserializer.disable_recursion_limit();
         if let Ok(message) = CargoTestMessage::deserialize(&mut deserializer) {
             return Some(message);
         }
 
-        error.push_str(line);
-        error.push('\n');
-        None
+        Some(CargoTestMessage::Custom { text: line.to_owned() })
     }
 
     fn from_eof() -> Option<Self> {

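For context: instead of pushing unparsable lines into an error buffer, the runner now wraps them in a new `Custom` variant so they still reach the UI. A simplified, standalone sketch of that fallback (plain serde/serde_json, without the recursion-limit tweak; hypothetical variant names):

    use serde::Deserialize;

    #[derive(Debug, Deserialize)]
    #[serde(tag = "type", rename_all = "camelCase")]
    enum CargoTestMessage {
        Suite,
        Finished,
        Custom { text: String },
    }

    fn from_line(line: &str) -> Option<CargoTestMessage> {
        if let Ok(message) = serde_json::from_str::<CargoTestMessage>(line) {
            return Some(message);
        }
        // Anything that is not a JSON test event (stray stdout from a
        // test, say) is surfaced as a Custom message instead of dropped.
        Some(CargoTestMessage::Custom { text: line.to_owned() })
    }

    fn main() {
        println!("{:?}", from_line(r#"{"type":"suite"}"#));
        println!("{:?}", from_line("plain text from a test"));
    }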
View file

@@ -10,7 +10,6 @@ use std::ops::Index;
 
 use base_db::CrateId;
 use cfg::{CfgExpr, CfgOptions};
-use either::Either;
 use hir_expand::{name::Name, HirFileId, InFile};
 use la_arena::{Arena, ArenaMap};
 use rustc_hash::FxHashMap;
@@ -45,7 +44,8 @@ pub struct Body {
     ///
     /// If this `Body` is for the body of a constant, this will just be
     /// empty.
-    pub params: Vec<PatId>,
+    pub params: Box<[PatId]>,
+    pub self_param: Option<BindingId>,
     /// The `ExprId` of the actual body expression.
     pub body_expr: ExprId,
     /// Block expressions in this body that may contain inner items.
@@ -55,7 +55,7 @@ pub struct Body {
 
 pub type ExprPtr = AstPtr<ast::Expr>;
 pub type ExprSource = InFile<ExprPtr>;
-pub type PatPtr = AstPtr<Either<ast::Pat, ast::SelfParam>>;
+pub type PatPtr = AstPtr<ast::Pat>;
 pub type PatSource = InFile<PatPtr>;
 pub type LabelPtr = AstPtr<ast::Label>;
@@ -63,6 +63,7 @@ pub type LabelSource = InFile<LabelPtr>;
 pub type FieldPtr = AstPtr<ast::RecordExprField>;
 pub type FieldSource = InFile<FieldPtr>;
+
 pub type PatFieldPtr = AstPtr<ast::RecordPatField>;
 pub type PatFieldSource = InFile<PatFieldPtr>;
@@ -88,6 +89,8 @@ pub struct BodySourceMap {
     label_map: FxHashMap<LabelSource, LabelId>,
     label_map_back: ArenaMap<LabelId, LabelSource>,
 
+    self_param: Option<InFile<AstPtr<ast::SelfParam>>>,
+
     /// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
     /// Instead, we use id of expression (`92`) to identify the field.
     field_map_back: FxHashMap<ExprId, FieldSource>,
@@ -215,10 +218,11 @@ impl Body {
     fn shrink_to_fit(&mut self) {
         let Self {
             body_expr: _,
+            params: _,
+            self_param: _,
             block_scopes,
             exprs,
             labels,
-            params,
             pats,
             bindings,
             binding_owners,
@@ -226,7 +230,6 @@ impl Body {
         block_scopes.shrink_to_fit();
         exprs.shrink_to_fit();
         labels.shrink_to_fit();
-        params.shrink_to_fit();
         pats.shrink_to_fit();
         bindings.shrink_to_fit();
         binding_owners.shrink_to_fit();
@@ -297,6 +300,7 @@ impl Default for Body {
             params: Default::default(),
             block_scopes: Default::default(),
             binding_owners: Default::default(),
+            self_param: Default::default(),
         }
     }
 }
@@ -354,14 +358,12 @@ impl BodySourceMap {
         self.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax)
     }
 
-    pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {
-        let src = node.map(|it| AstPtr::new(it).wrap_left());
-        self.pat_map.get(&src).cloned()
+    pub fn self_param_syntax(&self) -> Option<InFile<AstPtr<ast::SelfParam>>> {
+        self.self_param
     }
 
-    pub fn node_self_param(&self, node: InFile<&ast::SelfParam>) -> Option<PatId> {
-        let src = node.map(|it| AstPtr::new(it).wrap_right());
-        self.pat_map.get(&src).cloned()
+    pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {
+        self.pat_map.get(&node.map(AstPtr::new)).cloned()
     }
 
     pub fn label_syntax(&self, label: LabelId) -> LabelSource {
@@ -401,6 +403,7 @@ impl BodySourceMap {
     fn shrink_to_fit(&mut self) {
         let Self {
+            self_param: _,
             expr_map,
             expr_map_back,
             pat_map,

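For context: the `self` parameter no longer masquerades as a pattern inside `params`, which is what lets `PatPtr` above drop the `Either<ast::Pat, ast::SelfParam>` wrapper. The resulting shape, reduced to toy stand-in types (a sketch, not the real hir-def definitions):

    type PatId = u32; // stand-ins for the real arena indices
    type BindingId = u32;

    #[derive(Default)]
    struct Body {
        // Explicit parameters only; boxed, since the list never grows
        // after lowering finishes.
        params: Box<[PatId]>,
        // The implicit `self` binding, if the function has one.
        self_param: Option<BindingId>,
    }

    fn main() {
        let body = Body::default();
        assert!(body.params.is_empty() && body.self_param.is_none());
    }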
View file

@@ -4,7 +4,6 @@
 use std::mem;
 
 use base_db::CrateId;
-use either::Either;
 use hir_expand::{
     name::{name, AsName, Name},
     ExpandError, InFile,
@@ -29,7 +28,6 @@ use crate::{
     db::DefDatabase,
     expander::Expander,
     hir::{
-        dummy_expr_id,
         format_args::{
             self, FormatAlignment, FormatArgs, FormatArgsPiece, FormatArgument, FormatArgumentKind,
             FormatArgumentsCollector, FormatCount, FormatDebugHex, FormatOptions,
@@ -66,16 +64,7 @@ pub(super) fn lower(
         def_map: expander.module.def_map(db),
         source_map: BodySourceMap::default(),
         ast_id_map: db.ast_id_map(expander.current_file_id()),
-        body: Body {
-            exprs: Default::default(),
-            pats: Default::default(),
-            bindings: Default::default(),
-            binding_owners: Default::default(),
-            labels: Default::default(),
-            params: Vec::new(),
-            body_expr: dummy_expr_id(),
-            block_scopes: Vec::new(),
-        },
+        body: Body::default(),
         expander,
         current_try_block_label: None,
         is_lowering_assignee_expr: false,
@@ -191,35 +180,35 @@ impl ExprCollector<'_> {
         is_async_fn: bool,
     ) -> (Body, BodySourceMap) {
         if let Some((param_list, mut attr_enabled)) = param_list {
+            let mut params = vec![];
             if let Some(self_param) =
                 param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
             {
                 let is_mutable =
                     self_param.mut_token().is_some() && self_param.amp_token().is_none();
-                let ptr = AstPtr::new(&Either::Right(self_param));
                 let binding_id: la_arena::Idx<Binding> =
                     self.alloc_binding(name![self], BindingAnnotation::new(is_mutable, false));
-                let param_pat = self.alloc_pat(Pat::Bind { id: binding_id, subpat: None }, ptr);
-                self.add_definition_to_binding(binding_id, param_pat);
-                self.body.params.push(param_pat);
+                self.body.self_param = Some(binding_id);
+                self.source_map.self_param = Some(self.expander.in_file(AstPtr::new(&self_param)));
             }
 
             for (param, _) in param_list.params().zip(attr_enabled).filter(|(_, enabled)| *enabled)
             {
                 let param_pat = self.collect_pat_top(param.pat());
-                self.body.params.push(param_pat);
+                params.push(param_pat);
             }
+            self.body.params = params.into_boxed_slice();
         };
 
         self.body.body_expr = self.with_label_rib(RibKind::Closure, |this| {
             if is_async_fn {
                 match body {
                     Some(e) => {
+                        let syntax_ptr = AstPtr::new(&e);
                         let expr = this.collect_expr(e);
-                        this.alloc_expr_desugared(Expr::Async {
-                            id: None,
-                            statements: Box::new([]),
-                            tail: Some(expr),
-                        })
+                        this.alloc_expr_desugared_with_ptr(
+                            Expr::Async { id: None, statements: Box::new([]), tail: Some(expr) },
+                            syntax_ptr,
+                        )
                     }
                     None => this.missing_expr(),
                 }
@@ -405,7 +394,7 @@ impl ExprCollector<'_> {
             }
             ast::Expr::ParenExpr(e) => {
                 let inner = self.collect_expr_opt(e.expr());
-                // make the paren expr point to the inner expression as well
+                // make the paren expr point to the inner expression as well for IDE resolution
                 let src = self.expander.in_file(syntax_ptr);
                 self.source_map.expr_map.insert(src, inner);
                 inner
@@ -707,6 +696,7 @@ impl ExprCollector<'_> {
             .alloc_label_desugared(Label { name: Name::generate_new_name(self.body.labels.len()) });
         let old_label = self.current_try_block_label.replace(label);
 
+        let ptr = AstPtr::new(&e).upcast();
         let (btail, expr_id) = self.with_labeled_rib(label, |this| {
             let mut btail = None;
             let block = this.collect_block_(e, |id, statements, tail| {
@@ -716,23 +706,21 @@ impl ExprCollector<'_> {
             (btail, block)
         });
 
-        let callee = self.alloc_expr_desugared(Expr::Path(try_from_output));
+        let callee = self.alloc_expr_desugared_with_ptr(Expr::Path(try_from_output), ptr);
         let next_tail = match btail {
-            Some(tail) => self.alloc_expr_desugared(Expr::Call {
-                callee,
-                args: Box::new([tail]),
-                is_assignee_expr: false,
-            }),
+            Some(tail) => self.alloc_expr_desugared_with_ptr(
+                Expr::Call { callee, args: Box::new([tail]), is_assignee_expr: false },
+                ptr,
+            ),
             None => {
-                let unit = self.alloc_expr_desugared(Expr::Tuple {
-                    exprs: Box::new([]),
-                    is_assignee_expr: false,
-                });
-                self.alloc_expr_desugared(Expr::Call {
-                    callee,
-                    args: Box::new([unit]),
-                    is_assignee_expr: false,
-                })
+                let unit = self.alloc_expr_desugared_with_ptr(
+                    Expr::Tuple { exprs: Box::new([]), is_assignee_expr: false },
+                    ptr,
+                );
+                self.alloc_expr_desugared_with_ptr(
+                    Expr::Call { callee, args: Box::new([unit]), is_assignee_expr: false },
+                    ptr,
+                )
             }
         };
         let Expr::Block { tail, .. } = &mut self.body.exprs[expr_id] else {
@@ -1067,16 +1055,12 @@ impl ExprCollector<'_> {
                 None => None,
             },
         );
-        match expansion {
-            Some(tail) => {
-                // Make the macro-call point to its expanded expression so we can query
-                // semantics on syntax pointers to the macro
-                let src = self.expander.in_file(syntax_ptr);
-                self.source_map.expr_map.insert(src, tail);
-                Some(tail)
-            }
-            None => None,
-        }
+        expansion.inspect(|&tail| {
+            // Make the macro-call point to its expanded expression so we can query
+            // semantics on syntax pointers to the macro
+            let src = self.expander.in_file(syntax_ptr);
+            self.source_map.expr_map.insert(src, tail);
+        })
     }
 
     fn collect_stmt(&mut self, statements: &mut Vec<Statement>, s: ast::Stmt) {
@@ -1261,7 +1245,7 @@ impl ExprCollector<'_> {
             (Some(id), Pat::Bind { id, subpat })
         };
 
-        let ptr = AstPtr::new(&Either::Left(pat));
+        let ptr = AstPtr::new(&pat);
         let pat = self.alloc_pat(pattern, ptr);
         if let Some(binding_id) = binding {
             self.add_definition_to_binding(binding_id, pat);
@@ -1359,9 +1343,10 @@ impl ExprCollector<'_> {
                     suffix: suffix.into_iter().map(|p| self.collect_pat(p, binding_list)).collect(),
                 }
             }
+            #[rustfmt::skip] // https://github.com/rust-lang/rustfmt/issues/5676
            ast::Pat::LiteralPat(lit) => 'b: {
-                let Some((hir_lit, ast_lit)) = pat_literal_to_hir(lit) else { break 'b Pat::Missing };
+                let Some((hir_lit, ast_lit)) = pat_literal_to_hir(lit) else {
+                    break 'b Pat::Missing;
+                };
                 let expr = Expr::Literal(hir_lit);
                 let expr_ptr = AstPtr::new(&ast::Expr::Literal(ast_lit));
                 let expr_id = self.alloc_expr(expr, expr_ptr);
@@ -1397,7 +1382,7 @@ impl ExprCollector<'_> {
             ast::Pat::MacroPat(mac) => match mac.macro_call() {
                 Some(call) => {
                     let macro_ptr = AstPtr::new(&call);
-                    let src = self.expander.in_file(AstPtr::new(&Either::Left(pat)));
+                    let src = self.expander.in_file(AstPtr::new(&pat));
                     let pat =
                         self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
                             this.collect_pat_opt(expanded_pat, binding_list)
@@ -1426,7 +1411,7 @@ impl ExprCollector<'_> {
                 Pat::Range { start, end }
             }
         };
-        let ptr = AstPtr::new(&Either::Left(pat));
+        let ptr = AstPtr::new(&pat);
         self.alloc_pat(pattern, ptr)
     }
 
@@ -1987,10 +1972,19 @@ impl ExprCollector<'_> {
         self.source_map.expr_map.insert(src, id);
         id
     }
-    // FIXME: desugared exprs don't have ptr, that's wrong and should be fixed somehow.
+    // FIXME: desugared exprs don't have ptr, that's wrong and should be fixed.
+    // Migrate to alloc_expr_desugared_with_ptr and then rename back
     fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId {
         self.body.exprs.alloc(expr)
     }
+    fn alloc_expr_desugared_with_ptr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
+        let src = self.expander.in_file(ptr);
+        let id = self.body.exprs.alloc(expr);
+        self.source_map.expr_map_back.insert(id, src);
+        // We intentionally don't fill this as it could overwrite a non-desugared entry
+        // self.source_map.expr_map.insert(src, id);
+        id
+    }
     fn missing_expr(&mut self) -> ExprId {
         self.alloc_expr_desugared(Expr::Missing)
     }

View file

@@ -48,7 +48,16 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
     let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false };
     if let DefWithBodyId::FunctionId(it) = owner {
         p.buf.push('(');
-        body.params.iter().zip(db.function_data(it).params.iter()).for_each(|(&param, ty)| {
+        let params = &db.function_data(it).params;
+        let mut params = params.iter();
+        if let Some(self_param) = body.self_param {
+            p.print_binding(self_param);
+            p.buf.push(':');
+            if let Some(ty) = params.next() {
+                p.print_type_ref(ty);
+            }
+        }
+        body.params.iter().zip(params).for_each(|(&param, ty)| {
             p.print_pat(param);
             p.buf.push(':');
             p.print_type_ref(ty);

View file

@@ -96,6 +96,9 @@ impl ExprScopes {
             scope_by_expr: ArenaMap::with_capacity(body.exprs.len()),
         };
         let mut root = scopes.root_scope();
+        if let Some(self_param) = body.self_param {
+            scopes.add_bindings(body, root, self_param);
+        }
         scopes.add_params_bindings(body, root, &body.params);
         compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root);
         scopes

View file

@@ -76,7 +76,7 @@ impl ChildBySource for ItemScope {
         self.extern_crate_decls()
             .for_each(|ext| insert_item_loc(db, res, file_id, ext, keys::EXTERN_CRATE));
         self.use_decls().for_each(|ext| insert_item_loc(db, res, file_id, ext, keys::USE));
-        self.unnamed_consts(db)
+        self.unnamed_consts()
             .for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST));
         self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
             |(ast_id, call_id)| {

View file

@@ -715,7 +715,7 @@ impl<'a> AssocItemCollector<'a> {
             }
             AssocItem::MacroCall(call) => {
                 let file_id = self.expander.current_file_id();
-                let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call];
+                let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call];
                 let module = self.expander.module.local_id;
 
                 let resolver = |path| {
@@ -734,7 +734,7 @@ impl<'a> AssocItemCollector<'a> {
                 match macro_call_as_call_id(
                     self.db.upcast(),
                     &AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
-                    call_site,
+                    ctxt,
                     expand_to,
                     self.expander.module.krate(),
                     resolver,
@@ -745,6 +745,7 @@ impl<'a> AssocItemCollector<'a> {
                         self.collect_macro_items(res, &|| hir_expand::MacroCallKind::FnLike {
                             ast_id: InFile::new(file_id, ast_id),
                             expand_to: hir_expand::ExpandTo::Items,
+                            eager: None,
                         });
                     }
                     Ok(None) => (),
@@ -754,6 +755,7 @@ impl<'a> AssocItemCollector<'a> {
                             MacroCallKind::FnLike {
                                 ast_id: InFile::new(file_id, ast_id),
                                 expand_to,
+                                eager: None,
                             },
                             Clone::clone(path),
                         ));

View file

@@ -191,9 +191,9 @@ impl StructData {
         let krate = loc.container.krate;
         let item_tree = loc.id.item_tree(db);
         let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
-        let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
+        let cfg_options = db.crate_graph()[krate].cfg_options.clone();
-        let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+        let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
 
         let mut flags = StructFlags::NO_FLAGS;
         if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() {
@@ -248,9 +248,9 @@ impl StructData {
         let krate = loc.container.krate;
         let item_tree = loc.id.item_tree(db);
         let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
-        let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
+        let cfg_options = db.crate_graph()[krate].cfg_options.clone();
-        let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+        let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
 
         let mut flags = StructFlags::NO_FLAGS;
         if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() {
             flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL;

View file

@@ -309,13 +309,9 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
                 kind: kind(loc.expander, loc.id.file_id(), makro.ast_id.upcast()),
                 local_inner: false,
                 allow_internal_unsafe: loc.allow_internal_unsafe,
-                span: db
-                    .span_map(loc.id.file_id())
-                    .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),
                 edition: loc.edition,
             }
         }
 
         MacroId::MacroRulesId(it) => {
             let loc: MacroRulesLoc = it.lookup(db);
@@ -328,9 +324,6 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
                 allow_internal_unsafe: loc
                     .flags
                     .contains(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE),
-                span: db
-                    .span_map(loc.id.file_id())
-                    .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),
                 edition: loc.edition,
             }
         }
@@ -348,9 +341,6 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
                 ),
                 local_inner: false,
                 allow_internal_unsafe: false,
-                span: db
-                    .span_map(loc.id.file_id())
-                    .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),
                 edition: loc.edition,
             }
         }

View file

@@ -241,30 +241,8 @@ impl ItemScope {
         })
     }
 
-    pub fn unnamed_consts<'a>(
-        &'a self,
-        db: &'a dyn DefDatabase,
-    ) -> impl Iterator<Item = ConstId> + 'a {
-        // FIXME: Also treat consts named `_DERIVE_*` as unnamed, since synstructure generates those.
-        // Should be removed once synstructure stops doing that.
-        let synstructure_hack_consts = self.values.values().filter_map(|(item, _, _)| match item {
-            &ModuleDefId::ConstId(id) => {
-                let loc = id.lookup(db);
-                let item_tree = loc.id.item_tree(db);
-                if item_tree[loc.id.value]
-                    .name
-                    .as_ref()
-                    .map_or(false, |n| n.to_smol_str().starts_with("_DERIVE_"))
-                {
-                    Some(id)
-                } else {
-                    None
-                }
-            }
-            _ => None,
-        });
-        self.unnamed_consts.iter().copied().chain(synstructure_hack_consts)
+    pub fn unnamed_consts(&self) -> impl Iterator<Item = ConstId> + '_ {
+        self.unnamed_consts.iter().copied()
     }
 
     /// Iterate over all module scoped macros

View file

@@ -49,7 +49,7 @@ use intern::Interned;
 use la_arena::{Arena, Idx, IdxRange, RawIdx};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::{AstIdNode, FileAstId, Span};
+use span::{AstIdNode, FileAstId, SyntaxContextId};
 use stdx::never;
 use syntax::{ast, match_ast, SyntaxKind};
 use triomphe::Arc;
@@ -790,8 +790,7 @@ pub struct MacroCall {
     pub path: Interned<ModPath>,
     pub ast_id: FileAstId<ast::MacroCall>,
     pub expand_to: ExpandTo,
-    // FIXME: We need to move this out. It invalidates the item tree when typing inside the macro call.
-    pub call_site: Span,
+    pub ctxt: SyntaxContextId,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]

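For context: the FIXME removed above names the motivation. A full `Span` carries a text range, so any edit inside the macro call shifted that range and invalidated the item tree; the hygiene `SyntaxContextId` alone is stable across such edits. A toy illustration of the distinction (simplified stand-in types, not the real span crate):

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct SyntaxContextId(u32);

    struct Span {
        range: (u32, u32),    // byte offsets: shift whenever the user types
        ctx: SyntaxContextId, // hygiene context: stable across those edits
    }

    // Before: typing inside `m!(...)` changed `call_site.range`, so the
    // item tree containing this call was recomputed on every keystroke.
    struct MacroCallBefore { call_site: Span }

    // After: only the stable part is stored, so the item tree can be reused.
    struct MacroCallAfter { ctxt: SyntaxContextId }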
View file

@@ -560,35 +560,32 @@ impl<'a> Ctx<'a> {
     fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
         let span_map = self.span_map();
-        let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, &mut |range| {
+        let path = m.path()?;
+        let range = path.syntax().text_range();
+        let path = Interned::new(ModPath::from_src(self.db.upcast(), path, &mut |range| {
             span_map.span_for_range(range).ctx
         })?);
         let ast_id = self.source_ast_id_map.ast_id(m);
         let expand_to = hir_expand::ExpandTo::from_call_site(m);
-        let res = MacroCall {
-            path,
-            ast_id,
-            expand_to,
-            call_site: span_map.span_for_range(m.syntax().text_range()),
-        };
+        let res = MacroCall { path, ast_id, expand_to, ctxt: span_map.span_for_range(range).ctx };
         Some(id(self.data().macro_calls.alloc(res)))
     }
 
     fn lower_macro_rules(&mut self, m: &ast::MacroRules) -> Option<FileItemTreeId<MacroRules>> {
-        let name = m.name().map(|it| it.as_name())?;
+        let name = m.name()?;
         let ast_id = self.source_ast_id_map.ast_id(m);
 
-        let res = MacroRules { name, ast_id };
+        let res = MacroRules { name: name.as_name(), ast_id };
         Some(id(self.data().macro_rules.alloc(res)))
     }
 
     fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<FileItemTreeId<Macro2>> {
-        let name = m.name().map(|it| it.as_name())?;
+        let name = m.name()?;
         let ast_id = self.source_ast_id_map.ast_id(m);
         let visibility = self.lower_visibility(m);
 
-        let res = Macro2 { name, ast_id, visibility };
+        let res = Macro2 { name: name.as_name(), ast_id, visibility };
         Some(id(self.data().macro_defs.alloc(res)))
     }
View file

@@ -487,12 +487,12 @@ impl Printer<'_> {
                 }
             }
             ModItem::MacroCall(it) => {
-                let MacroCall { path, ast_id, expand_to, call_site } = &self.tree[it];
+                let MacroCall { path, ast_id, expand_to, ctxt } = &self.tree[it];
                 let _ = writeln!(
                     self,
-                    "// AstId: {:?}, Span: {}, ExpandTo: {:?}",
+                    "// AstId: {:?}, SyntaxContext: {}, ExpandTo: {:?}",
                     ast_id.erase().into_raw(),
-                    call_site,
+                    ctxt,
                     expand_to
                 );
                 wln!(self, "{}!(...);", path.display(self.db.upcast()));

View file

@@ -278,7 +278,7 @@ m!();
 // AstId: 2
 pub macro m2 { ... }
 
-// AstId: 3, Span: 0:3@0..5#0, ExpandTo: Items
+// AstId: 3, SyntaxContext: 0, ExpandTo: Items
 m!(...);
 "#]],
     );

View file

@@ -90,7 +90,7 @@ use hir_expand::{
 use item_tree::ExternBlock;
 use la_arena::Idx;
 use nameres::DefMap;
-use span::{AstIdNode, FileAstId, FileId, Span};
+use span::{AstIdNode, FileAstId, FileId, SyntaxContextId};
 use stdx::impl_from;
 use syntax::{ast, AstNode};
 
@@ -1342,21 +1342,22 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
         let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
         let span_map = db.span_map(self.file_id);
         let path = self.value.path().and_then(|path| {
-            path::ModPath::from_src(db, path, &mut |range| {
+            let range = path.syntax().text_range();
+            let mod_path = path::ModPath::from_src(db, path, &mut |range| {
                 span_map.as_ref().span_for_range(range).ctx
-            })
+            })?;
+            let call_site = span_map.span_for_range(range);
+            Some((call_site, mod_path))
         });
 
-        let Some(path) = path else {
+        let Some((call_site, path)) = path else {
             return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
         };
 
-        let call_site = span_map.span_for_range(self.value.syntax().text_range());
-
         macro_call_as_call_id_with_eager(
             db,
             &AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
-            call_site,
+            call_site.ctx,
             expands_to,
             krate,
             resolver,
@@ -1381,7 +1382,7 @@ impl<T: AstIdNode> AstIdWithPath<T> {
 fn macro_call_as_call_id(
     db: &dyn ExpandDatabase,
     call: &AstIdWithPath<ast::MacroCall>,
-    call_site: Span,
+    call_site: SyntaxContextId,
     expand_to: ExpandTo,
     krate: CrateId,
     resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
@@ -1393,7 +1394,7 @@ fn macro_call_as_call_id(
 fn macro_call_as_call_id_with_eager(
     db: &dyn ExpandDatabase,
     call: &AstIdWithPath<ast::MacroCall>,
-    call_site: Span,
+    call_site: SyntaxContextId,
     expand_to: ExpandTo,
     krate: CrateId,
     resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@@ -1403,17 +1404,20 @@ fn macro_call_as_call_id_with_eager(
         resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?;
 
     let res = match def.kind {
-        MacroDefKind::BuiltInEager(..) => {
-            let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
-            expand_eager_macro_input(db, krate, macro_call, def, call_site, &|path| {
-                eager_resolver(path).filter(MacroDefId::is_fn_like)
-            })
-        }
+        MacroDefKind::BuiltInEager(..) => expand_eager_macro_input(
+            db,
+            krate,
+            &call.ast_id.to_node(db),
+            call.ast_id,
+            def,
+            call_site,
+            &|path| eager_resolver(path).filter(MacroDefId::is_fn_like),
+        ),
         _ if def.is_fn_like() => ExpandResult {
-            value: Some(def.as_lazy_macro(
+            value: Some(def.make_call(
                 db,
                 krate,
-                MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
+                MacroCallKind::FnLike { ast_id: call.ast_id, expand_to, eager: None },
                 call_site,
             )),
             err: None,

View file

@@ -528,3 +528,121 @@ impl < > $crate::fmt::Debug for Command< > where {
 }"#]],
     );
 }
+
+#[test]
+fn test_debug_expand_with_cfg() {
+    check(
+        r#"
+//- minicore: derive, fmt
+use core::fmt::Debug;
+
+#[derive(Debug)]
+struct HideAndShow {
+    #[cfg(never)]
+    always_hide: u32,
+    #[cfg(not(never))]
+    always_show: u32,
+}
+#[derive(Debug)]
+enum HideAndShowEnum {
+    #[cfg(never)]
+    AlwaysHide,
+    #[cfg(not(never))]
+    AlwaysShow{
+        #[cfg(never)]
+        always_hide: u32,
+        #[cfg(not(never))]
+        always_show: u32,
+    }
+}
+"#,
+        expect![[r#"
+use core::fmt::Debug;
+
+#[derive(Debug)]
+struct HideAndShow {
+    #[cfg(never)]
+    always_hide: u32,
+    #[cfg(not(never))]
+    always_show: u32,
+}
+#[derive(Debug)]
+enum HideAndShowEnum {
+    #[cfg(never)]
+    AlwaysHide,
+    #[cfg(not(never))]
+    AlwaysShow{
+        #[cfg(never)]
+        always_hide: u32,
+        #[cfg(not(never))]
+        always_show: u32,
+    }
+}
+impl < > $crate::fmt::Debug for HideAndShow< > where {
+    fn fmt(&self , f: &mut $crate::fmt::Formatter) -> $crate::fmt::Result {
+        match self {
+            HideAndShow {
+                always_show: always_show,
+            }
+            =>f.debug_struct("HideAndShow").field("always_show", &always_show).finish()
+        }
+    }
+}
+impl < > $crate::fmt::Debug for HideAndShowEnum< > where {
+    fn fmt(&self , f: &mut $crate::fmt::Formatter) -> $crate::fmt::Result {
+        match self {
+            HideAndShowEnum::AlwaysShow {
+                always_show: always_show,
+            }
+            =>f.debug_struct("AlwaysShow").field("always_show", &always_show).finish(),
+        }
+    }
+}"#]],
+    );
+}
+
+#[test]
+fn test_default_expand_with_cfg() {
+    check(
+        r#"
+//- minicore: derive, default
+#[derive(Default)]
+struct Foo {
+    field1: i32,
+    #[cfg(never)]
+    field2: (),
+}
+#[derive(Default)]
+enum Bar {
+    Foo,
+    #[cfg_attr(not(never), default)]
+    Bar,
+}
+"#,
+        expect![[r#"
+#[derive(Default)]
+struct Foo {
+    field1: i32,
+    #[cfg(never)]
+    field2: (),
+}
+#[derive(Default)]
+enum Bar {
+    Foo,
+    #[cfg_attr(not(never), default)]
+    Bar,
+}
+impl < > $crate::default::Default for Foo< > where {
+    fn default() -> Self {
+        Foo {
+            field1: $crate::default::Default::default(),
+        }
+    }
+}
+impl < > $crate::default::Default for Bar< > where {
+    fn default() -> Self {
+        Bar::Bar
+    }
+}"#]],
+    );
+}

View file

@@ -171,7 +171,7 @@ fn main(foo: ()) {
 }
 
 fn main(foo: ()) {
-    /* error: unresolved macro unresolved */"helloworld!"#0:3@207..323#2#;
+    /* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#0#;
 }
 }

View file

@@ -33,7 +33,7 @@ m!(&k");
 "#,
         expect![[r#"
 macro_rules! m { ($i:literal) => {}; }
-/* error: mismatched delimiters */"#]],
+/* error: expected literal */"#]],
     );
 }

View file

@@ -98,7 +98,7 @@ macro_rules! m1 { ($x:ident) => { ($x } }
 macro_rules! m2 { ($x:ident) => {} }
 
 /* error: macro definition has parse errors */
-/* error: mismatched delimiters */
+/* error: expected ident */
 "#]],
     )
 }

View file

@@ -61,15 +61,16 @@ use std::ops::Deref;
 use base_db::{CrateId, Edition, FileId};
 use hir_expand::{
-    name::Name, proc_macro::ProcMacroKind, HirFileId, InFile, MacroCallId, MacroDefId,
+    name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId,
 };
 use itertools::Itertools;
 use la_arena::Arena;
 use rustc_hash::{FxHashMap, FxHashSet};
-use span::FileAstId;
+use span::{FileAstId, ROOT_ERASED_FILE_AST_ID};
 use stdx::format_to;
 use syntax::{ast, SmolStr};
 use triomphe::Arc;
+use tt::TextRange;
 
 use crate::{
     db::DefDatabase,
@@ -677,6 +678,25 @@ impl ModuleData {
         }
     }
 
+    pub fn definition_source_range(&self, db: &dyn DefDatabase) -> InFile<TextRange> {
+        match &self.origin {
+            &ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => {
+                InFile::new(
+                    definition.into(),
+                    ErasedAstId::new(definition.into(), ROOT_ERASED_FILE_AST_ID)
+                        .to_range(db.upcast()),
+                )
+            }
+            &ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
+                definition_tree_id.file_id(),
+                AstId::new(definition_tree_id.file_id(), definition).to_range(db.upcast()),
+            ),
+            ModuleOrigin::BlockExpr { block, .. } => {
+                InFile::new(block.file_id, block.to_range(db.upcast()))
+            }
+        }
+    }
+
     /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
     /// `None` for the crate root or block.
     pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> {
@@ -684,6 +704,13 @@ impl ModuleData {
         let value = decl.to_node(db.upcast());
         Some(InFile { file_id: decl.file_id, value })
     }
+
+    /// Returns the range which declares this module, either a `mod foo;` or a `mod foo {}`.
+    /// `None` for the crate root or block.
+    pub fn declaration_source_range(&self, db: &dyn DefDatabase) -> Option<InFile<TextRange>> {
+        let decl = self.origin.declaration()?;
+        Some(InFile { file_id: decl.file_id, value: decl.to_range(db.upcast()) })
+    }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]

View file

@@ -5,7 +5,7 @@ use hir_expand::{
     attrs::{Attr, AttrId, AttrInput},
     MacroCallId, MacroCallKind, MacroDefId,
 };
-use span::Span;
+use span::SyntaxContextId;
 use syntax::{ast, SmolStr};
 use triomphe::Arc;
 
@@ -109,14 +109,14 @@ pub(super) fn attr_macro_as_call_id(
     let arg = match macro_attr.input.as_deref() {
         Some(AttrInput::TokenTree(tt)) => {
             let mut tt = tt.as_ref().clone();
-            tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);
+            tt.delimiter.kind = tt::DelimiterKind::Invisible;
             Some(tt)
         }
         _ => None,
     };
 
-    def.as_lazy_macro(
+    def.make_call(
         db.upcast(),
         krate,
         MacroCallKind::Attr {
@@ -124,7 +124,7 @@ pub(super) fn attr_macro_as_call_id(
             attr_args: arg.map(Arc::new),
             invoc_attr_index: macro_attr.id,
         },
-        macro_attr.span,
+        macro_attr.ctxt,
     )
 }
 
@@ -133,14 +133,14 @@ pub(super) fn derive_macro_as_call_id(
     item_attr: &AstIdWithPath<ast::Adt>,
     derive_attr_index: AttrId,
     derive_pos: u32,
-    call_site: Span,
+    call_site: SyntaxContextId,
     krate: CrateId,
     resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
 ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
     let (macro_id, def_id) = resolver(item_attr.path.clone())
         .filter(|(_, def_id)| def_id.is_derive())
         .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
-    let call_id = def_id.as_lazy_macro(
+    let call_id = def_id.make_call(
         db.upcast(),
         krate,
         MacroCallKind::Derive {

View file

@@ -230,13 +230,13 @@ enum MacroDirectiveKind {
     FnLike {
         ast_id: AstIdWithPath<ast::MacroCall>,
         expand_to: ExpandTo,
-        call_site: Span,
+        ctxt: SyntaxContextId,
     },
     Derive {
         ast_id: AstIdWithPath<ast::Adt>,
         derive_attr: AttrId,
         derive_pos: usize,
-        call_site: Span,
+        ctxt: SyntaxContextId,
     },
     Attr {
         ast_id: AstIdWithPath<ast::Item>,
@@ -1126,7 +1126,7 @@ impl DefCollector<'_> {
         let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
 
         match &directive.kind {
-            MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
+            MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
                 let call_id = macro_call_as_call_id(
                     self.db.upcast(),
                     ast_id,
@@ -1146,7 +1146,7 @@ impl DefCollector<'_> {
                     return Resolved::Yes;
                 }
             }
-            MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
+            MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, ctxt: call_site } => {
                 let id = derive_macro_as_call_id(
                     self.db,
                     ast_id,
@@ -1266,7 +1266,7 @@ impl DefCollector<'_> {
                                 ast_id,
                                 derive_attr: attr.id,
                                 derive_pos: idx,
-                                call_site,
+                                ctxt: call_site.ctx,
                             },
                             container: directive.container,
                         });
@@ -1428,7 +1428,7 @@ impl DefCollector<'_> {
         for directive in &self.unresolved_macros {
             match &directive.kind {
-                MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
+                MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
                     // FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
                     let macro_call_as_call_id = macro_call_as_call_id(
                         self.db.upcast(),
@@ -1451,12 +1451,16 @@ impl DefCollector<'_> {
                     if let Err(UnresolvedMacro { path }) = macro_call_as_call_id {
                         self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                             directive.module_id,
-                            MacroCallKind::FnLike { ast_id: ast_id.ast_id, expand_to: *expand_to },
+                            MacroCallKind::FnLike {
+                                ast_id: ast_id.ast_id,
+                                expand_to: *expand_to,
+                                eager: None,
+                            },
                             path,
                         ));
                     }
                 }
-                MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => {
+                MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, ctxt: _ } => {
                     self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                         directive.module_id,
                         MacroCallKind::Derive {
@@ -2285,7 +2289,7 @@ impl ModCollector<'_, '_> {
     fn collect_macro_call(
         &mut self,
-        &MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall,
+        &MacroCall { ref path, ast_id, expand_to, ctxt }: &MacroCall,
         container: ItemContainerId,
     ) {
         let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(path));
@@ -2299,7 +2303,7 @@ impl ModCollector<'_, '_> {
         if let Ok(res) = macro_call_as_call_id_with_eager(
             db.upcast(),
             &ast_id,
-            call_site,
+            ctxt,
             expand_to,
             self.def_collector.def_map.krate,
             |path| {
@@ -2357,7 +2361,7 @@ impl ModCollector<'_, '_> {
             self.def_collector.unresolved_macros.push(MacroDirective {
                 module_id: self.module_id,
                 depth: self.macro_depth + 1,
-                kind: MacroDirectiveKind::FnLike { ast_id, expand_to, call_site },
+                kind: MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt },
                 container,
             });
         }

View file

@@ -1,6 +1,5 @@
-use base_db::{SourceDatabase, SourceDatabaseExt};
+use base_db::{SourceDatabase, SourceDatabaseExt2 as _};
 use test_fixture::WithFixture;
-use triomphe::Arc;
 
 use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId};
 
@@ -17,7 +16,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
         });
         assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
     }
-    db.set_file_text(pos.file_id, Arc::from(ra_fixture_change));
+    db.set_file_text(pos.file_id, ra_fixture_change);
 
     {
         let events = db.log_executed(|| {
@@ -267,7 +266,7 @@ fn quux() { 92 }
 m!(Y);
 m!(Z);
 "#;
-    db.set_file_text(pos.file_id, Arc::from(new_text));
+    db.set_file_text(pos.file_id, new_text);
 
     {
         let events = db.log_executed(|| {

View file

@@ -3,7 +3,7 @@
 use either::Either;
 use hir_expand::InFile;
 use la_arena::ArenaMap;
-use syntax::ast;
+use syntax::{ast, AstNode, AstPtr};
 
 use crate::{
     data::adt::lower_struct, db::DefDatabase, item_tree::ItemTreeNode, trace::Trace, GenericDefId,
@@ -12,8 +12,12 @@ use crate::{
 };
 
 pub trait HasSource {
-    type Value;
-    fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value>;
+    type Value: AstNode;
+    fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
+        let InFile { file_id, value } = self.ast_ptr(db);
+        InFile::new(file_id, value.to_node(&db.parse_or_expand(file_id)))
+    }
+    fn ast_ptr(&self, db: &dyn DefDatabase) -> InFile<AstPtr<Self::Value>>;
 }
 
 impl<T> HasSource for T
@@ -22,16 +26,14 @@ where
     T::Id: ItemTreeNode,
 {
     type Value = <T::Id as ItemTreeNode>::Source;
-
-    fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
+    fn ast_ptr(&self, db: &dyn DefDatabase) -> InFile<AstPtr<Self::Value>> {
         let id = self.item_tree_id();
         let file_id = id.file_id();
         let tree = id.item_tree(db);
         let ast_id_map = db.ast_id_map(file_id);
-        let root = db.parse_or_expand(file_id);
         let node = &tree[id.value];
 
-        InFile::new(file_id, ast_id_map.get(node.ast_id()).to_node(&root))
+        InFile::new(file_id, ast_id_map.get(node.ast_id()))
     }
 }

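For context: the refactor above turns `source` into a provided trait method built on a new required `ast_ptr`, so implementations return a cheap syntax pointer and the parse-and-resolve step lives in one place. A minimal, self-contained sketch of the pattern with toy stand-in types (not the real hir-def/syntax APIs):

    trait AstNode: Sized {
        fn from_text(text: &str) -> Self;
    }

    struct AstPtr<N: AstNode> {
        text: String, // stands in for a syntax-tree address
        _marker: std::marker::PhantomData<N>,
    }

    impl<N: AstNode> AstPtr<N> {
        fn to_node(&self) -> N {
            N::from_text(&self.text) // stands in for re-parsing + lookup
        }
    }

    trait HasSource {
        type Value: AstNode;
        // Implementations only provide the pointer...
        fn ast_ptr(&self) -> AstPtr<Self::Value>;
        // ...and get node materialization for free.
        fn source(&self) -> Self::Value {
            self.ast_ptr().to_node()
        }
    }

    struct FnDef;
    impl AstNode for FnDef {
        fn from_text(_: &str) -> Self { FnDef }
    }

    struct FnLoc;
    impl HasSource for FnLoc {
        type Value = FnDef;
        fn ast_ptr(&self) -> AstPtr<FnDef> {
            AstPtr { text: "fn f() {}".to_owned(), _marker: std::marker::PhantomData }
        }
    }

    fn main() {
        let _node = FnLoc.source(); // no per-impl parsing code needed
    }

Exposing `ast_ptr` also lets callers that only need a pointer (for example, for diagnostics ranges) skip the parse entirely.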
View file

@ -7,7 +7,7 @@ use either::Either;
use intern::Interned; use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use span::Span; use span::{Span, SyntaxContextId};
use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode}; use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::Arc; use triomphe::Arc;
@ -53,7 +53,7 @@ impl RawAttrs {
id, id,
input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
path: Interned::new(ModPath::from(crate::name!(doc))), path: Interned::new(ModPath::from(crate::name!(doc))),
span: span_map.span_for_range(comment.syntax().text_range()), ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
}), }),
}); });
let entries: Arc<[Attr]> = Arc::from_iter(entries); let entries: Arc<[Attr]> = Arc::from_iter(entries);
@ -173,7 +173,7 @@ pub struct Attr {
pub id: AttrId, pub id: AttrId,
pub path: Interned<ModPath>, pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>, pub input: Option<Interned<AttrInput>>,
pub span: Span, pub ctxt: SyntaxContextId,
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -201,10 +201,12 @@ impl Attr {
span_map: SpanMapRef<'_>, span_map: SpanMapRef<'_>,
id: AttrId, id: AttrId,
) -> Option<Attr> { ) -> Option<Attr> {
let path = Interned::new(ModPath::from_src(db, ast.path()?, &mut |range| { let path = ast.path()?;
let range = path.syntax().text_range();
let path = Interned::new(ModPath::from_src(db, path, &mut |range| {
span_map.span_for_range(range).ctx span_map.span_for_range(range).ctx
})?); })?);
let span = span_map.span_for_range(ast.syntax().text_range()); let span = span_map.span_for_range(range);
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
let value = match lit.kind() { let value = match lit.kind() {
ast::LiteralKind::String(string) => string.value()?.into(), ast::LiteralKind::String(string) => string.value()?.into(),
@@ -217,11 +219,11 @@ impl Attr {
} else { } else {
None None
}; };
Some(Attr { id, path, input, span }) Some(Attr { id, path, input, ctxt: span.ctx })
} }
fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> { fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> {
let span = tt.first()?.first_span(); let ctxt = tt.first()?.first_span().ctx;
let path_end = tt let path_end = tt
.iter() .iter()
.position(|tt| { .position(|tt| {
@@ -253,7 +255,7 @@ impl Attr {
} }
_ => None, _ => None,
}; };
Some(Attr { id, path, input, span }) Some(Attr { id, path, input, ctxt })
} }
pub fn path(&self) -> &ModPath { pub fn path(&self) -> &ModPath {

View file

@@ -11,7 +11,7 @@ macro_rules! register_builtin {
} }
impl BuiltinAttrExpander { impl BuiltinAttrExpander {
pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree) -> ExpandResult<tt::Subtree> { pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree> {
match *self { match *self {
$( BuiltinAttrExpander::$variant => $expand, )* $( BuiltinAttrExpander::$variant => $expand, )*
} }
@@ -34,8 +34,9 @@ impl BuiltinAttrExpander {
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
self.expander()(db, id, tt) self.expander()(db, id, tt, span)
} }
pub fn is_derive(self) -> bool { pub fn is_derive(self) -> bool {
@@ -71,6 +72,7 @@ fn dummy_attr_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
_span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(tt.clone()) ExpandResult::ok(tt.clone())
} }
@@ -100,6 +102,7 @@ fn derive_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id); let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind { let derives = match &loc.kind {
@@ -107,17 +110,14 @@ fn derive_expand(
attr_args attr_args
} }
_ => { _ => {
return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan { return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan { open: span, close: span }))
open: loc.call_site,
close: loc.call_site,
}))
} }
}; };
pseudo_derive_attr_expansion(tt, derives, loc.call_site) pseudo_derive_attr_expansion(tt, derives, span)
} }
pub fn pseudo_derive_attr_expansion( pub fn pseudo_derive_attr_expansion(
tt: &tt::Subtree, _: &tt::Subtree,
args: &tt::Subtree, args: &tt::Subtree,
call_site: Span, call_site: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
@@ -141,7 +141,7 @@ pub fn pseudo_derive_attr_expansion(
token_trees.push(mk_leaf(']')); token_trees.push(mk_leaf(']'));
} }
ExpandResult::ok(tt::Subtree { ExpandResult::ok(tt::Subtree {
delimiter: tt.delimiter, delimiter: args.delimiter,
token_trees: token_trees.into_boxed_slice(), token_trees: token_trees.into_boxed_slice(),
}) })
} }

View file

@@ -50,8 +50,8 @@ impl BuiltinDeriveExpander {
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let span = db.lookup_intern_macro_call(id).call_site;
let span = span_with_def_site_ctxt(db, span, id); let span = span_with_def_site_ctxt(db, span, id);
self.expander()(span, tt) self.expander()(span, tt)
} }

View file

@@ -19,14 +19,14 @@ use crate::{
}; };
macro_rules! register_builtin { macro_rules! register_builtin {
( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => { ( $LAZY:ident: $(($name:ident, $kind: ident) => $expand:ident),* , $EAGER:ident: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinFnLikeExpander { pub enum $LAZY {
$($kind),* $($kind),*
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum EagerExpander { pub enum $EAGER {
$($e_kind),* $($e_kind),*
} }
@@ -62,8 +62,8 @@ impl BuiltinFnLikeExpander {
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let span = db.lookup_intern_macro_call(id).call_site;
let span = span_with_def_site_ctxt(db, span, id); let span = span_with_def_site_ctxt(db, span, id);
self.expander()(db, id, tt, span) self.expander()(db, id, tt, span)
} }
@@ -75,8 +75,8 @@ impl EagerExpander {
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let span = db.lookup_intern_macro_call(id).call_site;
let span = span_with_def_site_ctxt(db, span, id); let span = span_with_def_site_ctxt(db, span, id);
self.expander()(db, id, tt, span) self.expander()(db, id, tt, span)
} }
@@ -84,6 +84,17 @@ impl EagerExpander {
pub fn is_include(&self) -> bool { pub fn is_include(&self) -> bool {
matches!(self, EagerExpander::Include) matches!(self, EagerExpander::Include)
} }
pub fn is_include_like(&self) -> bool {
matches!(
self,
EagerExpander::Include | EagerExpander::IncludeStr | EagerExpander::IncludeBytes
)
}
pub fn is_env_or_option_env(&self) -> bool {
matches!(self, EagerExpander::Env | EagerExpander::OptionEnv)
}
} }
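// A hypothetical helper (editor's sketch, not part of the change) showing how
// the two new predicates compose for callers that care about external input:
fn reads_external_input(expander: &EagerExpander) -> bool {
    // include!/include_str!/include_bytes! read files; env!/option_env! read
    // environment variables. Both bypass the source text itself.
    expander.is_include_like() || expander.is_env_or_option_env()
}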
pub fn find_builtin_macro( pub fn find_builtin_macro(
@@ -93,7 +104,7 @@ pub fn find_builtin_macro(
} }
register_builtin! { register_builtin! {
LAZY: BuiltinFnLikeExpander:
(column, Column) => line_expand, (column, Column) => line_expand,
(file, File) => file_expand, (file, File) => file_expand,
(line, Line) => line_expand, (line, Line) => line_expand,
@@ -114,7 +125,7 @@ register_builtin! {
(format_args_nl, FormatArgsNl) => format_args_nl_expand, (format_args_nl, FormatArgsNl) => format_args_nl_expand,
(quote, Quote) => quote_expand, (quote, Quote) => quote_expand,
EAGER: EagerExpander:
(compile_error, CompileError) => compile_error_expand, (compile_error, CompileError) => compile_error_expand,
(concat, Concat) => concat_expand, (concat, Concat) => concat_expand,
(concat_idents, ConcatIdents) => concat_idents_expand, (concat_idents, ConcatIdents) => concat_idents_expand,
@@ -426,22 +437,25 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool {
} }
} }
fn unquote_str(lit: &tt::Literal) -> Option<String> { fn unquote_str(lit: &tt::Literal) -> Option<(String, Span)> {
let span = lit.span;
let lit = ast::make::tokens::literal(&lit.to_string()); let lit = ast::make::tokens::literal(&lit.to_string());
let token = ast::String::cast(lit)?; let token = ast::String::cast(lit)?;
token.value().map(|it| it.into_owned()) token.value().map(|it| (it.into_owned(), span))
} }
fn unquote_char(lit: &tt::Literal) -> Option<char> { fn unquote_char(lit: &tt::Literal) -> Option<(char, Span)> {
let span = lit.span;
let lit = ast::make::tokens::literal(&lit.to_string()); let lit = ast::make::tokens::literal(&lit.to_string());
let token = ast::Char::cast(lit)?; let token = ast::Char::cast(lit)?;
token.value() token.value().zip(Some(span))
} }
fn unquote_byte_string(lit: &tt::Literal) -> Option<Vec<u8>> { fn unquote_byte_string(lit: &tt::Literal) -> Option<(Vec<u8>, Span)> {
let span = lit.span;
let lit = ast::make::tokens::literal(&lit.to_string()); let lit = ast::make::tokens::literal(&lit.to_string());
let token = ast::ByteString::cast(lit)?; let token = ast::ByteString::cast(lit)?;
token.value().map(|it| it.into_owned()) token.value().map(|it| (it.into_owned(), span))
} }
fn compile_error_expand( fn compile_error_expand(
@@ -452,7 +466,7 @@ fn compile_error_expand(
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees { let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
Some(unquoted) => ExpandError::other(unquoted.into_boxed_str()), Some((unquoted, _)) => ExpandError::other(unquoted.into_boxed_str()),
None => ExpandError::other("`compile_error!` argument must be a string"), None => ExpandError::other("`compile_error!` argument must be a string"),
}, },
_ => ExpandError::other("`compile_error!` argument must be a string"), _ => ExpandError::other("`compile_error!` argument must be a string"),
@@ -465,10 +479,16 @@ fn concat_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_arg_id: MacroCallId, _arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: Span, _: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let mut err = None; let mut err = None;
let mut text = String::new(); let mut text = String::new();
let mut span: Option<Span> = None;
let mut record_span = |s: Span| match &mut span {
Some(span) if span.anchor == s.anchor => span.range = span.range.cover(s.range),
Some(_) => (),
None => span = Some(s),
};
for (i, mut t) in tt.token_trees.iter().enumerate() { for (i, mut t) in tt.token_trees.iter().enumerate() {
// FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses // FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses
// to ensure the right parsing order, so skip the parentheses here. Ideally we'd // to ensure the right parsing order, so skip the parentheses here. Ideally we'd
@@ -486,11 +506,14 @@ fn concat_expand(
// concat works with string and char literals, so remove any quotes. // concat works with string and char literals, so remove any quotes.
// It also works with integer, float and boolean literals, so just use the rest // It also works with integer, float and boolean literals, so just use the rest
// as-is. // as-is.
if let Some(c) = unquote_char(it) { if let Some((c, span)) = unquote_char(it) {
text.push(c); text.push(c);
record_span(span);
} else { } else {
let component = unquote_str(it).unwrap_or_else(|| it.text.to_string()); let (component, span) =
unquote_str(it).unwrap_or_else(|| (it.text.to_string(), it.span));
text.push_str(&component); text.push_str(&component);
record_span(span);
} }
} }
// handle boolean literals // handle boolean literals
@@ -498,6 +521,7 @@ fn concat_expand(
if i % 2 == 0 && (id.text == "true" || id.text == "false") => if i % 2 == 0 && (id.text == "true" || id.text == "false") =>
{ {
text.push_str(id.text.as_str()); text.push_str(id.text.as_str());
record_span(id.span);
} }
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
_ => { _ => {
@@ -505,6 +529,7 @@ fn concat_expand(
} }
} }
} }
let span = span.unwrap_or(tt.delimiter.open);
ExpandResult { value: quote!(span =>#text), err } ExpandResult { value: quote!(span =>#text), err }
} }
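// Self-contained sketch of the `record_span` folding used above, with
// simplified stand-ins for `Span` and its range type (assumed names; the real
// types live in the `span` crate): spans sharing an anchor are merged by
// covering their ranges, spans from other anchors are ignored, and the first
// span seen seeds the accumulator.
#[derive(Clone, Copy, PartialEq)]
struct ToyAnchor(u32);
#[derive(Clone, Copy)]
struct ToyRange { start: u32, end: u32 }
impl ToyRange {
    fn cover(self, other: ToyRange) -> ToyRange {
        ToyRange { start: self.start.min(other.start), end: self.end.max(other.end) }
    }
}
#[derive(Clone, Copy)]
struct ToySpan { anchor: ToyAnchor, range: ToyRange }

fn record_span_sketch(acc: &mut Option<ToySpan>, s: ToySpan) {
    match acc {
        Some(span) if span.anchor == s.anchor => span.range = span.range.cover(s.range),
        Some(_) => (),
        None => *acc = Some(s),
    }
}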
@@ -512,18 +537,25 @@ fn concat_bytes_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_arg_id: MacroCallId, _arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: Span, call_site: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let mut bytes = Vec::new(); let mut bytes = Vec::new();
let mut err = None; let mut err = None;
let mut span: Option<Span> = None;
let mut record_span = |s: Span| match &mut span {
Some(span) if span.anchor == s.anchor => span.range = span.range.cover(s.range),
Some(_) => (),
None => span = Some(s),
};
for (i, t) in tt.token_trees.iter().enumerate() { for (i, t) in tt.token_trees.iter().enumerate() {
match t { match t {
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
let token = ast::make::tokens::literal(&lit.to_string()); let token = ast::make::tokens::literal(&lit.to_string());
record_span(lit.span);
match token.kind() { match token.kind() {
syntax::SyntaxKind::BYTE => bytes.push(token.text().to_owned()), syntax::SyntaxKind::BYTE => bytes.push(token.text().to_owned()),
syntax::SyntaxKind::BYTE_STRING => { syntax::SyntaxKind::BYTE_STRING => {
let components = unquote_byte_string(lit).unwrap_or_default(); let components = unquote_byte_string(lit).map_or(vec![], |(it, _)| it);
components.into_iter().for_each(|it| bytes.push(it.to_string())); components.into_iter().for_each(|it| bytes.push(it.to_string()));
} }
_ => { _ => {
@@ -534,7 +566,7 @@ fn concat_bytes_expand(
} }
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
tt::TokenTree::Subtree(tree) if tree.delimiter.kind == tt::DelimiterKind::Bracket => { tt::TokenTree::Subtree(tree) if tree.delimiter.kind == tt::DelimiterKind::Bracket => {
if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes) { if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span) {
err.get_or_insert(e); err.get_or_insert(e);
break; break;
} }
@@ -546,17 +578,24 @@ fn concat_bytes_expand(
} }
} }
let value = tt::Subtree { let value = tt::Subtree {
delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket }, delimiter: tt::Delimiter {
open: call_site,
close: call_site,
kind: tt::DelimiterKind::Bracket,
},
token_trees: { token_trees: {
Itertools::intersperse_with( Itertools::intersperse_with(
bytes.into_iter().map(|it| { bytes.into_iter().map(|it| {
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span })) tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: it.into(),
span: span.unwrap_or(call_site),
}))
}), }),
|| { || {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',', char: ',',
spacing: tt::Spacing::Alone, spacing: tt::Spacing::Alone,
span, span: call_site,
})) }))
}, },
) )
@@ -569,13 +608,15 @@ fn concat_bytes_expand(
fn concat_bytes_expand_subtree( fn concat_bytes_expand_subtree(
tree: &tt::Subtree, tree: &tt::Subtree,
bytes: &mut Vec<String>, bytes: &mut Vec<String>,
mut record_span: impl FnMut(Span),
) -> Result<(), ExpandError> { ) -> Result<(), ExpandError> {
for (ti, tt) in tree.token_trees.iter().enumerate() { for (ti, tt) in tree.token_trees.iter().enumerate() {
match tt { match tt {
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => {
let lit = ast::make::tokens::literal(&lit.to_string()); let lit = ast::make::tokens::literal(&it.to_string());
match lit.kind() { match lit.kind() {
syntax::SyntaxKind::BYTE | syntax::SyntaxKind::INT_NUMBER => { syntax::SyntaxKind::BYTE | syntax::SyntaxKind::INT_NUMBER => {
record_span(it.span);
bytes.push(lit.text().to_owned()) bytes.push(lit.text().to_owned())
} }
_ => { _ => {
@@ -635,7 +676,7 @@ fn relative_file(
} }
} }
fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> { fn parse_string(tt: &tt::Subtree) -> Result<(String, Span), ExpandError> {
tt.token_trees tt.token_trees
.first() .first()
.and_then(|tt| match tt { .and_then(|tt| match tt {
@@ -675,7 +716,7 @@ pub fn include_input_to_file_id(
arg_id: MacroCallId, arg_id: MacroCallId,
arg: &tt::Subtree, arg: &tt::Subtree,
) -> Result<FileId, ExpandError> { ) -> Result<FileId, ExpandError> {
relative_file(db, arg_id, &parse_string(arg)?, false) relative_file(db, arg_id, &parse_string(arg)?.0, false)
} }
fn include_bytes_expand( fn include_bytes_expand(
@@ -701,7 +742,7 @@ fn include_str_expand(
tt: &tt::Subtree, tt: &tt::Subtree,
span: Span, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let path = match parse_string(tt) { let (path, span) = match parse_string(tt) {
Ok(it) => it, Ok(it) => it,
Err(e) => { Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
@@ -736,7 +777,7 @@ fn env_expand(
tt: &tt::Subtree, tt: &tt::Subtree,
span: Span, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) { let (key, span) = match parse_string(tt) {
Ok(it) => it, Ok(it) => it,
Err(e) => { Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
@@ -766,18 +807,24 @@ fn option_env_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
arg_id: MacroCallId, arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: Span, call_site: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) { let (key, span) = match parse_string(tt) {
Ok(it) => it, Ok(it) => it,
Err(e) => { Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) return ExpandResult::new(
tt::Subtree::empty(DelimSpan { open: call_site, close: call_site }),
e,
)
} }
}; };
let dollar_crate = dollar_crate(span); let dollar_crate = dollar_crate(call_site);
let expanded = match get_env_inner(db, arg_id, &key) { let expanded = match get_env_inner(db, arg_id, &key) {
None => quote! {span => #dollar_crate::option::Option::None::<&str> }, None => quote! {call_site => #dollar_crate::option::Option::None::<&str> },
Some(s) => quote! {span => #dollar_crate::option::Option::Some(#s) }, Some(s) => {
let s = quote! (span => #s);
quote! {call_site => #dollar_crate::option::Option::Some(#s) }
}
}; };
ExpandResult::ok(expanded) ExpandResult::ok(expanded)
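// Illustrative expansion (editor's sketch): `option_env!("PATH")` now yields
// `$crate::option::Option::Some("...")` where the inner string literal carries
// the span of the `"PATH"` argument, while the wrapping path keeps the
// call-site span, matching the `quote!` nesting above.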

View file

@@ -0,0 +1,327 @@
//! Processes `#[cfg]` and `#[cfg_attr]` attributes out of the input to a derive macro
use std::iter::Peekable;
use cfg::{CfgAtom, CfgExpr};
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, Attr, HasAttrs, Meta, VariantList},
AstNode, NodeOrToken, SyntaxElement, SyntaxNode, T,
};
use tracing::{debug, warn};
use tt::SmolStr;
use crate::{db::ExpandDatabase, MacroCallKind, MacroCallLoc};
fn check_cfg_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> Option<bool> {
if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
return None;
}
debug!("Evaluating cfg {}", attr);
let cfg = parse_from_attr_meta(attr.meta()?)?;
debug!("Checking cfg {:?}", cfg);
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false);
Some(enabled)
}
fn check_cfg_attr_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> Option<bool> {
if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
return None;
}
debug!("Evaluating cfg_attr {}", attr);
let cfg_expr = parse_from_attr_meta(attr.meta()?)?;
debug!("Checking cfg_attr {:?}", cfg_expr);
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg_expr) != Some(false);
Some(enabled)
}
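// Note the `!= Some(false)` in both checks above: `CfgOptions::check` is
// tri-state (`Some(true)` enabled, `Some(false)` disabled, `None` unknown),
// so unknown cfgs are deliberately treated as enabled. A toy model of that
// decision, for illustration only:
fn is_enabled_sketch(check_result: Option<bool>) -> bool {
    // `Some(true)` and `None` (unknown cfg) both count as enabled
    check_result != Some(false)
}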
fn process_has_attrs_with_possible_comma<I: HasAttrs>(
items: impl Iterator<Item = I>,
loc: &MacroCallLoc,
db: &dyn ExpandDatabase,
remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
for item in items {
let field_attrs = item.attrs();
'attrs: for attr in field_attrs {
if check_cfg_attr(&attr, loc, db).map(|enabled| !enabled).unwrap_or_default() {
debug!("censoring type {:?}", item.syntax());
remove.insert(item.syntax().clone().into());
// We need to remove the `,` as well
remove_possible_comma(&item, remove);
break 'attrs;
}
if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
if enabled {
debug!("Removing cfg_attr tokens {:?}", attr);
let meta = attr.meta()?;
let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
remove.extend(removes_from_cfg_attr);
} else {
debug!("censoring type cfg_attr {:?}", item.syntax());
remove.insert(attr.syntax().clone().into());
continue;
}
}
}
}
Some(())
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum CfgExprStage {
/// Stripping the `CfgExpr` part of the attribute
StrippingCfgExpr,
/// Found the comma after the `CfgExpr`. Will keep all tokens until the next comma or the end of the attribute
FoundComma,
/// Everything following the attribute. This could be another attribute or the end of the attribute.
// FIXME: cfg_attr with multiple attributes will not be handled correctly. We will only keep the first attribute
// Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110
EverythingElse,
}
/// This function creates its own set of tokens to remove, to help guard against malformed syntax in the input.
fn remove_tokens_within_cfg_attr(meta: Meta) -> Option<FxHashSet<SyntaxElement>> {
let mut remove: FxHashSet<SyntaxElement> = FxHashSet::default();
debug!("Enabling attribute {}", meta);
let meta_path = meta.path()?;
debug!("Removing {:?}", meta_path.syntax());
remove.insert(meta_path.syntax().clone().into());
let meta_tt = meta.token_tree()?;
debug!("meta_tt {}", meta_tt);
let mut stage = CfgExprStage::StrippingCfgExpr;
for tt in meta_tt.token_trees_and_tokens() {
debug!("Checking {:?}. Stage: {:?}", tt, stage);
match (stage, tt) {
(CfgExprStage::StrippingCfgExpr, syntax::NodeOrToken::Node(node)) => {
remove.insert(node.syntax().clone().into());
}
(CfgExprStage::StrippingCfgExpr, syntax::NodeOrToken::Token(token)) => {
if token.kind() == T![,] {
stage = CfgExprStage::FoundComma;
}
remove.insert(token.into());
}
(CfgExprStage::FoundComma, syntax::NodeOrToken::Token(token))
if (token.kind() == T![,] || token.kind() == T![')']) =>
{
// The end of the attribute or separator for the next attribute
stage = CfgExprStage::EverythingElse;
remove.insert(token.into());
}
(CfgExprStage::EverythingElse, syntax::NodeOrToken::Node(node)) => {
remove.insert(node.syntax().clone().into());
}
(CfgExprStage::EverythingElse, syntax::NodeOrToken::Token(token)) => {
remove.insert(token.into());
}
// This is an actual attribute
_ => {}
}
}
if stage != CfgExprStage::EverythingElse {
warn!("Invalid cfg_attr attribute. {:?}", meta_tt);
return None;
}
Some(remove)
}
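// A toy walkthrough of the stage machine above, using plain strings instead
// of real syntax tokens (an editor's sketch under that simplification). For
// `(feature = "x", derive(Debug))` it strips everything through the first
// comma, keeps the attribute itself, and strips the closing `)` plus anything
// after it.
fn classify_sketch(tokens: &[&str]) -> Vec<(&'static str, String)> {
    let mut stage = "StrippingCfgExpr";
    let mut out = Vec::new();
    for &t in tokens {
        let action = match stage {
            "StrippingCfgExpr" => {
                if t == "," {
                    stage = "FoundComma";
                }
                "remove"
            }
            "FoundComma" if t == "," || t == ")" => {
                stage = "EverythingElse";
                "remove"
            }
            "FoundComma" => "keep",
            _ => "remove", // EverythingElse
        };
        out.push((action, t.to_string()));
    }
    out
}
// classify_sketch(&["(", "feature", "=", "\"x\"", ",", "derive", "(Debug)", ")"])
// keeps only `derive` and `(Debug)`.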
/// Removes a possible comma after the [AstNode]
fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet<SyntaxElement>) {
if let Some(comma) = item.syntax().next_sibling_or_token().filter(|it| it.kind() == T![,]) {
res.insert(comma);
}
}
fn process_enum(
variants: VariantList,
loc: &MacroCallLoc,
db: &dyn ExpandDatabase,
remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
'variant: for variant in variants.variants() {
for attr in variant.attrs() {
if check_cfg_attr(&attr, loc, db).map(|enabled| !enabled).unwrap_or_default() {
// Rustc does not strip the attribute if it is enabled. So we will leave it
debug!("censoring type {:?}", variant.syntax());
remove.insert(variant.syntax().clone().into());
// We need to remove the `,` as well
remove_possible_comma(&variant, remove);
continue 'variant;
};
if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
if enabled {
debug!("Removing cfg_attr tokens {:?}", attr);
let meta = attr.meta()?;
let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
remove.extend(removes_from_cfg_attr);
} else {
debug!("censoring type cfg_attr {:?}", variant.syntax());
remove.insert(attr.syntax().clone().into());
continue;
}
}
}
if let Some(fields) = variant.field_list() {
match fields {
ast::FieldList::RecordFieldList(fields) => {
process_has_attrs_with_possible_comma(fields.fields(), loc, db, remove)?;
}
ast::FieldList::TupleFieldList(fields) => {
process_has_attrs_with_possible_comma(fields.fields(), loc, db, remove)?;
}
}
}
}
Some(())
}
pub(crate) fn process_cfg_attrs(
node: &SyntaxNode,
loc: &MacroCallLoc,
db: &dyn ExpandDatabase,
) -> Option<FxHashSet<SyntaxElement>> {
// FIXME: `#[cfg_eval]` is not implemented, but it is not stable yet anyway
if !matches!(loc.kind, MacroCallKind::Derive { .. }) {
return None;
}
let mut remove = FxHashSet::default();
let item = ast::Item::cast(node.clone())?;
for attr in item.attrs() {
if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
if enabled {
debug!("Removing cfg_attr tokens {:?}", attr);
let meta = attr.meta()?;
let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
remove.extend(removes_from_cfg_attr);
} else {
debug!("censoring type cfg_attr {:?}", item.syntax());
remove.insert(attr.syntax().clone().into());
continue;
}
}
}
match item {
ast::Item::Struct(it) => match it.field_list()? {
ast::FieldList::RecordFieldList(fields) => {
process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut remove)?;
}
ast::FieldList::TupleFieldList(fields) => {
process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut remove)?;
}
},
ast::Item::Enum(it) => {
process_enum(it.variant_list()?, loc, db, &mut remove)?;
}
ast::Item::Union(it) => {
process_has_attrs_with_possible_comma(
it.record_field_list()?.fields(),
loc,
db,
&mut remove,
)?;
}
// FIXME: Implement this for other items if necessary. As we do not support `#[cfg_eval]` yet, it is not needed for now
_ => {}
}
Some(remove)
}
/// Parses a `cfg` attribute from the meta
fn parse_from_attr_meta(meta: Meta) -> Option<CfgExpr> {
let tt = meta.token_tree()?;
let mut iter = tt.token_trees_and_tokens().skip(1).peekable();
next_cfg_expr_from_syntax(&mut iter)
}
fn next_cfg_expr_from_syntax<I>(iter: &mut Peekable<I>) -> Option<CfgExpr>
where
I: Iterator<Item = NodeOrToken<ast::TokenTree, syntax::SyntaxToken>>,
{
let name = match iter.next() {
None => return None,
Some(NodeOrToken::Token(element)) => match element.kind() {
syntax::T![ident] => SmolStr::new(element.text()),
_ => return Some(CfgExpr::Invalid),
},
Some(_) => return Some(CfgExpr::Invalid),
};
let result = match name.as_str() {
"all" | "any" | "not" => {
let mut preds = Vec::new();
let Some(NodeOrToken::Node(tree)) = iter.next() else {
return Some(CfgExpr::Invalid);
};
let mut tree_iter = tree.token_trees_and_tokens().skip(1).peekable();
while tree_iter
.peek()
.filter(
|element| matches!(element, NodeOrToken::Token(token) if (token.kind() != syntax::T![')'])),
)
.is_some()
{
let pred = next_cfg_expr_from_syntax(&mut tree_iter);
if let Some(pred) = pred {
preds.push(pred);
}
}
let group = match name.as_str() {
"all" => CfgExpr::All(preds),
"any" => CfgExpr::Any(preds),
"not" => CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid))),
_ => unreachable!(),
};
Some(group)
}
_ => match iter.peek() {
Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => {
iter.next();
match iter.next() {
Some(NodeOrToken::Token(value_token))
if (value_token.kind() == syntax::SyntaxKind::STRING) =>
{
let value = value_token.text();
let value = SmolStr::new(value.trim_matches('"'));
Some(CfgExpr::Atom(CfgAtom::KeyValue { key: name, value }))
}
_ => None,
}
}
_ => Some(CfgExpr::Atom(CfgAtom::Flag(name))),
},
};
if let Some(NodeOrToken::Token(element)) = iter.peek() {
if element.kind() == syntax::T![,] {
iter.next();
}
}
result
}
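// Sketch of the result shape, using the `cfg` crate types imported above:
// `#[cfg(any(unix, feature = "foo"))]` parses, conceptually, to
//
//     CfgExpr::Any(vec![
//         CfgExpr::Atom(CfgAtom::Flag("unix".into())),
//         CfgExpr::Atom(CfgAtom::KeyValue { key: "feature".into(), value: "foo".into() }),
//     ])
//
// with the closing `)` of each group terminating the recursion.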
#[cfg(test)]
mod tests {
use cfg::DnfExpr;
use expect_test::{expect, Expect};
use syntax::{ast::Attr, AstNode, SourceFile};
use crate::cfg_process::parse_from_attr_meta;
fn check_dnf_from_syntax(input: &str, expect: Expect) {
let parse = SourceFile::parse(input);
let node = match parse.tree().syntax().descendants().find_map(Attr::cast) {
Some(it) => it,
None => {
let node = std::any::type_name::<Attr>();
panic!("Failed to make ast node `{node}` from text {input}")
}
};
let node = node.clone_subtree();
assert_eq!(node.syntax().text_range().start(), 0.into());
let cfg = parse_from_attr_meta(node.meta().unwrap()).unwrap();
let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
expect.assert_eq(&actual);
}
#[test]
fn cfg_from_attr() {
check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]);
check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]);
}
}

View file

@@ -48,7 +48,7 @@ impl ChangeWithProcMacros {
} }
} }
pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) { pub fn change_file(&mut self, file_id: FileId, new_text: Option<String>) {
self.source_change.change_file(file_id, new_text) self.source_change.change_file(file_id, new_text)
} }

View file

@@ -3,21 +3,19 @@
use base_db::{salsa, CrateId, FileId, SourceDatabase}; use base_db::{salsa, CrateId, FileId, SourceDatabase};
use either::Either; use either::Either;
use limit::Limit; use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult}; use mbe::syntax_node_to_token_tree;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use span::{AstIdMap, SyntaxContextData, SyntaxContextId}; use span::{AstIdMap, Span, SyntaxContextData, SyntaxContextId};
use syntax::{ use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
ast::{self, HasAttrs},
AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
attrs::collect_attrs, attrs::{collect_attrs, AttrId},
builtin_attr_macro::pseudo_derive_attr_expansion, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander, builtin_fn_macro::EagerExpander,
cfg_process,
declarative::DeclarativeMacroExpander, declarative::DeclarativeMacroExpander,
fixup::{self, reverse_fixups, SyntaxFixupUndoInfo}, fixup::{self, SyntaxFixupUndoInfo},
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt}, hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
proc_macro::ProcMacros, proc_macro::ProcMacros,
span_map::{RealSpanMap, SpanMap, SpanMapRef}, span_map::{RealSpanMap, SpanMap, SpanMapRef},
@@ -100,10 +98,7 @@ pub trait ExpandDatabase: SourceDatabase {
/// Lowers syntactic macro call to a token tree representation. That's a firewall /// Lowers syntactic macro call to a token tree representation. That's a firewall
/// query, only typing in the macro call itself changes the returned /// query, only typing in the macro call itself changes the returned
/// subtree. /// subtree.
fn macro_arg( fn macro_arg(&self, id: MacroCallId) -> (Arc<tt::Subtree>, SyntaxFixupUndoInfo, Span);
&self,
id: MacroCallId,
) -> ValueResult<(Arc<tt::Subtree>, SyntaxFixupUndoInfo), Arc<Box<[SyntaxError]>>>;
/// Fetches the expander for this macro. /// Fetches the expander for this macro.
#[salsa::transparent] #[salsa::transparent]
#[salsa::invoke(TokenExpander::macro_expander)] #[salsa::invoke(TokenExpander::macro_expander)]
@@ -120,6 +115,12 @@ pub trait ExpandDatabase: SourceDatabase {
/// non-determinism breaks salsa in a very, very, very bad way. /// non-determinism breaks salsa in a very, very, very bad way.
/// @edwin0cheng heroically debugged this once! See #4315 for details /// @edwin0cheng heroically debugged this once! See #4315 for details
fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>; fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
/// Retrieves the span to be used for a proc-macro's expansion spans.
/// This is a firewall query: it requires parsing the file, which we don't want proc-macros to
/// depend on directly, as that would cause overly frequent invalidations, mainly because the
/// parse queries are LRU cached. If they weren't, the invalidations would only happen when the
/// user edits the file that defines the proc-macro.
fn proc_macro_span(&self, fun: AstId<ast::Fn>) -> Span;
/// Firewall query that returns the errors from the `parse_macro_expansion` query. /// Firewall query that returns the errors from the `parse_macro_expansion` query.
fn parse_macro_expansion_error( fn parse_macro_expansion_error(
&self, &self,
@@ -139,30 +140,50 @@ pub fn expand_speculative(
) -> Option<(SyntaxNode, SyntaxToken)> { ) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call); let loc = db.lookup_intern_macro_call(actual_macro_call);
// FIXME: This BOGUS here is dangerous once the proc-macro server can call back into the database!
let span_map = RealSpanMap::absolute(FileId::BOGUS); let span_map = RealSpanMap::absolute(FileId::BOGUS);
let span_map = SpanMapRef::RealSpanMap(&span_map); let span_map = SpanMapRef::RealSpanMap(&span_map);
let (_, _, span) = db.macro_arg(actual_macro_call);
// Build the subtree and token mapping for the speculative args // Build the subtree and token mapping for the speculative args
let (mut tt, undo_info) = match loc.kind { let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => ( MacroCallKind::FnLike { .. } => (
mbe::syntax_node_to_token_tree(speculative_args, span_map, loc.call_site), mbe::syntax_node_to_token_tree(speculative_args, span_map, span),
SyntaxFixupUndoInfo::NONE, SyntaxFixupUndoInfo::NONE,
), ),
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => (
let censor = censor_for_macro_input(&loc, speculative_args); mbe::syntax_node_to_token_tree(speculative_args, span_map, span),
let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site); SyntaxFixupUndoInfo::NONE,
),
MacroCallKind::Derive { derive_attr_index: index, .. }
| MacroCallKind::Attr { invoc_attr_index: index, .. } => {
let censor = if let MacroCallKind::Derive { .. } = loc.kind {
censor_derive_input(index, &ast::Adt::cast(speculative_args.clone())?)
} else {
attr_source(index, &ast::Item::cast(speculative_args.clone())?)
.into_iter()
.map(|it| it.syntax().clone().into())
.collect()
};
let censor_cfg =
cfg_process::process_cfg_attrs(speculative_args, &loc, db).unwrap_or_default();
let mut fixups = fixup::fixup_syntax(span_map, speculative_args, span);
fixups.append.retain(|it, _| match it { fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true, syntax::NodeOrToken::Token(_) => true,
it => !censor.contains(it) && !censor_cfg.contains(it),
}); });
fixups.remove.extend(censor); fixups.remove.extend(censor);
fixups.remove.extend(censor_cfg);
( (
mbe::syntax_node_to_token_tree_modified( mbe::syntax_node_to_token_tree_modified(
speculative_args, speculative_args,
span_map, span_map,
fixups.append, fixups.append,
fixups.remove, fixups.remove,
loc.call_site, span,
), ),
fixups.undo_info, fixups.undo_info,
) )
@@ -184,9 +205,8 @@ pub fn expand_speculative(
}?; }?;
match attr.token_tree() { match attr.token_tree() {
Some(token_tree) => { Some(token_tree) => {
let mut tree = let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map, span);
syntax_node_to_token_tree(token_tree.syntax(), span_map, loc.call_site); tree.delimiter = tt::Delimiter::invisible_spanned(span);
tree.delimiter = tt::Delimiter::invisible_spanned(loc.call_site);
Some(tree) Some(tree)
} }
@@ -199,36 +219,36 @@ pub fn expand_speculative(
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead. // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind { let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => { MacroDefKind::ProcMacro(expander, _, ast) => {
tt.delimiter = tt::Delimiter::invisible_spanned(loc.call_site); let span = db.proc_macro_span(ast);
tt.delimiter = tt::Delimiter::invisible_spanned(span);
expander.expand( expander.expand(
db, db,
loc.def.krate, loc.def.krate,
loc.krate, loc.krate,
&tt, &tt,
attr_arg.as_ref(), attr_arg.as_ref(),
span_with_def_site_ctxt(db, loc.def.span, actual_macro_call), span_with_def_site_ctxt(db, span, actual_macro_call),
span_with_call_site_ctxt(db, loc.def.span, actual_macro_call), span_with_call_site_ctxt(db, span, actual_macro_call),
span_with_mixed_site_ctxt(db, loc.def.span, actual_macro_call), span_with_mixed_site_ctxt(db, span, actual_macro_call),
) )
} }
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site) pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
}
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(db, tt, loc.def.krate, span)
}
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
} }
MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand_unhygienic(
db,
tt,
loc.def.krate,
loc.call_site,
),
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInDerive(it, ..) => { MacroDefKind::BuiltInDerive(it, ..) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into) it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
} }
MacroDefKind::BuiltInEager(it, _) => { MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into) it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
} }
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt), MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span),
}; };
let expand_to = loc.expand_to(); let expand_to = loc.expand_to();
@@ -319,181 +339,161 @@ pub(crate) fn parse_with_map(
} }
} }
// FIXME: for derive attributes, this will return separate copies of the same structures! // FIXME: for derive attributes, this will return separate copies of the same structures! Though
// they may differ in spans due to differing call sites...
fn macro_arg( fn macro_arg(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
// FIXME: consider the following by putting fixup info into eager call info args ) -> (Arc<tt::Subtree>, SyntaxFixupUndoInfo, Span) {
// ) -> ValueResult<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
) -> ValueResult<(Arc<tt::Subtree>, SyntaxFixupUndoInfo), Arc<Box<[SyntaxError]>>> {
let loc = db.lookup_intern_macro_call(id); let loc = db.lookup_intern_macro_call(id);
if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
.then(|| loc.eager.as_deref()) if let MacroCallLoc {
.flatten() def: MacroDefId { kind: MacroDefKind::BuiltInEager(..), .. },
kind: MacroCallKind::FnLike { eager: Some(eager), .. },
..
} = &loc
{ {
ValueResult::ok((arg.clone(), SyntaxFixupUndoInfo::NONE)) return (eager.arg.clone(), SyntaxFixupUndoInfo::NONE, eager.span);
} else {
let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
let syntax = match loc.kind {
MacroCallKind::FnLike { ast_id, .. } => {
let dummy_tt = |kind| {
(
Arc::new(tt::Subtree {
delimiter: tt::Delimiter {
open: loc.call_site,
close: loc.call_site,
kind,
},
token_trees: Box::default(),
}),
SyntaxFixupUndoInfo::default(),
)
};
let node = &ast_id.to_ptr(db).to_node(&root);
let offset = node.syntax().text_range().start();
let Some(tt) = node.token_tree() else {
return ValueResult::new(
dummy_tt(tt::DelimiterKind::Invisible),
Arc::new(Box::new([SyntaxError::new_at_offset(
"missing token tree".to_owned(),
offset,
)])),
);
};
let first = tt.left_delimiter_token().map(|it| it.kind()).unwrap_or(T!['(']);
let last = tt.right_delimiter_token().map(|it| it.kind()).unwrap_or(T![.]);
let mismatched_delimiters = !matches!(
(first, last),
(T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])
);
if mismatched_delimiters {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// So instead, we'll return an empty subtree here
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
let kind = match first {
_ if loc.def.is_proc_macro() => tt::DelimiterKind::Invisible,
T!['('] => tt::DelimiterKind::Parenthesis,
T!['['] => tt::DelimiterKind::Bracket,
T!['{'] => tt::DelimiterKind::Brace,
_ => tt::DelimiterKind::Invisible,
};
return ValueResult::new(
dummy_tt(kind),
Arc::new(Box::new([SyntaxError::new_at_offset(
"mismatched delimiters".to_owned(),
offset,
)])),
);
}
tt.syntax().clone()
}
MacroCallKind::Derive { ast_id, .. } => {
ast_id.to_ptr(db).to_node(&root).syntax().clone()
}
MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
};
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => (
mbe::syntax_node_to_token_tree(&syntax, map.as_ref(), loc.call_site),
SyntaxFixupUndoInfo::NONE,
),
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
let censor = censor_for_macro_input(&loc, &syntax);
let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true,
});
fixups.remove.extend(censor);
{
let mut tt = mbe::syntax_node_to_token_tree_modified(
&syntax,
map.as_ref(),
fixups.append.clone(),
fixups.remove.clone(),
loc.call_site,
);
reverse_fixups(&mut tt, &fixups.undo_info);
}
(
mbe::syntax_node_to_token_tree_modified(
&syntax,
map,
fixups.append,
fixups.remove,
loc.call_site,
),
fixups.undo_info,
)
}
};
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter.kind = tt::DelimiterKind::Invisible;
}
if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
match parse.errors() {
errors if errors.is_empty() => ValueResult::ok((Arc::new(tt), undo_info)),
errors => ValueResult::new(
(Arc::new(tt), undo_info),
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(errors.to_vec().into_boxed_slice()),
),
}
} else {
ValueResult::ok((Arc::new(tt), undo_info))
}
} }
let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
let (censor, item_node, span) = match loc.kind {
MacroCallKind::FnLike { ast_id, .. } => {
let node = &ast_id.to_ptr(db).to_node(&root);
let path_range = node
.path()
.map_or_else(|| node.syntax().text_range(), |path| path.syntax().text_range());
let span = map.span_for_range(path_range);
let dummy_tt = |kind| {
(
Arc::new(tt::Subtree {
delimiter: tt::Delimiter { open: span, close: span, kind },
token_trees: Box::default(),
}),
SyntaxFixupUndoInfo::default(),
span,
)
};
let Some(tt) = node.token_tree() else {
return dummy_tt(tt::DelimiterKind::Invisible);
};
let first = tt.left_delimiter_token().map(|it| it.kind()).unwrap_or(T!['(']);
let last = tt.right_delimiter_token().map(|it| it.kind()).unwrap_or(T![.]);
let mismatched_delimiters = !matches!(
(first, last),
(T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])
);
if mismatched_delimiters {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// So instead, we'll return an empty subtree here
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
let kind = match first {
_ if loc.def.is_proc_macro() => tt::DelimiterKind::Invisible,
T!['('] => tt::DelimiterKind::Parenthesis,
T!['['] => tt::DelimiterKind::Bracket,
T!['{'] => tt::DelimiterKind::Brace,
_ => tt::DelimiterKind::Invisible,
};
return dummy_tt(kind);
}
let mut tt = mbe::syntax_node_to_token_tree(tt.syntax(), map.as_ref(), span);
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter.kind = tt::DelimiterKind::Invisible;
}
return (Arc::new(tt), SyntaxFixupUndoInfo::NONE, span);
}
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
let node = ast_id.to_ptr(db).to_node(&root);
let censor_derive_input = censor_derive_input(derive_attr_index, &node);
let item_node = node.into();
let attr_source = attr_source(derive_attr_index, &item_node);
// FIXME: This is wrong, this should point to the path of the derive attribute
let span =
map.span_for_range(attr_source.as_ref().and_then(|it| it.path()).map_or_else(
|| item_node.syntax().text_range(),
|it| it.syntax().text_range(),
));
(censor_derive_input, item_node, span)
}
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let node = ast_id.to_ptr(db).to_node(&root);
let attr_source = attr_source(invoc_attr_index, &node);
let span = map.span_for_range(
attr_source
.as_ref()
.and_then(|it| it.path())
.map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()),
);
(attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span)
}
};
let (mut tt, undo_info) = {
let syntax = item_node.syntax();
let censor_cfg = cfg_process::process_cfg_attrs(syntax, &loc, db).unwrap_or_default();
let mut fixups = fixup::fixup_syntax(map.as_ref(), syntax, span);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
it => !censor.contains(it) && !censor_cfg.contains(it),
});
fixups.remove.extend(censor);
fixups.remove.extend(censor_cfg);
(
mbe::syntax_node_to_token_tree_modified(
syntax,
map,
fixups.append,
fixups.remove,
span,
),
fixups.undo_info,
)
};
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter.kind = tt::DelimiterKind::Invisible;
}
(Arc::new(tt), undo_info, span)
} }
// FIXME: Censoring info should be calculated by the caller! Namely by name resolution // FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Certain macro calls expect some nodes in the input to be preprocessed away, namely: /// Derives expect all `#[derive(..)]` invocations up to (and including) the currently invoked one to be stripped
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet<SyntaxElement> {
/// - attributes expect the invoking attribute to be stripped
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
// FIXME: handle `cfg_attr` // FIXME: handle `cfg_attr`
(|| { cov_mark::hit!(derive_censoring);
let censor = match loc.kind { collect_attrs(node)
MacroCallKind::FnLike { .. } => return None, .take(derive_attr_index.ast_index() + 1)
MacroCallKind::Derive { derive_attr_index, .. } => { .filter_map(|(_, attr)| Either::left(attr))
cov_mark::hit!(derive_censoring); // FIXME, this resolution should not be done syntactically
ast::Item::cast(node.clone())? // derive is a proper macro now, no longer builtin
.attrs() // But we do not have resolution at this stage, this means
.take(derive_attr_index.ast_index() + 1) // we need to know about all macro calls for the given ast item here
// FIXME, this resolution should not be done syntactically // so we require some kind of mapping...
// derive is a proper macro now, no longer builtin .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
// But we do not have resolution at this stage, this means .map(|it| it.syntax().clone().into())
// we need to know about all macro calls for the given ast item here .collect()
// so we require some kind of mapping... }
.filter(|attr| attr.simple_name().as_deref() == Some("derive"))
.map(|it| it.syntax().clone()) /// Attributes expect the invoking attribute to be stripped
.collect() fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option<ast::Attr> {
} // FIXME: handle `cfg_attr`
MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None, cov_mark::hit!(attribute_macro_attr_censoring);
MacroCallKind::Attr { invoc_attr_index, .. } => { collect_attrs(node).nth(invoc_attr_index.ast_index()).and_then(|(_, attr)| Either::left(attr))
cov_mark::hit!(attribute_macro_attr_censoring);
collect_attrs(&ast::Item::cast(node.clone())?)
.nth(invoc_attr_index.ast_index())
.and_then(|x| Either::left(x.1))
.map(|attr| attr.syntax().clone())
.into_iter()
.collect()
}
};
Some(censor)
})()
.unwrap_or_default()
} }
impl TokenExpander { impl TokenExpander {
@@ -523,74 +523,64 @@ fn macro_expand(
) -> ExpandResult<CowArc<tt::Subtree>> { ) -> ExpandResult<CowArc<tt::Subtree>> {
let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered(); let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered();
let ExpandResult { value: tt, mut err } = match loc.def.kind { let (ExpandResult { value: tt, err }, span) = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc), MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc),
_ => { _ => {
let ValueResult { value: (macro_arg, undo_info), err } = db.macro_arg(macro_call_id); let (macro_arg, undo_info, span) = db.macro_arg(macro_call_id);
let format_parse_err = |err: Arc<Box<[SyntaxError]>>| {
let mut buf = String::new();
for err in &**err {
use std::fmt::Write;
_ = write!(buf, "{}, ", err);
}
buf.pop();
buf.pop();
ExpandError::other(buf)
};
let arg = &*macro_arg; let arg = &*macro_arg;
let res = match loc.def.kind { let res =
MacroDefKind::Declarative(id) => { match loc.def.kind {
db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id) MacroDefKind::Declarative(id) => db
} .decl_macro_expander(loc.def.krate, id)
MacroDefKind::BuiltIn(it, _) => { .expand(db, arg.clone(), macro_call_id, span),
it.expand(db, macro_call_id, arg).map_err(Into::into) MacroDefKind::BuiltIn(it, _) => {
} it.expand(db, macro_call_id, arg, span).map_err(Into::into)
// This might look a bit odd, but we do not expand the inputs to eager macros here. }
// Eager macro inputs are expanded, well, eagerly when we collect the macro calls. MacroDefKind::BuiltInDerive(it, _) => {
// That kind of expansion uses the ast id map of an eager macro's input though, which goes through it.expand(db, macro_call_id, arg, span).map_err(Into::into)
// the HirFileId machinery. As eager macro inputs are assigned a macro file id that query }
// will end up going through here again, whereas we just want to inspect the raw input. MacroDefKind::BuiltInEager(it, _) => {
// As such we just return the input subtree here. // This might look a bit odd, but we do not expand the inputs to eager macros here.
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => { // Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
return ExpandResult { // That kind of expansion uses the ast id map of an eager macro's input though, which goes through
value: CowArc::Arc(macro_arg.clone()), // the HirFileId machinery. As eager macro inputs are assigned a macro file id that query
err: err.map(format_parse_err), // will end up going through here again, whereas we just want to inspect the raw input.
}; // As such we just return the input subtree here.
} let eager = match &loc.kind {
MacroDefKind::BuiltInDerive(it, _) => { MacroCallKind::FnLike { eager: None, .. } => {
it.expand(db, macro_call_id, arg).map_err(Into::into) return ExpandResult::ok(CowArc::Arc(macro_arg.clone()));
} }
MacroDefKind::BuiltInEager(it, _) => { MacroCallKind::FnLike { eager: Some(eager), .. } => Some(&**eager),
it.expand(db, macro_call_id, arg).map_err(Into::into) _ => None,
} };
MacroDefKind::BuiltInAttr(it, _) => {
let mut res = it.expand(db, macro_call_id, arg); let mut res = it.expand(db, macro_call_id, arg, span).map_err(Into::into);
fixup::reverse_fixups(&mut res.value, &undo_info);
res if let Some(EagerCallInfo { error, .. }) = eager {
} // FIXME: We should report both errors!
_ => unreachable!(), res.err = error.clone().or(res.err);
}; }
ExpandResult { res
value: res.value, }
// if the arg had parse errors, show them instead of the expansion errors MacroDefKind::BuiltInAttr(it, _) => {
err: err.map(format_parse_err).or(res.err), let mut res = it.expand(db, macro_call_id, arg, span);
} fixup::reverse_fixups(&mut res.value, &undo_info);
res
}
_ => unreachable!(),
};
(ExpandResult { value: res.value, err: res.err }, span)
} }
}; };
if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() {
// FIXME: We should report both errors!
err = error.clone().or(err);
}
// Skip checking token tree limit for include! macro call // Skip checking token tree limit for include! macro call
if !loc.def.is_include() { if !loc.def.is_include() {
// Set a hard limit for the expanded tt // Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) { if let Err(value) = check_tt_count(&tt) {
return value.map(|()| { return value.map(|()| {
CowArc::Owned(tt::Subtree { CowArc::Owned(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(loc.call_site), delimiter: tt::Delimiter::invisible_spanned(span),
token_trees: Box::new([]), token_trees: Box::new([]),
}) })
}); });
@@ -600,12 +590,23 @@ fn macro_expand(
ExpandResult { value: CowArc::Owned(tt), err } ExpandResult { value: CowArc::Owned(tt), err }
} }
fn proc_macro_span(db: &dyn ExpandDatabase, ast: AstId<ast::Fn>) -> Span {
let root = db.parse_or_expand(ast.file_id);
let ast_id_map = &db.ast_id_map(ast.file_id);
let span_map = &db.span_map(ast.file_id);
let node = ast_id_map.get(ast.value).to_node(&root);
let range = ast::HasName::name(&node)
.map_or_else(|| node.syntax().text_range(), |name| name.syntax().text_range());
span_map.span_for_range(range)
}
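// Editor's note on the firewall shape (toy dependency diagram, hedged):
// salsa memoizes this query, so `expand_proc_macro` depends on the returned
// `Span` value rather than on the parse tree itself.
//
//     parse(file)          -> Tree   // invalidated on every edit
//     proc_macro_span(ast) -> Span   // recomputed on edits, but its value
//                                    // only changes when the name moves
//     expand_proc_macro(_)           // re-runs only when that Span changes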
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> { fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id); let loc = db.lookup_intern_macro_call(id);
let (macro_arg, undo_info) = db.macro_arg(id).value; let (macro_arg, undo_info, span) = db.macro_arg(id);
let expander = match loc.def.kind { let (expander, ast) = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander, MacroDefKind::ProcMacro(expander, _, ast) => (expander, ast),
_ => unreachable!(), _ => unreachable!(),
}; };
@@ -614,22 +615,25 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
_ => None, _ => None,
}; };
let ExpandResult { value: mut tt, err } = expander.expand( let ExpandResult { value: mut tt, err } = {
db, let span = db.proc_macro_span(ast);
loc.def.krate, expander.expand(
loc.krate, db,
&macro_arg, loc.def.krate,
attr_arg, loc.krate,
span_with_def_site_ctxt(db, loc.def.span, id), &macro_arg,
span_with_call_site_ctxt(db, loc.def.span, id), attr_arg,
span_with_mixed_site_ctxt(db, loc.def.span, id), span_with_def_site_ctxt(db, span, id),
); span_with_call_site_ctxt(db, span, id),
span_with_mixed_site_ctxt(db, span, id),
)
};
// Set a hard limit for the expanded tt // Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) { if let Err(value) = check_tt_count(&tt) {
return value.map(|()| { return value.map(|()| {
Arc::new(tt::Subtree { Arc::new(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(loc.call_site), delimiter: tt::Delimiter::invisible_spanned(span),
token_trees: Box::new([]), token_trees: Box::new([]),
}) })
}); });

View file

@@ -29,6 +29,7 @@ impl DeclarativeMacroExpander {
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
tt: tt::Subtree, tt: tt::Subtree,
call_id: MacroCallId, call_id: MacroCallId,
span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(call_id); let loc = db.lookup_intern_macro_call(call_id);
let toolchain = db.toolchain(loc.def.krate); let toolchain = db.toolchain(loc.def.krate);
@@ -45,7 +46,7 @@ impl DeclarativeMacroExpander {
}); });
match self.mac.err() { match self.mac.err() {
Some(_) => ExpandResult::new( Some(_) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }), tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
ExpandError::MacroDefinition, ExpandError::MacroDefinition,
), ),
None => self None => self
@@ -54,7 +55,7 @@ impl DeclarativeMacroExpander {
&tt, &tt,
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency), |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
new_meta_vars, new_meta_vars,
loc.call_site, span,
) )
.map_err(Into::into), .map_err(Into::into),
} }
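
When the macro definition itself is broken, the expander above still returns a value: an empty subtree spanned at the call site, paired with `ExpandError::MacroDefinition`. A sketch of that `ExpandResult` recover-with-a-value pattern, with simplified types:

#[derive(Debug)]
struct ExpandResult<T> { value: T, err: Option<String> }

impl<T> ExpandResult<T> {
    fn new(value: T, err: String) -> Self { ExpandResult { value, err: Some(err) } }
    fn ok(value: T) -> Self { ExpandResult { value, err: None } }
}

fn expand(def_is_broken: bool, input: &str) -> ExpandResult<String> {
    if def_is_broken {
        // Recover with an empty expansion plus an error, as in the match above.
        return ExpandResult::new(String::new(), "macro definition has errors".to_owned());
    }
    ExpandResult::ok(format!("expanded({input})"))
}

fn main() {
    let ok = expand(false, "foo");
    assert_eq!(ok.value, "expanded(foo)");
    let broken = expand(true, "foo");
    assert!(broken.value.is_empty() && broken.err.is_some());
}

Expansion never fails hard; downstream consumers always get something usable to keep analyzing.
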

View file

@ -19,7 +19,7 @@
//! //!
//! See the full discussion: <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros> //! See the full discussion: <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId; use base_db::CrateId;
use span::Span; use span::SyntaxContextId;
use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent}; use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use triomphe::Arc; use triomphe::Arc;
@ -27,22 +27,20 @@ use crate::{
ast::{self, AstNode}, ast::{self, AstNode},
db::ExpandDatabase, db::ExpandDatabase,
mod_path::ModPath, mod_path::ModPath,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern, AstId, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern,
MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
}; };
pub fn expand_eager_macro_input( pub fn expand_eager_macro_input(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
krate: CrateId, krate: CrateId,
macro_call: InFile<ast::MacroCall>, macro_call: &ast::MacroCall,
ast_id: AstId<ast::MacroCall>,
def: MacroDefId, def: MacroDefId,
call_site: Span, call_site: SyntaxContextId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> { ) -> ExpandResult<Option<MacroCallId>> {
let ast_map = db.ast_id_map(macro_call.file_id); let expand_to = ExpandTo::from_call_site(macro_call);
// the expansion which the ast id map is built upon has no whitespace, so the offsets are wrong as macro_call is from the token tree that has whitespace!
let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
let expand_to = ExpandTo::from_call_site(&macro_call.value);
// Note: // Note:
// When `lazy_expand` is called, its *parent* file must already exist. // When `lazy_expand` is called, its *parent* file must already exist.
@ -51,11 +49,11 @@ pub fn expand_eager_macro_input(
let arg_id = MacroCallLoc { let arg_id = MacroCallLoc {
def, def,
krate, krate,
eager: None, kind: MacroCallKind::FnLike { ast_id, expand_to: ExpandTo::Expr, eager: None },
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr }, ctxt: call_site,
call_site,
} }
.intern(db); .intern(db);
let (_, _, span) = db.macro_arg(arg_id);
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file()); db.parse_macro_expansion(arg_id.as_macro_file());
@ -82,16 +80,24 @@ pub fn expand_eager_macro_input(
return ExpandResult { value: None, err }; return ExpandResult { value: None, err };
}; };
let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, call_site); let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, span);
subtree.delimiter.kind = crate::tt::DelimiterKind::Invisible; subtree.delimiter.kind = crate::tt::DelimiterKind::Invisible;
let loc = MacroCallLoc { let loc = MacroCallLoc {
def, def,
krate, krate,
eager: Some(Arc::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })), kind: MacroCallKind::FnLike {
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to }, ast_id,
call_site, expand_to,
eager: Some(Arc::new(EagerCallInfo {
arg: Arc::new(subtree),
arg_id,
error: err.clone(),
span,
})),
},
ctxt: call_site,
}; };
ExpandResult { value: Some(loc.intern(db)), err } ExpandResult { value: Some(loc.intern(db)), err }
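
Eager expansion interns the argument as its own `ExpandTo::Expr` call first, so the outer macro only ever sees fully expanded input. A toy model of that inside-out order, using a pretend `concat` macro over strings:

#[derive(Debug)]
enum Expr {
    Lit(String),
    // A nested macro call that must be expanded before the outer one sees it.
    MacroCall { name: String, args: Vec<Expr> },
}

// Expand arguments first, then apply the (toy) macro to the expanded pieces.
fn expand(e: &Expr) -> String {
    match e {
        Expr::Lit(s) => s.clone(),
        Expr::MacroCall { name, args } => {
            let expanded: Vec<String> = args.iter().map(expand).collect();
            match name.as_str() {
                // like an eager macro, `concat` sees only fully expanded inputs
                "concat" => expanded.concat(),
                _ => format!("{name}!({})", expanded.join(", ")),
            }
        }
    }
}

fn main() {
    // concat!("a", concat!("b", "c")) => "abc"
    let call = Expr::MacroCall {
        name: "concat".into(),
        args: vec![
            Expr::Lit("a".into()),
            Expr::MacroCall {
                name: "concat".into(),
                args: vec![Expr::Lit("b".into()), Expr::Lit("c".into())],
            },
        ],
    };
    assert_eq!(expand(&call), "abc");
}
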
@ -100,15 +106,18 @@ pub fn expand_eager_macro_input(
fn lazy_expand( fn lazy_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
def: &MacroDefId, def: &MacroDefId,
macro_call: InFile<ast::MacroCall>, macro_call: &ast::MacroCall,
ast_id: AstId<ast::MacroCall>,
krate: CrateId, krate: CrateId,
call_site: Span, call_site: SyntaxContextId,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> { ) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value); let expand_to = ExpandTo::from_call_site(macro_call);
let id = def.make_call(
let expand_to = ExpandTo::from_call_site(&macro_call.value); db,
let ast_id = macro_call.with_value(ast_id); krate,
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site); MacroCallKind::FnLike { ast_id, expand_to, eager: None },
call_site,
);
let macro_file = id.as_macro_file(); let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file) db.parse_macro_expansion(macro_file)
@ -122,7 +131,7 @@ fn eager_macro_recur(
mut offset: TextSize, mut offset: TextSize,
curr: InFile<SyntaxNode>, curr: InFile<SyntaxNode>,
krate: CrateId, krate: CrateId,
call_site: Span, call_site: SyntaxContextId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<(SyntaxNode, TextSize)>> { ) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update(); let original = curr.value.clone_for_update();
@ -172,12 +181,14 @@ fn eager_macro_recur(
continue; continue;
} }
}; };
let ast_id = db.ast_id_map(curr.file_id).ast_id(&call);
let ExpandResult { value, err } = match def.kind { let ExpandResult { value, err } = match def.kind {
MacroDefKind::BuiltInEager(..) => { MacroDefKind::BuiltInEager(..) => {
let ExpandResult { value, err } = expand_eager_macro_input( let ExpandResult { value, err } = expand_eager_macro_input(
db, db,
krate, krate,
curr.with_value(call.clone()), &call,
curr.with_value(ast_id),
def, def,
call_site, call_site,
macro_resolver, macro_resolver,
@ -207,7 +218,7 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInDerive(..) | MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => { | MacroDefKind::ProcMacro(..) => {
let ExpandResult { value: (parse, tm), err } = let ExpandResult { value: (parse, tm), err } =
lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site); lazy_expand(db, &def, &call, curr.with_value(ast_id), krate, call_site);
// replace macro inside // replace macro inside
let ExpandResult { value, err: error } = eager_macro_recur( let ExpandResult { value, err: error } = eager_macro_recur(

View file

@ -10,7 +10,7 @@ use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
use crate::{ use crate::{
db::{self, ExpandDatabase}, db::{self, ExpandDatabase},
map_node_range_up, span_for_offset, MacroFileIdExt, map_node_range_up, map_node_range_up_rooted, span_for_offset, MacroFileIdExt,
}; };
/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree. /// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
@ -38,6 +38,9 @@ impl<N: AstIdNode> AstId<N> {
pub fn to_node(&self, db: &dyn ExpandDatabase) -> N { pub fn to_node(&self, db: &dyn ExpandDatabase) -> N {
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
} }
pub fn to_range(&self, db: &dyn ExpandDatabase) -> TextRange {
self.to_ptr(db).text_range()
}
pub fn to_in_file_node(&self, db: &dyn ExpandDatabase) -> crate::InFile<N> { pub fn to_in_file_node(&self, db: &dyn ExpandDatabase) -> crate::InFile<N> {
crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))) crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
} }
@ -49,6 +52,9 @@ impl<N: AstIdNode> AstId<N> {
pub type ErasedAstId = crate::InFile<ErasedFileAstId>; pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
impl ErasedAstId { impl ErasedAstId {
pub fn to_range(&self, db: &dyn ExpandDatabase) -> TextRange {
self.to_ptr(db).text_range()
}
pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> SyntaxNodePtr { pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> SyntaxNodePtr {
db.ast_id_map(self.file_id).get_erased(self.value) db.ast_id_map(self.file_id).get_erased(self.value)
} }
@ -173,24 +179,8 @@ impl InFile<&SyntaxNode> {
/// ///
/// For attributes and derives, this will point back to the attribute only. /// For attributes and derives, this will point back to the attribute only.
/// For the entire item use [`InFile::original_file_range_full`]. /// For the entire item use [`InFile::original_file_range_full`].
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { pub fn original_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() { self.map(SyntaxNode::text_range).original_node_file_range_rooted(db)
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
} }
/// Falls back to the macro call range if the node cannot be mapped up fully. /// Falls back to the macro call range if the node cannot be mapped up fully.
@ -198,23 +188,7 @@ impl InFile<&SyntaxNode> {
self, self,
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
) -> FileRange { ) -> FileRange {
match self.file_id.repr() { self.map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range_with_body(db)
}
}
} }
/// Attempts to map the syntax node back up its macro calls. /// Attempts to map the syntax node back up its macro calls.
@ -222,17 +196,10 @@ impl InFile<&SyntaxNode> {
self, self,
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContextId)> { ) -> Option<(FileRange, SyntaxContextId)> {
match self.file_id.repr() { self.map(SyntaxNode::text_range).original_node_file_range_opt(db)
HirFileIdRepr::FileId(file_id) => {
Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
}
}
} }
pub fn original_syntax_node( pub fn original_syntax_node_rooted(
self, self,
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
) -> Option<InRealFile<SyntaxNode>> { ) -> Option<InRealFile<SyntaxNode>> {
@ -242,25 +209,21 @@ impl InFile<&SyntaxNode> {
HirFileIdRepr::FileId(file_id) => { HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.clone() }) return Some(InRealFile { file_id, value: self.value.clone() })
} }
HirFileIdRepr::MacroFile(m) => m, HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m,
_ => return None,
}; };
if !file_id.is_attr_macro(db) {
return None;
}
let (FileRange { file_id, range }, ctx) = let FileRange { file_id, range } =
map_node_range_up(db, &db.expansion_span_map(file_id), self.value.text_range())?; map_node_range_up_rooted(db, &db.expansion_span_map(file_id), self.value.text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behavior.
if !ctx.is_root() {
return None;
}
let anc = db.parse(file_id).syntax_node().covering_element(range);
let kind = self.value.kind(); let kind = self.value.kind();
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? let value = db
let value = anc.ancestors().find(|it| it.kind() == kind)?; .parse(file_id)
.syntax_node()
.covering_element(range)
.ancestors()
.take_while(|it| it.text_range() == range)
.find(|it| it.kind() == kind)?;
Some(InRealFile::new(file_id, value)) Some(InRealFile::new(file_id, value))
} }
} }
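
`original_syntax_node_rooted` now walks the ancestors of the covering element only while they span exactly the mapped-up range, and requires the original node's kind, which replaces the old kind-only search. The same walk over a toy ancestor list:

#[derive(Clone, Copy, PartialEq, Debug)]
enum Kind { Fn, Name, Token }

struct Node { kind: Kind, range: (u32, u32) }

// `ancestors` is ordered innermost to outermost, like syntax tree ancestors.
fn find_original(ancestors: &[Node], range: (u32, u32), kind: Kind) -> Option<&Node> {
    ancestors
        .iter()
        // stop as soon as an ancestor is wider than the mapped range...
        .take_while(|n| n.range == range)
        // ...and among the exact-range ancestors require the original kind
        .find(|n| n.kind == kind)
}

fn main() {
    let ancestors = [
        Node { kind: Kind::Token, range: (4, 7) },
        Node { kind: Kind::Name, range: (4, 7) },
        Node { kind: Kind::Fn, range: (0, 20) },
    ];
    assert!(find_original(&ancestors, (4, 7), Kind::Name).is_some());
    // a kind that only exists outside the exact range is rejected
    assert!(find_original(&ancestors, (4, 7), Kind::Fn).is_none());
}
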
@ -355,8 +318,8 @@ impl InFile<TextRange> {
match self.file_id.repr() { match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value }, HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => { HirFileIdRepr::MacroFile(mac_file) => {
match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) { match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some((it, SyntaxContextId::ROOT)) => it, Some(it) => it,
_ => { _ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db) loc.kind.original_call_range(db)
@ -366,6 +329,24 @@ impl InFile<TextRange> {
} }
} }
pub fn original_node_file_range_with_macro_call_body(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range_with_body(db)
}
}
}
}
}
pub fn original_node_file_range_opt( pub fn original_node_file_range_opt(
self, self,
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
@ -395,18 +376,12 @@ impl<N: AstNode> InFile<N> {
return None; return None;
} }
let (FileRange { file_id, range }, ctx) = map_node_range_up( let FileRange { file_id, range } = map_node_range_up_rooted(
db, db,
&db.expansion_span_map(file_id), &db.expansion_span_map(file_id),
self.value.syntax().text_range(), self.value.syntax().text_range(),
)?; )?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if !ctx.is_root() {
return None;
}
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let anc = db.parse(file_id).syntax_node().covering_element(range); let anc = db.parse(file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?; let value = anc.ancestors().find_map(N::cast)?;

View file

@ -3,7 +3,7 @@
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec; use smallvec::SmallVec;
use span::{ErasedFileAstId, Span, SpanAnchor, SpanData, FIXUP_ERASED_FILE_AST_ID_MARKER}; use span::{ErasedFileAstId, Span, SpanAnchor, FIXUP_ERASED_FILE_AST_ID_MARKER};
use stdx::never; use stdx::never;
use syntax::{ use syntax::{
ast::{self, AstNode, HasLoopBody}, ast::{self, AstNode, HasLoopBody},
@ -23,7 +23,7 @@ use crate::{
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub(crate) struct SyntaxFixups { pub(crate) struct SyntaxFixups {
pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>, pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
pub(crate) remove: FxHashSet<SyntaxNode>, pub(crate) remove: FxHashSet<SyntaxElement>,
pub(crate) undo_info: SyntaxFixupUndoInfo, pub(crate) undo_info: SyntaxFixupUndoInfo,
} }
@ -51,13 +51,13 @@ pub(crate) fn fixup_syntax(
call_site: Span, call_site: Span,
) -> SyntaxFixups { ) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default(); let mut append = FxHashMap::<SyntaxElement, _>::default();
let mut remove = FxHashSet::<SyntaxNode>::default(); let mut remove = FxHashSet::<SyntaxElement>::default();
let mut preorder = node.preorder(); let mut preorder = node.preorder();
let mut original = Vec::new(); let mut original = Vec::new();
let dummy_range = FIXUP_DUMMY_RANGE; let dummy_range = FIXUP_DUMMY_RANGE;
let fake_span = |range| { let fake_span = |range| {
let span = span_map.span_for_range(range); let span = span_map.span_for_range(range);
SpanData { Span {
range: dummy_range, range: dummy_range,
anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor }, anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
ctx: span.ctx, ctx: span.ctx,
@ -68,7 +68,7 @@ pub(crate) fn fixup_syntax(
let node_range = node.text_range(); let node_range = node.text_range();
if can_handle_error(&node) && has_error_to_handle(&node) { if can_handle_error(&node) && has_error_to_handle(&node) {
remove.insert(node.clone()); remove.insert(node.clone().into());
// the node contains an error node, we have to completely replace it by something valid // the node contains an error node, we have to completely replace it by something valid
let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site); let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site);
let idx = original.len() as u32; let idx = original.len() as u32;
@ -76,7 +76,7 @@ pub(crate) fn fixup_syntax(
let span = span_map.span_for_range(node_range); let span = span_map.span_for_range(node_range);
let replacement = Leaf::Ident(Ident { let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(), text: "__ra_fixup".into(),
span: SpanData { span: Span {
range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END), range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor }, anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
ctx: span.ctx, ctx: span.ctx,
@ -305,8 +305,8 @@ pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo)
tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
|| tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
) { ) {
tt.delimiter.close = SpanData::DUMMY; tt.delimiter.close = Span::DUMMY;
tt.delimiter.open = SpanData::DUMMY; tt.delimiter.open = Span::DUMMY;
} }
reverse_fixups_(tt, undo_info); reverse_fixups_(tt, undo_info);
} }
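
The guard above resets delimiter spans that still carry the dummy fixup AST id. A reduced model of the fixup round trip, where a sentinel anchor id marks synthetic tokens so `reverse_fixups` can strip them again (types invented for illustration):

const FIXUP_DUMMY_ID: u32 = u32::MAX; // stands in for FIXUP_DUMMY_AST_ID

#[derive(Debug, PartialEq)]
struct Token { text: String, anchor: u32 }

fn fixup(tokens: &mut Vec<Token>) {
    // pretend the parser needed a trailing identifier to recover
    tokens.push(Token { text: "__ra_fixup".into(), anchor: FIXUP_DUMMY_ID });
}

fn reverse_fixups(tokens: &mut Vec<Token>) {
    // anything carrying the sentinel anchor was synthesized and is dropped
    tokens.retain(|t| t.anchor != FIXUP_DUMMY_ID);
}

fn main() {
    let mut tokens = vec![Token { text: "if".into(), anchor: 3 }];
    fixup(&mut tokens);
    reverse_fixups(&mut tokens);
    assert_eq!(tokens, vec![Token { text: "if".into(), anchor: 3 }]);
}
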

View file

@ -65,7 +65,7 @@ pub(super) fn apply_mark(
return apply_mark_internal(db, ctxt, call_id, transparency); return apply_mark_internal(db, ctxt, call_id, transparency);
} }
let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site.ctx; let call_site_ctxt = db.lookup_intern_macro_call(call_id).ctxt;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db) call_site_ctxt.normalize_to_macros_2_0(db)
} else { } else {
@ -205,11 +205,10 @@ pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
let id = e.key; let id = e.key;
let expn_data = e.value.as_ref().unwrap(); let expn_data = e.value.as_ref().unwrap();
s.push_str(&format!( s.push_str(&format!(
"\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}", "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, kind: {:?}",
id, id,
expn_data.kind.file_id(), expn_data.kind.file_id(),
expn_data.call_site, expn_data.ctxt,
SyntaxContextId::ROOT, // FIXME expn_data.def_site,
expn_data.kind.descr(), expn_data.kind.descr(),
)); ));
} }

View file

@ -22,16 +22,19 @@ pub mod proc_macro;
pub mod quote; pub mod quote;
pub mod span_map; pub mod span_map;
mod cfg_process;
mod fixup; mod fixup;
use attrs::collect_attrs; use attrs::collect_attrs;
use rustc_hash::FxHashMap;
use triomphe::Arc; use triomphe::Arc;
use std::{fmt, hash::Hash}; use std::{fmt, hash::Hash};
use base_db::{salsa::impl_intern_value_trivial, CrateId, Edition, FileId}; use base_db::{salsa::impl_intern_value_trivial, CrateId, Edition, FileId};
use either::Either; use either::Either;
use span::{ErasedFileAstId, FileRange, HirFileIdRepr, Span, SyntaxContextData, SyntaxContextId}; use span::{
ErasedFileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor, SyntaxContextData, SyntaxContextId,
};
use syntax::{ use syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
SyntaxNode, SyntaxToken, TextRange, TextSize, SyntaxNode, SyntaxToken, TextRange, TextSize,
@ -167,13 +170,8 @@ impl fmt::Display for ExpandError {
pub struct MacroCallLoc { pub struct MacroCallLoc {
pub def: MacroDefId, pub def: MacroDefId,
pub krate: CrateId, pub krate: CrateId,
/// Some if this is a macro call for an eager macro. Note that this is `None`
/// for the eager input macro file.
// FIXME: This is being interned; subtrees can quickly end up differing just slightly,
// causing leakage problems here
eager: Option<Arc<EagerCallInfo>>,
pub kind: MacroCallKind, pub kind: MacroCallKind,
pub call_site: Span, pub ctxt: SyntaxContextId,
} }
impl_intern_value_trivial!(MacroCallLoc); impl_intern_value_trivial!(MacroCallLoc);
@ -184,7 +182,6 @@ pub struct MacroDefId {
pub kind: MacroDefKind, pub kind: MacroDefKind,
pub local_inner: bool, pub local_inner: bool,
pub allow_internal_unsafe: bool, pub allow_internal_unsafe: bool,
pub span: Span,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -204,6 +201,8 @@ pub struct EagerCallInfo {
/// Call id of the eager macro's input file (this is the macro file for its fully expanded input). /// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
arg_id: MacroCallId, arg_id: MacroCallId,
error: Option<ExpandError>, error: Option<ExpandError>,
/// TODO: Doc
span: Span,
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -211,6 +210,11 @@ pub enum MacroCallKind {
FnLike { FnLike {
ast_id: AstId<ast::MacroCall>, ast_id: AstId<ast::MacroCall>,
expand_to: ExpandTo, expand_to: ExpandTo,
/// Some if this is a macro call for an eager macro. Note that this is `None`
/// for the eager input macro file.
// FIXME: This is being interned; subtrees can quickly end up differing just slightly,
// causing leakage problems here
eager: Option<Arc<EagerCallInfo>>,
}, },
Derive { Derive {
ast_id: AstId<ast::Adt>, ast_id: AstId<ast::Adt>,
@ -272,7 +276,7 @@ impl HirFileIdExt for HirFileId {
HirFileIdRepr::MacroFile(file) => { HirFileIdRepr::MacroFile(file) => {
let loc = db.lookup_intern_macro_call(file.macro_call_id); let loc = db.lookup_intern_macro_call(file.macro_call_id);
if loc.def.is_include() { if loc.def.is_include() {
if let Some(eager) = &loc.eager { if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind {
if let Ok(it) = builtin_fn_macro::include_input_to_file_id( if let Ok(it) = builtin_fn_macro::include_input_to_file_id(
db, db,
file.macro_call_id, file.macro_call_id,
@ -319,6 +323,9 @@ impl HirFileIdExt for HirFileId {
} }
pub trait MacroFileIdExt { pub trait MacroFileIdExt {
fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool;
fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool;
fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId>;
fn expansion_level(self, db: &dyn ExpandDatabase) -> u32; fn expansion_level(self, db: &dyn ExpandDatabase) -> u32;
/// If this is a macro call, returns the syntax node of the call. /// If this is a macro call, returns the syntax node of the call.
fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode>; fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode>;
@ -385,31 +392,47 @@ impl MacroFileIdExt for MacroFileId {
db.lookup_intern_macro_call(self.macro_call_id).def.is_include() db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
} }
fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_include_like()
}
fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_env_or_option_env()
}
fn is_eager(&self, db: &dyn ExpandDatabase) -> bool { fn is_eager(&self, db: &dyn ExpandDatabase) -> bool {
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); let loc = db.lookup_intern_macro_call(self.macro_call_id);
matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
} }
fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
let loc = db.lookup_intern_macro_call(self.macro_call_id);
match &loc.kind {
MacroCallKind::FnLike { eager, .. } => eager.as_ref().map(|it| it.arg_id),
_ => None,
}
}
fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool { fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool {
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); let loc = db.lookup_intern_macro_call(self.macro_call_id);
matches!(loc.kind, MacroCallKind::Attr { .. }) matches!(loc.kind, MacroCallKind::Attr { .. })
} }
fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool { fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); let loc = db.lookup_intern_macro_call(self.macro_call_id);
loc.def.is_attribute_derive() loc.def.is_attribute_derive()
} }
} }
impl MacroDefId { impl MacroDefId {
pub fn as_lazy_macro( pub fn make_call(
self, self,
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
krate: CrateId, krate: CrateId,
kind: MacroCallKind, kind: MacroCallKind,
call_site: Span, ctxt: SyntaxContextId,
) -> MacroCallId { ) -> MacroCallId {
MacroCallLoc { def: self, krate, eager: None, kind, call_site }.intern(db) MacroCallLoc { def: self, krate, kind, ctxt }.intern(db)
} }
pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> { pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> {
@ -474,6 +497,14 @@ impl MacroDefId {
pub fn is_include(&self) -> bool { pub fn is_include(&self) -> bool {
matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include()) matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include())
} }
pub fn is_include_like(&self) -> bool {
matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include_like())
}
pub fn is_env_or_option_env(&self) -> bool {
matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_env_or_option_env())
}
} }
impl MacroCallLoc { impl MacroCallLoc {
@ -531,7 +562,7 @@ impl MacroCallLoc {
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
) -> Option<FileId> { ) -> Option<FileId> {
if self.def.is_include() { if self.def.is_include() {
if let Some(eager) = &self.eager { if let MacroCallKind::FnLike { eager: Some(eager), .. } = &self.kind {
if let Ok(it) = if let Ok(it) =
builtin_fn_macro::include_input_to_file_id(db, macro_call_id, &eager.arg) builtin_fn_macro::include_input_to_file_id(db, macro_call_id, &eager.arg)
{ {
@ -655,7 +686,7 @@ impl MacroCallKind {
/// ExpansionInfo mainly describes how to map text range between src and expanded macro /// ExpansionInfo mainly describes how to map text range between src and expanded macro
// FIXME: can be expensive to create, we should check the use sites and maybe replace them with // FIXME: can be expensive to create, we should check the use sites and maybe replace them with
// simpler function calls if the map is only used once // simpler function calls if the map is only used once
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExpansionInfo { pub struct ExpansionInfo {
pub expanded: InMacroFile<SyntaxNode>, pub expanded: InMacroFile<SyntaxNode>,
/// The argument TokenTree or item for attributes /// The argument TokenTree or item for attributes
@ -683,6 +714,24 @@ impl ExpansionInfo {
} }
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call. /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
///
/// Note this does a linear search through the entire backing vector of the spanmap.
pub fn map_range_down_exact(
&self,
span: Span,
) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
let tokens = self
.exp_map
.ranges_with_span_exact(span)
.flat_map(move |range| self.expanded.value.covering_element(range).into_token());
Some(InMacroFile::new(self.expanded.file_id, tokens))
}
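
Both `map_range_down_exact` and `map_range_down` note that lookup is a linear scan over the span map's backing vector. A sketch of what `ranges_with_span_exact` plausibly does, assuming the map stores sorted `(start offset, span)` pairs where each entry covers up to the next start (the encoding here is hypothetical):

fn ranges_with_span_exact(
    backing: &[(u32, u32)], // (start offset, span id), hypothetical encoding
    end: u32,               // end offset of the mapped text
    query: u32,
) -> impl Iterator<Item = (u32, u32)> + '_ {
    backing.iter().enumerate().filter_map(move |(i, &(start, span))| {
        // an entry's range runs until the next entry's start (or the map's end)
        let next = backing.get(i + 1).map_or(end, |&(s, _)| s);
        (span == query).then_some((start, next))
    })
}

fn main() {
    let map = [(0, 7), (3, 9), (8, 7)];
    let hits: Vec<_> = ranges_with_span_exact(&map, 12, 7).collect();
    assert_eq!(hits, vec![(0, 3), (8, 12)]);
}
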
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
/// Unlike [`map_range_down_exact`], this will consider spans that contain the given span.
///
/// Note this does a linear search through the entire backing vector of the spanmap.
pub fn map_range_down( pub fn map_range_down(
&self, &self,
span: Span, span: Span,
@ -739,7 +788,7 @@ impl ExpansionInfo {
InFile::new( InFile::new(
self.arg.file_id, self.arg.file_id,
arg_map arg_map
.ranges_with_span(span) .ranges_with_span_exact(span)
.filter(|range| range.intersect(arg_range).is_some()) .filter(|range| range.intersect(arg_range).is_some())
.collect(), .collect(),
) )
@ -757,7 +806,7 @@ impl ExpansionInfo {
let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value; let (macro_arg, _, _) = db.macro_arg(macro_file.macro_call_id);
let def = loc.def.ast_id().left().and_then(|id| { let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) { let def_tt = match id.to_node(db) {
@ -793,7 +842,34 @@ impl ExpansionInfo {
} }
} }
/// Maps up the text range out of the expansion hierarchy back into the original file it's from,
/// only considering the root spans contained.
/// Unlike [`map_node_range_up`], this will not return `None` if syntax contexts differ: spans with
/// non-root contexts are simply skipped (spans from differing anchors still yield `None`).
pub fn map_node_range_up_rooted(
db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap,
range: TextRange,
) -> Option<FileRange> {
let mut spans = exp_map.spans_for_range(range).filter(|span| span.ctx.is_root());
let Span { range, anchor, ctx: _ } = spans.next()?;
let mut start = range.start();
let mut end = range.end();
for span in spans {
if span.anchor != anchor {
return None;
}
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset })
}
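
The function above keeps only root-context spans, requires a single shared anchor, takes the union of the ranges, and finally shifts by the anchor's offset in the original file. A condensed, runnable model of the same steps (simplified types; `anchor_offset` stands in for the ast-id-map lookup):

#[derive(Clone, Copy, PartialEq, Debug)]
struct Span { anchor: u32, start: u32, end: u32, root_ctx: bool }

fn map_up(spans: &[Span], anchor_offset: impl Fn(u32) -> u32) -> Option<(u32, u32, u32)> {
    let mut iter = spans.iter().filter(|s| s.root_ctx);
    let first = *iter.next()?;
    let (mut start, mut end) = (first.start, first.end);
    for s in iter {
        if s.anchor != first.anchor {
            return None; // spans from different anchors cannot be merged
        }
        start = start.min(s.start);
        end = end.max(s.end);
    }
    let off = anchor_offset(first.anchor);
    Some((first.anchor, start + off, end + off))
}

fn main() {
    let spans = [
        Span { anchor: 1, start: 4, end: 6, root_ctx: true },
        Span { anchor: 1, start: 9, end: 12, root_ctx: true },
        // non-root (macro-introduced) spans are skipped, not fatal:
        Span { anchor: 2, start: 0, end: 1, root_ctx: false },
    ];
    assert_eq!(map_up(&spans, |_| 100), Some((1, 104, 112)));
}
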
/// Maps up the text range out of the expansion hierarchy back into the original file it's from. /// Maps up the text range out of the expansion hierarchy back into the original file it's from.
///
/// This will return `None` if any anchors or syntax contexts differ.
pub fn map_node_range_up( pub fn map_node_range_up(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap, exp_map: &ExpansionSpanMap,
@ -819,6 +895,29 @@ pub fn map_node_range_up(
)) ))
} }
/// Maps up the text range out of the expansion hierarchy back into the original file it's from.
/// This version will aggregate the ranges of all spans with the same anchor and syntax context.
pub fn map_node_range_up_aggregated(
db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap,
range: TextRange,
) -> FxHashMap<(SpanAnchor, SyntaxContextId), TextRange> {
let mut map = FxHashMap::default();
for span in exp_map.spans_for_range(range) {
let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range);
*range = TextRange::new(
range.start().min(span.range.start()),
range.end().max(span.range.end()),
);
}
for ((anchor, _), range) in &mut map {
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
*range += anchor_offset;
}
map
}
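
`map_node_range_up_aggregated` instead groups spans by `(anchor, ctx)` and widens each group's range with min/max before shifting by the anchor offset. A sketch of that aggregation over plain tuples, keeping just the entry-API grouping:

use std::collections::HashMap;

// input entries: (anchor, ctx, (start, end)); offsets omitted for brevity
fn aggregate(spans: &[(u32, u32, (u32, u32))]) -> HashMap<(u32, u32), (u32, u32)> {
    let mut map: HashMap<(u32, u32), (u32, u32)> = HashMap::new();
    for &(anchor, ctx, (start, end)) in spans {
        let range = map.entry((anchor, ctx)).or_insert((start, end));
        range.0 = range.0.min(start);
        range.1 = range.1.max(end);
    }
    map
}

fn main() {
    let spans = [(1, 0, (4, 6)), (1, 0, (9, 12)), (2, 0, (0, 3))];
    let agg = aggregate(&spans);
    assert_eq!(agg[&(1, 0)], (4, 12));
    assert_eq!(agg[&(2, 0)], (0, 3));
}
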
/// Looks up the span at the given offset. /// Looks up the span at the given offset.
pub fn span_for_offset( pub fn span_for_offset(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,

View file

@ -266,10 +266,11 @@ mod tests {
let quoted = quote!(DUMMY =>#a); let quoted = quote!(DUMMY =>#a);
assert_eq!(quoted.to_string(), "hello"); assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:?}"); let t = format!("{quoted:#?}");
expect![[r#" expect![[r#"
SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } SUBTREE $$ 937550:0@0..0#0 937550:0@0..0#0
IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t); IDENT hello 937550:0@0..0#0"#]]
.assert_eq(&t);
} }
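
The updated expectation uses a compact span rendering, `937550:0@0..0#0`, i.e. `file:ast_id@start..end#ctx`. A guess at a matching formatter over simplified fields; the real `Span` carries richer types:

use std::fmt;

struct Span { file: u32, ast_id: u32, start: u32, end: u32, ctx: u32 }

impl fmt::Display for Span {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // file:ast_id@start..end#ctx, matching the expect![] output above
        write!(f, "{}:{}@{}..{}#{}", self.file, self.ast_id, self.start, self.end, self.ctx)
    }
}

fn main() {
    let s = Span { file: 937550, ast_id: 0, start: 0, end: 0, ctx: 0 };
    assert_eq!(s.to_string(), "937550:0@0..0#0");
}
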
#[test] #[test]

View file

@ -1,13 +1,15 @@
//! Span maps for real files and macro expansions. //! Span maps for real files and macro expansions.
use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span};
use syntax::{AstNode, TextRange}; use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId};
use stdx::TupleExt;
use syntax::{ast, AstNode, TextRange};
use triomphe::Arc; use triomphe::Arc;
pub use span::RealSpanMap; pub use span::RealSpanMap;
use crate::db::ExpandDatabase; use crate::{attrs::collect_attrs, db::ExpandDatabase};
pub type ExpansionSpanMap = span::SpanMap<Span>; pub type ExpansionSpanMap = span::SpanMap<SyntaxContextId>;
/// Spanmap for a macro file or a real file /// Spanmap for a macro file or a real file
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
@ -82,13 +84,54 @@ pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<Rea
let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)]; let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(file_id.into()); let ast_id_map = db.ast_id_map(file_id.into());
let tree = db.parse(file_id).tree(); let tree = db.parse(file_id).tree();
// FIXME: Descend into modules and other item containing items that are not annotated with attributes // This is an incrementality layer. Basically we can't use absolute ranges for our spans as that
// and allocate pairs for those as well. This gives us finer grained span anchors resulting in // would mean we'd invalidate everything whenever we type. So instead we make the text ranges
// better incrementality // relative to some AstIds, reducing the risk of invalidation as typing somewhere no longer
pairs.extend( // affects all following spans in the file.
tree.items() // There is some stuff to bear in mind here though, for one, the more "anchors" we create, the
.map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())), // easier it gets to invalidate things again as spans are as stable as their anchor's ID.
); // The other problem is proc-macros. Proc-macros have a `Span::join` api that allows them
// to join two spans that come from the same file. rust-analyzer's proc-macro server
// can only join two spans if they belong to the same anchor though, as the spans are relative
// to that anchor. To do cross-anchor joining we'd need access to the ast id map to resolve
// them again, something we might get access to in the future. But even then, proc-macros doing
// this kind of joining makes them only as stable as the AstIdMap (which basically changes on
// every edit of the file)…
let item_to_entry =
|item: ast::Item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase());
// Top level items make for great anchors as they are the most stable and a decent boundary
pairs.extend(tree.items().map(item_to_entry));
// Unfortunately, assoc items are very common in Rust, so descend into those as well and make
// them anchors too, but only if they have no attributes attached, as those might be proc-macros
// and using different anchors inside of them will prevent spans from being joinable.
tree.items().for_each(|item| match &item {
ast::Item::ExternBlock(it)
if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) =>
{
if let Some(extern_item_list) = it.extern_item_list() {
pairs.extend(
extern_item_list.extern_items().map(ast::Item::from).map(item_to_entry),
);
}
}
ast::Item::Impl(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
if let Some(assoc_item_list) = it.assoc_item_list() {
pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
}
}
ast::Item::Module(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
if let Some(item_list) = it.item_list() {
pairs.extend(item_list.items().map(item_to_entry));
}
}
ast::Item::Trait(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
if let Some(assoc_item_list) = it.assoc_item_list() {
pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
}
}
_ => (),
});
Arc::new(RealSpanMap::from_file( Arc::new(RealSpanMap::from_file(
file_id, file_id,
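
The long comment above argues for anchoring span ranges to AST IDs so that typing invalidates as little as possible. A runnable illustration of the relative-range idea, with anchors reduced to plain start offsets (everything here is a hypothetical stand-in for the real `RealSpanMap`):

#[derive(Clone, Copy, Debug, PartialEq)]
struct RelSpan { anchor: usize, start: u32, end: u32 }

fn to_relative(anchor_starts: &[u32], abs: (u32, u32)) -> RelSpan {
    // pick the closest anchor at or before the range start
    let (anchor, &base) = anchor_starts
        .iter()
        .enumerate()
        .take_while(|&(_, &s)| s <= abs.0)
        .last()
        .expect("anchor 0 always exists at offset 0");
    RelSpan { anchor, start: abs.0 - base, end: abs.1 - base }
}

fn to_absolute(anchor_starts: &[u32], span: RelSpan) -> (u32, u32) {
    let base = anchor_starts[span.anchor];
    (span.start + base, span.end + base)
}

fn main() {
    let mut anchors = vec![0, 100, 250]; // e.g. file start plus two items
    let span = to_relative(&anchors, (260, 270));
    assert_eq!(span, RelSpan { anchor: 2, start: 10, end: 20 });
    // Typing 5 characters inside the first item shifts later anchors only;
    // the relative span survives unchanged.
    anchors[1] += 5;
    anchors[2] += 5;
    assert_eq!(to_absolute(&anchors, span), (265, 275));
}

More anchors mean smaller invalidation regions but, as the comment notes, also more entries whose stability depends on the AstIdMap, which is the trade-off being balanced here.
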

View file

@ -23,10 +23,10 @@ oorandom = "11.1.3"
tracing.workspace = true tracing.workspace = true
rustc-hash.workspace = true rustc-hash.workspace = true
scoped-tls = "1.0.0" scoped-tls = "1.0.0"
chalk-solve = { version = "0.96.0", default-features = false } chalk-solve.workspace = true
chalk-ir = "0.96.0" chalk-ir.workspace = true
chalk-recursive = { version = "0.96.0", default-features = false } chalk-recursive.workspace = true
chalk-derive = "0.96.0" chalk-derive.workspace = true
la-arena.workspace = true la-arena.workspace = true
once_cell = "1.17.0" once_cell = "1.17.0"
triomphe.workspace = true triomphe.workspace = true

View file

@ -113,7 +113,7 @@ pub(crate) fn autoderef_step(
ty: Ty, ty: Ty,
explicit: bool, explicit: bool,
) -> Option<(AutoderefKind, Ty)> { ) -> Option<(AutoderefKind, Ty)> {
if let Some(derefed) = builtin_deref(table, &ty, explicit) { if let Some(derefed) = builtin_deref(table.db, &ty, explicit) {
Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed))) Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed)))
} else { } else {
Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?)) Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?))
@ -121,7 +121,7 @@ pub(crate) fn autoderef_step(
} }
pub(crate) fn builtin_deref<'ty>( pub(crate) fn builtin_deref<'ty>(
table: &mut InferenceTable<'_>, db: &dyn HirDatabase,
ty: &'ty Ty, ty: &'ty Ty,
explicit: bool, explicit: bool,
) -> Option<&'ty Ty> { ) -> Option<&'ty Ty> {
@ -129,7 +129,7 @@ pub(crate) fn builtin_deref<'ty>(
TyKind::Ref(.., ty) => Some(ty), TyKind::Ref(.., ty) => Some(ty),
TyKind::Raw(.., ty) if explicit => Some(ty), TyKind::Raw(.., ty) if explicit => Some(ty),
&TyKind::Adt(chalk_ir::AdtId(adt), ref substs) => { &TyKind::Adt(chalk_ir::AdtId(adt), ref substs) => {
if crate::lang_items::is_box(table.db, adt) { if crate::lang_items::is_box(db, adt) {
substs.at(Interner, 0).ty(Interner) substs.at(Interner, 0).ty(Interner)
} else { } else {
None None

View file

@ -22,7 +22,7 @@ mod pat;
mod path; mod path;
pub(crate) mod unify; pub(crate) mod unify;
use std::{convert::identity, ops::Index}; use std::{convert::identity, iter, ops::Index};
use chalk_ir::{ use chalk_ir::{
cast::Cast, fold::TypeFoldable, interner::HasInterner, DebruijnIndex, Mutability, Safety, cast::Cast, fold::TypeFoldable, interner::HasInterner, DebruijnIndex, Mutability, Safety,
@ -777,7 +777,15 @@ impl<'a> InferenceContext<'a> {
param_tys.push(va_list_ty) param_tys.push(va_list_ty)
} }
for (ty, pat) in param_tys.into_iter().zip(self.body.params.iter()) { let mut param_tys = param_tys.into_iter().chain(iter::repeat(self.table.new_type_var()));
if let Some(self_param) = self.body.self_param {
if let Some(ty) = param_tys.next() {
let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);
self.write_binding_ty(self_param, ty);
}
}
for (ty, pat) in param_tys.zip(&*self.body.params) {
let ty = self.insert_type_vars(ty); let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty); let ty = self.normalize_associated_types_in(ty);
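
The new parameter loop chains the declared types with `iter::repeat(self.table.new_type_var())`, so the `self` param and any surplus patterns still get something to unify with. The same shape with strings standing in for types and the inference variable:

use std::iter;

fn assign_param_types(declared: Vec<String>, pats: &[&str]) -> Vec<(String, String)> {
    let fresh = "?T0".to_string(); // stand-in for one `table.new_type_var()`
    // endless fallback: extra patterns all receive the fresh variable
    let mut tys = declared.into_iter().chain(iter::repeat(fresh));
    pats.iter().map(|&pat| (pat.to_owned(), tys.next().unwrap())).collect()
}

fn main() {
    // two declared types, three patterns: the extra pattern gets the fresh var
    let out = assign_param_types(vec!["i32".into(), "bool".into()], &["a", "b", "c"]);
    assert_eq!(out, vec![
        ("a".to_owned(), "i32".to_owned()),
        ("b".to_owned(), "bool".to_owned()),
        ("c".to_owned(), "?T0".to_owned()),
    ]);
}
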

View file

@ -647,7 +647,7 @@ impl InferenceTable<'_> {
let goal: InEnvironment<DomainGoal> = let goal: InEnvironment<DomainGoal> =
InEnvironment::new(&self.trait_env.env, coerce_unsized_tref.cast(Interner)); InEnvironment::new(&self.trait_env.env, coerce_unsized_tref.cast(Interner));
let canonicalized = self.canonicalize(goal); let canonicalized = self.canonicalize_with_free_vars(goal);
// FIXME: rustc's coerce_unsized is more specialized -- it only tries to // FIXME: rustc's coerce_unsized is more specialized -- it only tries to
// solve `CoerceUnsized` and `Unsize` goals at this point and leaves the // solve `CoerceUnsized` and `Unsize` goals at this point and leaves the

View file

@ -312,15 +312,13 @@ impl InferenceContext<'_> {
Expr::Call { callee, args, .. } => { Expr::Call { callee, args, .. } => {
let callee_ty = self.infer_expr(*callee, &Expectation::none()); let callee_ty = self.infer_expr(*callee, &Expectation::none());
let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false); let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false);
let (res, derefed_callee) = 'b: { let (res, derefed_callee) = loop {
// manual loop to be able to access `derefs.table` let Some((callee_deref_ty, _)) = derefs.next() else {
while let Some((callee_deref_ty, _)) = derefs.next() { break (None, callee_ty.clone());
let res = derefs.table.callable_sig(&callee_deref_ty, args.len()); };
if res.is_some() { if let Some(res) = derefs.table.callable_sig(&callee_deref_ty, args.len()) {
break 'b (res, callee_deref_ty); break (Some(res), callee_deref_ty);
}
} }
(None, callee_ty.clone())
}; };
// if the function is unresolved, we use is_varargs=true to // if the function is unresolved, we use is_varargs=true to
// suppress the arg count diagnostic here // suppress the arg count diagnostic here
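
The labeled-block search above became a plain `loop` whose `break` carries the result pair. The same control-flow shape in isolation; the even-number predicate is a placeholder for `callable_sig` and the iterator for the deref chain:

fn find_callable(mut chain: impl Iterator<Item = i32>, original: i32) -> (Option<i32>, i32) {
    loop {
        let Some(candidate) = chain.next() else {
            break (None, original); // chain exhausted: keep the original value
        };
        if candidate % 2 == 0 {
            break (Some(candidate * 10), candidate); // found a "signature"
        }
    }
}

fn main() {
    assert_eq!(find_callable([3, 5, 6].into_iter(), 0), (Some(60), 6));
    assert_eq!(find_callable([3, 5].into_iter(), 7), (None, 7));
}
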
@ -657,7 +655,7 @@ impl InferenceContext<'_> {
); );
} }
} }
if let Some(derefed) = builtin_deref(&mut self.table, &inner_ty, true) { if let Some(derefed) = builtin_deref(self.table.db, &inner_ty, true) {
self.resolve_ty_shallow(derefed) self.resolve_ty_shallow(derefed)
} else { } else {
deref_by_trait(&mut self.table, inner_ty) deref_by_trait(&mut self.table, inner_ty)
@ -774,7 +772,7 @@ impl InferenceContext<'_> {
let receiver_adjustments = method_resolution::resolve_indexing_op( let receiver_adjustments = method_resolution::resolve_indexing_op(
self.db, self.db,
self.table.trait_env.clone(), self.table.trait_env.clone(),
canonicalized.value, canonicalized,
index_trait, index_trait,
); );
let (self_ty, mut adj) = receiver_adjustments let (self_ty, mut adj) = receiver_adjustments
@ -1559,7 +1557,7 @@ impl InferenceContext<'_> {
let canonicalized_receiver = self.canonicalize(receiver_ty.clone()); let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
let resolved = method_resolution::lookup_method( let resolved = method_resolution::lookup_method(
self.db, self.db,
&canonicalized_receiver.value, &canonicalized_receiver,
self.table.trait_env.clone(), self.table.trait_env.clone(),
self.get_traits_in_scope().as_ref().left_or_else(|&it| it), self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()), VisibleFromModule::Filter(self.resolver.module()),
@ -1608,7 +1606,7 @@ impl InferenceContext<'_> {
let resolved = method_resolution::lookup_method( let resolved = method_resolution::lookup_method(
self.db, self.db,
&canonicalized_receiver.value, &canonicalized_receiver,
self.table.trait_env.clone(), self.table.trait_env.clone(),
self.get_traits_in_scope().as_ref().left_or_else(|&it| it), self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()), VisibleFromModule::Filter(self.resolver.module()),
@ -1641,7 +1639,7 @@ impl InferenceContext<'_> {
}; };
let assoc_func_with_same_name = method_resolution::iterate_method_candidates( let assoc_func_with_same_name = method_resolution::iterate_method_candidates(
&canonicalized_receiver.value, &canonicalized_receiver,
self.db, self.db,
self.table.trait_env.clone(), self.table.trait_env.clone(),
self.get_traits_in_scope().as_ref().left_or_else(|&it| it), self.get_traits_in_scope().as_ref().left_or_else(|&it| it),

View file

@ -321,7 +321,7 @@ impl InferenceContext<'_> {
let mut not_visible = None; let mut not_visible = None;
let res = method_resolution::iterate_method_candidates( let res = method_resolution::iterate_method_candidates(
&canonical_ty.value, &canonical_ty,
self.db, self.db,
self.table.trait_env.clone(), self.table.trait_env.clone(),
self.get_traits_in_scope().as_ref().left_or_else(|&it| it), self.get_traits_in_scope().as_ref().left_or_else(|&it| it),

View file

@ -23,12 +23,9 @@ use crate::{
}; };
impl InferenceContext<'_> { impl InferenceContext<'_> {
pub(super) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>( pub(super) fn canonicalize<T>(&mut self, t: T) -> Canonical<T>
&mut self,
t: T,
) -> Canonicalized<T>
where where
T: HasInterner<Interner = Interner>, T: TypeFoldable<Interner> + HasInterner<Interner = Interner>,
{ {
self.table.canonicalize(t) self.table.canonicalize(t)
} }
@ -128,14 +125,14 @@ impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
}), }),
); );
for (i, v) in solution.value.iter(Interner).enumerate() { for (i, v) in solution.value.iter(Interner).enumerate() {
let var = self.free_vars[i].clone(); let var = &self.free_vars[i];
if let Some(ty) = v.ty(Interner) { if let Some(ty) = v.ty(Interner) {
// eagerly replace projections in the type; we may be getting types // eagerly replace projections in the type; we may be getting types
// e.g. from where clauses where this hasn't happened yet // e.g. from where clauses where this hasn't happened yet
let ty = ctx.normalize_associated_types_in(new_vars.apply(ty.clone(), Interner)); let ty = ctx.normalize_associated_types_in(new_vars.apply(ty.clone(), Interner));
ctx.unify(var.assert_ty_ref(Interner), &ty); ctx.unify(var.assert_ty_ref(Interner), &ty);
} else { } else {
let _ = ctx.try_unify(&var, &new_vars.apply(v.clone(), Interner)); let _ = ctx.try_unify(var, &new_vars.apply(v.clone(), Interner));
} }
} }
} }
@ -243,7 +240,7 @@ pub(crate) struct InferenceTable<'a> {
pub(crate) db: &'a dyn HirDatabase, pub(crate) db: &'a dyn HirDatabase,
pub(crate) trait_env: Arc<TraitEnvironment>, pub(crate) trait_env: Arc<TraitEnvironment>,
var_unification_table: ChalkInferenceTable, var_unification_table: ChalkInferenceTable,
type_variable_table: Vec<TypeVariableFlags>, type_variable_table: SmallVec<[TypeVariableFlags; 16]>,
pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>, pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
/// Double buffer used in [`Self::resolve_obligations_as_possible`] to cut down on /// Double buffer used in [`Self::resolve_obligations_as_possible`] to cut down on
/// temporary allocations. /// temporary allocations.
@ -252,8 +249,8 @@ pub(crate) struct InferenceTable<'a> {
pub(crate) struct InferenceTableSnapshot { pub(crate) struct InferenceTableSnapshot {
var_table_snapshot: chalk_solve::infer::InferenceSnapshot<Interner>, var_table_snapshot: chalk_solve::infer::InferenceSnapshot<Interner>,
type_variable_table: SmallVec<[TypeVariableFlags; 16]>,
pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>, pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
type_variable_table_snapshot: Vec<TypeVariableFlags>,
} }
impl<'a> InferenceTable<'a> { impl<'a> InferenceTable<'a> {
@ -262,7 +259,7 @@ impl<'a> InferenceTable<'a> {
db, db,
trait_env, trait_env,
var_unification_table: ChalkInferenceTable::new(), var_unification_table: ChalkInferenceTable::new(),
type_variable_table: Vec::new(), type_variable_table: SmallVec::new(),
pending_obligations: Vec::new(), pending_obligations: Vec::new(),
resolve_obligations_buffer: Vec::new(), resolve_obligations_buffer: Vec::new(),
} }
@ -292,14 +289,14 @@ impl<'a> InferenceTable<'a> {
} }
fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty { fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty {
let is_diverging = self
.type_variable_table
.get(iv.index() as usize)
.map_or(false, |data| data.contains(TypeVariableFlags::DIVERGING));
if is_diverging {
return TyKind::Never.intern(Interner);
}
match kind { match kind {
_ if self
.type_variable_table
.get(iv.index() as usize)
.map_or(false, |data| data.contains(TypeVariableFlags::DIVERGING)) =>
{
TyKind::Never
}
TyVariableKind::General => TyKind::Error, TyVariableKind::General => TyKind::Error,
TyVariableKind::Integer => TyKind::Scalar(Scalar::Int(IntTy::I32)), TyVariableKind::Integer => TyKind::Scalar(Scalar::Int(IntTy::I32)),
TyVariableKind::Float => TyKind::Scalar(Scalar::Float(FloatTy::F64)), TyVariableKind::Float => TyKind::Scalar(Scalar::Float(FloatTy::F64)),
@ -307,12 +304,9 @@ impl<'a> InferenceTable<'a> {
.intern(Interner) .intern(Interner)
} }
pub(crate) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>( pub(crate) fn canonicalize_with_free_vars<T>(&mut self, t: T) -> Canonicalized<T>
&mut self,
t: T,
) -> Canonicalized<T>
where where
T: HasInterner<Interner = Interner>, T: TypeFoldable<Interner> + HasInterner<Interner = Interner>,
{ {
// try to resolve obligations before canonicalizing, since this might // try to resolve obligations before canonicalizing, since this might
// result in new knowledge about variables // result in new knowledge about variables
@ -326,6 +320,16 @@ impl<'a> InferenceTable<'a> {
Canonicalized { value: result.quantified, free_vars } Canonicalized { value: result.quantified, free_vars }
} }
pub(crate) fn canonicalize<T>(&mut self, t: T) -> Canonical<T>
where
T: TypeFoldable<Interner> + HasInterner<Interner = Interner>,
{
// try to resolve obligations before canonicalizing, since this might
// result in new knowledge about variables
self.resolve_obligations_as_possible();
self.var_unification_table.canonicalize(Interner, t).quantified
}
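
The split introduced here keeps `canonicalize_with_free_vars` for callers that must map a solution back onto the original inference variables, while the plain `canonicalize` drops that bookkeeping. A toy canonicalizer showing what the free-variable table buys (the `Term` language is invented for illustration):

#[derive(Debug, PartialEq)]
enum Term { Var(u32), App(Vec<Term>) }

fn canonicalize_with_free_vars(t: &Term, free_vars: &mut Vec<u32>) -> Term {
    match t {
        Term::Var(v) => {
            // reuse the canonical index if this variable was seen before
            let idx = free_vars.iter().position(|&fv| fv == *v).unwrap_or_else(|| {
                free_vars.push(*v);
                free_vars.len() - 1
            });
            Term::Var(idx as u32)
        }
        Term::App(args) => {
            Term::App(args.iter().map(|a| canonicalize_with_free_vars(a, free_vars)).collect())
        }
    }
}

fn canonicalize(t: &Term) -> Term {
    // when nobody maps a solution back, the free-var table is pure overhead
    canonicalize_with_free_vars(t, &mut Vec::new())
}

fn main() {
    let t = Term::App(vec![Term::Var(7), Term::Var(3), Term::Var(7)]);
    let mut free = Vec::new();
    let c = canonicalize_with_free_vars(&t, &mut free);
    assert_eq!(c, Term::App(vec![Term::Var(0), Term::Var(1), Term::Var(0)]));
    assert_eq!(free, vec![7, 3]); // canonical index -> original variable
    assert_eq!(canonicalize(&t), c);
}
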
/// Recurses through the given type, normalizing associated types mentioned /// Recurses through the given type, normalizing associated types mentioned
/// in it by replacing them by type variables and registering obligations to /// in it by replacing them by type variables and registering obligations to
/// resolve later. This should be done once for every type we get from some /// resolve later. This should be done once for every type we get from some
@ -541,7 +545,7 @@ impl<'a> InferenceTable<'a> {
Err(_) => return false, Err(_) => return false,
}; };
result.goals.iter().all(|goal| { result.goals.iter().all(|goal| {
let canonicalized = self.canonicalize(goal.clone()); let canonicalized = self.canonicalize_with_free_vars(goal.clone());
self.try_resolve_obligation(&canonicalized).is_some() self.try_resolve_obligation(&canonicalized).is_some()
}) })
} }
@ -575,19 +579,15 @@ impl<'a> InferenceTable<'a> {
pub(crate) fn snapshot(&mut self) -> InferenceTableSnapshot { pub(crate) fn snapshot(&mut self) -> InferenceTableSnapshot {
let var_table_snapshot = self.var_unification_table.snapshot(); let var_table_snapshot = self.var_unification_table.snapshot();
let type_variable_table_snapshot = self.type_variable_table.clone(); let type_variable_table = self.type_variable_table.clone();
let pending_obligations = self.pending_obligations.clone(); let pending_obligations = self.pending_obligations.clone();
InferenceTableSnapshot { InferenceTableSnapshot { var_table_snapshot, pending_obligations, type_variable_table }
var_table_snapshot,
pending_obligations,
type_variable_table_snapshot,
}
} }
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot) { pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot) {
self.var_unification_table.rollback_to(snapshot.var_table_snapshot); self.var_unification_table.rollback_to(snapshot.var_table_snapshot);
self.type_variable_table = snapshot.type_variable_table_snapshot; self.type_variable_table = snapshot.type_variable_table;
self.pending_obligations = snapshot.pending_obligations; self.pending_obligations = snapshot.pending_obligations;
} }
@ -606,7 +606,7 @@ impl<'a> InferenceTable<'a> {
let in_env = InEnvironment::new(&self.trait_env.env, goal); let in_env = InEnvironment::new(&self.trait_env.env, goal);
let canonicalized = self.canonicalize(in_env); let canonicalized = self.canonicalize(in_env);
self.db.trait_solve(self.trait_env.krate, self.trait_env.block, canonicalized.value) self.db.trait_solve(self.trait_env.krate, self.trait_env.block, canonicalized)
} }
pub(crate) fn register_obligation(&mut self, goal: Goal) { pub(crate) fn register_obligation(&mut self, goal: Goal) {
@ -615,7 +615,7 @@ impl<'a> InferenceTable<'a> {
} }
fn register_obligation_in_env(&mut self, goal: InEnvironment<Goal>) { fn register_obligation_in_env(&mut self, goal: InEnvironment<Goal>) {
let canonicalized = self.canonicalize(goal); let canonicalized = self.canonicalize_with_free_vars(goal);
let solution = self.try_resolve_obligation(&canonicalized); let solution = self.try_resolve_obligation(&canonicalized);
if matches!(solution, Some(Solution::Ambig(_))) { if matches!(solution, Some(Solution::Ambig(_))) {
self.pending_obligations.push(canonicalized); self.pending_obligations.push(canonicalized);
@ -798,7 +798,7 @@ impl<'a> InferenceTable<'a> {
let trait_data = self.db.trait_data(fn_once_trait); let trait_data = self.db.trait_data(fn_once_trait);
let output_assoc_type = trait_data.associated_type_by_name(&name![Output])?; let output_assoc_type = trait_data.associated_type_by_name(&name![Output])?;
let mut arg_tys = vec![]; let mut arg_tys = Vec::with_capacity(num_args);
let arg_ty = TyBuilder::tuple(num_args) let arg_ty = TyBuilder::tuple(num_args)
.fill(|it| { .fill(|it| {
let arg = match it { let arg = match it {
@ -828,11 +828,7 @@ impl<'a> InferenceTable<'a> {
environment: trait_env.clone(), environment: trait_env.clone(),
}; };
let canonical = self.canonicalize(obligation.clone()); let canonical = self.canonicalize(obligation.clone());
if self if self.db.trait_solve(krate, self.trait_env.block, canonical.cast(Interner)).is_some() {
.db
.trait_solve(krate, self.trait_env.block, canonical.value.cast(Interner))
.is_some()
{
self.register_obligation(obligation.goal); self.register_obligation(obligation.goal);
let return_ty = self.normalize_projection_ty(projection); let return_ty = self.normalize_projection_ty(projection);
for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] { for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] {
@ -845,7 +841,7 @@ impl<'a> InferenceTable<'a> {
let canonical = self.canonicalize(obligation.clone()); let canonical = self.canonicalize(obligation.clone());
if self if self
.db .db
.trait_solve(krate, self.trait_env.block, canonical.value.cast(Interner)) .trait_solve(krate, self.trait_env.block, canonical.cast(Interner))
.is_some() .is_some()
{ {
return Some((fn_x, arg_tys, return_ty)); return Some((fn_x, arg_tys, return_ty));

View file

@ -371,8 +371,8 @@ pub fn layout_of_ty_query(
TyKind::Never => cx.layout_of_never_type(), TyKind::Never => cx.layout_of_never_type(),
TyKind::Dyn(_) | TyKind::Foreign(_) => { TyKind::Dyn(_) | TyKind::Foreign(_) => {
let mut unit = layout_of_unit(&cx, dl)?; let mut unit = layout_of_unit(&cx, dl)?;
match unit.abi { match &mut unit.abi {
Abi::Aggregate { ref mut sized } => *sized = false, Abi::Aggregate { sized } => *sized = false,
_ => return Err(LayoutError::Unknown), _ => return Err(LayoutError::Unknown),
} }
unit unit

View file

@@ -213,7 +213,7 @@ impl TraitImpls {
             // To better support custom derives, collect impls in all unnamed const items.
             // const _: () = { ... };
-            for konst in module_data.scope.unnamed_consts(db.upcast()) {
+            for konst in module_data.scope.unnamed_consts() {
                 let body = db.body(konst.into());
                 for (_, block_def_map) in body.blocks(db.upcast()) {
                     Self::collect_def_map(db, map, &block_def_map);
@@ -337,7 +337,7 @@ impl InherentImpls {
             // To better support custom derives, collect impls in all unnamed const items.
             // const _: () = { ... };
-            for konst in module_data.scope.unnamed_consts(db.upcast()) {
+            for konst in module_data.scope.unnamed_consts() {
                 let body = db.body(konst.into());
                 for (_, block_def_map) in body.blocks(db.upcast()) {
                     self.collect_def_map(db, &block_def_map);
@@ -972,10 +972,9 @@ pub fn iterate_method_candidates_dyn(
     deref_chain.into_iter().try_for_each(|(receiver_ty, adj)| {
         iterate_method_candidates_with_autoref(
-            &receiver_ty,
+            &mut table,
+            receiver_ty,
             adj,
-            db,
-            env.clone(),
             traits_in_scope,
             visible_from_module,
             name,
@@ -1000,10 +999,9 @@ pub fn iterate_method_candidates_dyn(
 #[tracing::instrument(skip_all, fields(name = ?name))]
 fn iterate_method_candidates_with_autoref(
-    receiver_ty: &Canonical<Ty>,
+    table: &mut InferenceTable<'_>,
+    receiver_ty: Canonical<Ty>,
     first_adjustment: ReceiverAdjustments,
-    db: &dyn HirDatabase,
-    env: Arc<TraitEnvironment>,
     traits_in_scope: &FxHashSet<TraitId>,
     visible_from_module: VisibleFromModule,
     name: Option<&Name>,
@@ -1016,10 +1014,9 @@ fn iterate_method_candidates_with_autoref(
     let mut iterate_method_candidates_by_receiver = move |receiver_ty, first_adjustment| {
         iterate_method_candidates_by_receiver(
+            table,
             receiver_ty,
             first_adjustment,
-            db,
-            env.clone(),
             traits_in_scope,
             visible_from_module,
             name,
@@ -1034,7 +1031,7 @@ fn iterate_method_candidates_with_autoref(
         maybe_reborrowed.autoderefs += 1;
     }

-    iterate_method_candidates_by_receiver(receiver_ty, maybe_reborrowed)?;
+    iterate_method_candidates_by_receiver(receiver_ty.clone(), maybe_reborrowed)?;

     let refed = Canonical {
         value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
@@ -1042,7 +1039,7 @@ fn iterate_method_candidates_with_autoref(
         binders: receiver_ty.binders.clone(),
     };

-    iterate_method_candidates_by_receiver(&refed, first_adjustment.with_autoref(Mutability::Not))?;
+    iterate_method_candidates_by_receiver(refed, first_adjustment.with_autoref(Mutability::Not))?;

     let ref_muted = Canonical {
         value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
@@ -1050,58 +1047,53 @@ fn iterate_method_candidates_with_autoref(
         binders: receiver_ty.binders.clone(),
     };

-    iterate_method_candidates_by_receiver(
-        &ref_muted,
-        first_adjustment.with_autoref(Mutability::Mut),
-    )
+    iterate_method_candidates_by_receiver(ref_muted, first_adjustment.with_autoref(Mutability::Mut))
 }

 #[tracing::instrument(skip_all, fields(name = ?name))]
 fn iterate_method_candidates_by_receiver(
-    receiver_ty: &Canonical<Ty>,
+    table: &mut InferenceTable<'_>,
+    receiver_ty: Canonical<Ty>,
     receiver_adjustments: ReceiverAdjustments,
-    db: &dyn HirDatabase,
-    env: Arc<TraitEnvironment>,
     traits_in_scope: &FxHashSet<TraitId>,
     visible_from_module: VisibleFromModule,
     name: Option<&Name>,
     mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
 ) -> ControlFlow<()> {
-    let mut table = InferenceTable::new(db, env);
     let receiver_ty = table.instantiate_canonical(receiver_ty.clone());
-    let snapshot = table.snapshot();
     // We're looking for methods with *receiver* type receiver_ty. These could
     // be found in any of the derefs of receiver_ty, so we have to go through
     // that, including raw derefs.
-    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone(), true);
-    while let Some((self_ty, _)) = autoderef.next() {
-        iterate_inherent_methods(
-            &self_ty,
-            autoderef.table,
-            name,
-            Some(&receiver_ty),
-            Some(receiver_adjustments.clone()),
-            visible_from_module,
-            &mut callback,
-        )?
-    }
-    table.rollback_to(snapshot);
+    table.run_in_snapshot(|table| {
+        let mut autoderef = autoderef::Autoderef::new(table, receiver_ty.clone(), true);
+        while let Some((self_ty, _)) = autoderef.next() {
+            iterate_inherent_methods(
+                &self_ty,
+                autoderef.table,
+                name,
+                Some(&receiver_ty),
+                Some(receiver_adjustments.clone()),
+                visible_from_module,
+                &mut callback,
+            )?
+        }
+        ControlFlow::Continue(())
+    })?;
-    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone(), true);
-    while let Some((self_ty, _)) = autoderef.next() {
-        iterate_trait_method_candidates(
-            &self_ty,
-            autoderef.table,
-            traits_in_scope,
-            name,
-            Some(&receiver_ty),
-            Some(receiver_adjustments.clone()),
-            &mut callback,
-        )?
-    }
-    ControlFlow::Continue(())
+    table.run_in_snapshot(|table| {
+        let mut autoderef = autoderef::Autoderef::new(table, receiver_ty.clone(), true);
+        while let Some((self_ty, _)) = autoderef.next() {
+            iterate_trait_method_candidates(
+                &self_ty,
+                autoderef.table,
+                traits_in_scope,
+                name,
+                Some(&receiver_ty),
+                Some(receiver_adjustments.clone()),
+                &mut callback,
+            )?
+        }
+        ControlFlow::Continue(())
+    })
 }

 #[tracing::instrument(skip_all, fields(name = ?name))]
@@ -1147,9 +1139,9 @@ fn iterate_trait_method_candidates(
     callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
 ) -> ControlFlow<()> {
     let db = table.db;
-    let env = table.trait_env.clone();
-    let canonical_self_ty = table.canonicalize(self_ty.clone()).value;
+    let canonical_self_ty = table.canonicalize(self_ty.clone());
+    let TraitEnvironment { krate, block, .. } = *table.trait_env;

     'traits: for &t in traits_in_scope {
         let data = db.trait_data(t);
@@ -1164,7 +1156,7 @@ fn iterate_trait_method_candidates(
         {
             // FIXME: this should really be using the edition of the method name's span, in case it
             // comes from a macro
-            if db.crate_graph()[env.krate].edition < Edition::Edition2021 {
+            if db.crate_graph()[krate].edition < Edition::Edition2021 {
                 continue;
             }
         }
@@ -1183,8 +1175,8 @@ fn iterate_trait_method_candidates(
                 IsValidCandidate::No => continue,
             };
             if !known_implemented {
-                let goal = generic_implements_goal(db, env.clone(), t, &canonical_self_ty);
-                if db.trait_solve(env.krate, env.block, goal.cast(Interner)).is_none() {
+                let goal = generic_implements_goal(db, &table.trait_env, t, &canonical_self_ty);
+                if db.trait_solve(krate, block, goal.cast(Interner)).is_none() {
                     continue 'traits;
                 }
             }
@@ -1365,7 +1357,7 @@ pub(crate) fn resolve_indexing_op(
     let ty = table.instantiate_canonical(ty);
     let deref_chain = autoderef_method_receiver(&mut table, ty);
     for (ty, adj) in deref_chain {
-        let goal = generic_implements_goal(db, table.trait_env.clone(), index_trait, &ty);
+        let goal = generic_implements_goal(db, &table.trait_env, index_trait, &ty);
         if db
             .trait_solve(table.trait_env.krate, table.trait_env.block, goal.cast(Interner))
             .is_some()
@@ -1548,7 +1540,7 @@ fn is_valid_impl_fn_candidate(
         for goal in goals.clone() {
             let in_env = InEnvironment::new(&table.trait_env.env, goal);
-            let canonicalized = table.canonicalize(in_env);
+            let canonicalized = table.canonicalize_with_free_vars(in_env);
             let solution = table.db.trait_solve(
                 table.trait_env.krate,
                 table.trait_env.block,
@@ -1586,10 +1578,10 @@ fn is_valid_impl_fn_candidate(
 pub fn implements_trait(
     ty: &Canonical<Ty>,
     db: &dyn HirDatabase,
-    env: Arc<TraitEnvironment>,
+    env: &TraitEnvironment,
     trait_: TraitId,
 ) -> bool {
-    let goal = generic_implements_goal(db, env.clone(), trait_, ty);
+    let goal = generic_implements_goal(db, env, trait_, ty);
     let solution = db.trait_solve(env.krate, env.block, goal.cast(Interner));

     solution.is_some()
@@ -1598,10 +1590,10 @@ pub fn implements_trait(
 pub fn implements_trait_unique(
     ty: &Canonical<Ty>,
     db: &dyn HirDatabase,
-    env: Arc<TraitEnvironment>,
+    env: &TraitEnvironment,
     trait_: TraitId,
 ) -> bool {
-    let goal = generic_implements_goal(db, env.clone(), trait_, ty);
+    let goal = generic_implements_goal(db, env, trait_, ty);
     let solution = db.trait_solve(env.krate, env.block, goal.cast(Interner));

     matches!(solution, Some(crate::Solution::Unique(_)))
@@ -1612,32 +1604,34 @@ pub fn implements_trait_unique(
 #[tracing::instrument(skip_all)]
 fn generic_implements_goal(
     db: &dyn HirDatabase,
-    env: Arc<TraitEnvironment>,
+    env: &TraitEnvironment,
     trait_: TraitId,
     self_ty: &Canonical<Ty>,
 ) -> Canonical<InEnvironment<super::DomainGoal>> {
-    let mut kinds = self_ty.binders.interned().to_vec();
+    let binders = self_ty.binders.interned();
     let trait_ref = TyBuilder::trait_ref(db, trait_)
         .push(self_ty.value.clone())
-        .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
+        .fill_with_bound_vars(DebruijnIndex::INNERMOST, binders.len())
         .build();
-    kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|it| {
-        let vk = match it.data(Interner) {
-            chalk_ir::GenericArgData::Ty(_) => {
-                chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
-            }
-            chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime,
-            chalk_ir::GenericArgData::Const(c) => {
-                chalk_ir::VariableKind::Const(c.data(Interner).ty.clone())
-            }
-        };
-        chalk_ir::WithKind::new(vk, UniverseIndex::ROOT)
-    }));
+
+    let kinds =
+        binders.iter().cloned().chain(trait_ref.substitution.iter(Interner).skip(1).map(|it| {
+            let vk = match it.data(Interner) {
+                chalk_ir::GenericArgData::Ty(_) => {
+                    chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
+                }
+                chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime,
+                chalk_ir::GenericArgData::Const(c) => {
+                    chalk_ir::VariableKind::Const(c.data(Interner).ty.clone())
+                }
+            };
+            chalk_ir::WithKind::new(vk, UniverseIndex::ROOT)
+        }));
+    let binders = CanonicalVarKinds::from_iter(Interner, kinds);
     let obligation = trait_ref.cast(Interner);
-    Canonical {
-        binders: CanonicalVarKinds::from_iter(Interner, kinds),
-        value: InEnvironment::new(&env.env, obligation),
-    }
+    let value = InEnvironment::new(&env.env, obligation);
+    Canonical { binders, value }
 }

 fn autoderef_method_receiver(
@@ -1648,7 +1642,7 @@ fn autoderef_method_receiver(
     let mut autoderef = autoderef::Autoderef::new(table, ty, false);
     while let Some((ty, derefs)) = autoderef.next() {
         deref_chain.push((
-            autoderef.table.canonicalize(ty).value,
+            autoderef.table.canonicalize(ty),
             ReceiverAdjustments { autoref: None, autoderefs: derefs, unsize_array: false },
         ));
     }
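
The `iterate_method_candidates_by_receiver` hunk above replaces manual `snapshot()` / `rollback_to()` bookkeeping with a scoped `run_in_snapshot` helper, so candidate probing can no longer leak inference-table state on an early return. A self-contained sketch of that pattern with a toy table (rust-analyzer's real `InferenceTable` is far more involved; this only illustrates the shape):

struct Table {
    data: Vec<i32>,
}

struct Snapshot {
    len: usize,
}

impl Table {
    fn snapshot(&self) -> Snapshot {
        Snapshot { len: self.data.len() }
    }

    fn rollback_to(&mut self, snapshot: Snapshot) {
        self.data.truncate(snapshot.len);
    }

    /// Run `f` with mutable access, then undo whatever it changed,
    /// no matter how `f` returns.
    fn run_in_snapshot<T>(&mut self, f: impl FnOnce(&mut Table) -> T) -> T {
        let snapshot = self.snapshot();
        let result = f(self);
        self.rollback_to(snapshot);
        result
    }
}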


@@ -1165,6 +1165,7 @@ impl MirBody {
 pub enum MirSpan {
     ExprId(ExprId),
     PatId(PatId),
+    SelfParam,
     Unknown,
 }


@@ -376,6 +376,10 @@ impl MirEvalError {
                     Ok(s) => s.map(|it| it.syntax_node_ptr()),
                     Err(_) => continue,
                 },
+                MirSpan::SelfParam => match source_map.self_param_syntax() {
+                    Some(s) => s.map(|it| it.syntax_node_ptr()),
+                    None => continue,
+                },
                 MirSpan::Unknown => continue,
             };
             let file_id = span.file_id.original_file(db.upcast());


@@ -1810,9 +1810,20 @@ impl<'ctx> MirLowerCtx<'ctx> {
     fn lower_params_and_bindings(
         &mut self,
         params: impl Iterator<Item = (PatId, Ty)> + Clone,
+        self_binding: Option<(BindingId, Ty)>,
         pick_binding: impl Fn(BindingId) -> bool,
     ) -> Result<BasicBlockId> {
         let base_param_count = self.result.param_locals.len();
+        let self_binding = match self_binding {
+            Some((self_binding, ty)) => {
+                let local_id = self.result.locals.alloc(Local { ty });
+                self.drop_scopes.last_mut().unwrap().locals.push(local_id);
+                self.result.binding_locals.insert(self_binding, local_id);
+                self.result.param_locals.push(local_id);
+                Some(self_binding)
+            }
+            None => None,
+        };
         self.result.param_locals.extend(params.clone().map(|(it, ty)| {
             let local_id = self.result.locals.alloc(Local { ty });
             self.drop_scopes.last_mut().unwrap().locals.push(local_id);
@@ -1838,9 +1849,23 @@ impl<'ctx> MirLowerCtx<'ctx> {
             }
         }
         let mut current = self.result.start_block;
-        for ((param, _), local) in
-            params.zip(self.result.param_locals.clone().into_iter().skip(base_param_count))
-        {
+        if let Some(self_binding) = self_binding {
+            let local = self.result.param_locals.clone()[base_param_count];
+            if local != self.binding_local(self_binding)? {
+                let r = self.match_self_param(self_binding, current, local)?;
+                if let Some(b) = r.1 {
+                    self.set_terminator(b, TerminatorKind::Unreachable, MirSpan::SelfParam);
+                }
+                current = r.0;
+            }
+        }
+        let local_params = self
+            .result
+            .param_locals
+            .clone()
+            .into_iter()
+            .skip(base_param_count + self_binding.is_some() as usize);
+        for ((param, _), local) in params.zip(local_params) {
             if let Pat::Bind { id, .. } = self.body[param] {
                 if local == self.binding_local(id)? {
                     continue;
@@ -2019,6 +2044,7 @@ pub fn mir_body_for_closure_query(
     };
     let current = ctx.lower_params_and_bindings(
         args.iter().zip(sig.params().iter()).map(|(it, y)| (*it, y.clone())),
+        None,
         |_| true,
     )?;
     if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
@@ -2149,16 +2175,16 @@ pub fn lower_to_mir(
                 let substs = TyBuilder::placeholder_subst(db, fid);
                 let callable_sig =
                     db.callable_item_signature(fid.into()).substitute(Interner, &substs);
+                let mut params = callable_sig.params().iter();
+                let self_param = body.self_param.and_then(|id| Some((id, params.next()?.clone())));
                 break 'b ctx.lower_params_and_bindings(
-                    body.params
-                        .iter()
-                        .zip(callable_sig.params().iter())
-                        .map(|(it, y)| (*it, y.clone())),
+                    body.params.iter().zip(params).map(|(it, y)| (*it, y.clone())),
+                    self_param,
                     binding_picker,
                 )?;
             }
         }
-        ctx.lower_params_and_bindings([].into_iter(), binding_picker)?
+        ctx.lower_params_and_bindings([].into_iter(), None, binding_picker)?
     };
     if let Some(current) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? {
         let current = ctx.pop_drop_scope_assert_finished(current, root_expr.into())?;
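
With this change the lowered body's `param_locals` starts with an optional local for `self` followed by one local per positional parameter, which is why the positional loop skips `base_param_count + self_binding.is_some() as usize` entries. A tiny standalone model of that index arithmetic (names are illustrative, not rust-analyzer's API):

/// Index of the first positional parameter's local in `param_locals`,
/// given however many locals were already present plus an optional `self`.
fn first_positional_local(base_param_count: usize, has_self: bool) -> usize {
    base_param_count + has_self as usize
}

fn main() {
    assert_eq!(first_positional_local(0, true), 1); // locals: [self, a, b, ..]
    assert_eq!(first_positional_local(0, false), 0); // locals: [a, b, ..]
}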


@@ -11,7 +11,7 @@ use crate::{
             Substitution, SwitchTargets, TerminatorKind, TupleFieldId, TupleId, TyBuilder, TyKind,
             ValueNs, VariantData, VariantId,
         },
-        MutBorrowKind,
+        LocalId, MutBorrowKind,
     },
     BindingMode,
 };
@@ -82,6 +82,22 @@ impl MirLowerCtx<'_> {
         Ok((current, current_else))
     }

+    pub(super) fn match_self_param(
+        &mut self,
+        id: BindingId,
+        current: BasicBlockId,
+        local: LocalId,
+    ) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
+        self.pattern_match_binding(
+            id,
+            BindingMode::Move,
+            local.into(),
+            MirSpan::SelfParam,
+            current,
+            None,
+        )
+    }
+
     fn pattern_match_inner(
         &mut self,
         mut current: BasicBlockId,
@@ -283,9 +299,9 @@ impl MirLowerCtx<'_> {
                     (current, current_else) =
                         self.pattern_match_inner(current, current_else, next_place, pat, mode)?;
                 }
-                if let Some(slice) = slice {
+                if let &Some(slice) = slice {
                     if mode == MatchingMode::Bind {
-                        if let Pat::Bind { id, subpat: _ } = self.body[*slice] {
+                        if let Pat::Bind { id, subpat: _ } = self.body[slice] {
                             let next_place = cond_place.project(
                                 ProjectionElem::Subslice {
                                     from: prefix.len() as u64,
@@ -293,11 +309,12 @@ impl MirLowerCtx<'_> {
                                 },
                                 &mut self.result.projection_store,
                             );
+                            let mode = self.infer.binding_modes[slice];
                             (current, current_else) = self.pattern_match_binding(
                                 id,
-                                *slice,
+                                mode,
                                 next_place,
-                                (*slice).into(),
+                                (slice).into(),
                                 current,
                                 current_else,
                             )?;
@@ -398,9 +415,10 @@ impl MirLowerCtx<'_> {
                     self.pattern_match_inner(current, current_else, cond_place, *subpat, mode)?
                 }
                 if mode == MatchingMode::Bind {
+                    let mode = self.infer.binding_modes[pattern];
                     self.pattern_match_binding(
                         *id,
-                        pattern,
+                        mode,
                         cond_place,
                         pattern.into(),
                         current,
@@ -437,14 +455,13 @@ impl MirLowerCtx<'_> {
     fn pattern_match_binding(
         &mut self,
         id: BindingId,
-        pat: PatId,
+        mode: BindingMode,
         cond_place: Place,
         span: MirSpan,
         current: BasicBlockId,
         current_else: Option<BasicBlockId>,
     ) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
         let target_place = self.binding_local(id)?;
-        let mode = self.infer.binding_modes[pat];
         self.push_storage_live(id, current)?;
         self.push_assignment(
             current,
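
`pattern_match_binding` now receives the `BindingMode` from its caller instead of looking it up by `PatId`, because a `self` parameter has no pattern to index `binding_modes` with; `match_self_param` simply passes `BindingMode::Move`. A stripped-down picture of the body layout that forces this (toy types, not the real hir-def items):

// After this commit, positional parameters are patterns while `self` is only
// a binding, so anything keyed by PatId (like inferred binding modes) cannot
// apply to it and the mode must be passed explicitly.
struct PatId(u32);
struct BindingId(u32);

struct Body {
    params: Vec<PatId>,            // fn f(a: A, (b, c): (B, C)) -> patterns
    self_param: Option<BindingId>, // fn f(&self, ..) -> just a binding
}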


@@ -12,7 +12,7 @@ mod traits;
 use std::env;

-use base_db::{FileRange, SourceDatabaseExt};
+use base_db::{FileRange, SourceDatabaseExt2 as _};
 use expect_test::Expect;
 use hir_def::{
     body::{Body, BodySourceMap, SyntheticSyntax},
@@ -164,7 +164,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
             Some(value) => value,
             None => continue,
         };
-        let range = node.as_ref().original_file_range(&db);
+        let range = node.as_ref().original_file_range_rooted(&db);
         if let Some(expected) = types.remove(&range) {
             let actual = if display_source {
                 ty.display_source_code(&db, def.module(&db), true).unwrap()
@@ -180,7 +180,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
             Some(value) => value,
             None => continue,
         };
-        let range = node.as_ref().original_file_range(&db);
+        let range = node.as_ref().original_file_range_rooted(&db);
         if let Some(expected) = types.remove(&range) {
             let actual = if display_source {
                 ty.display_source_code(&db, def.module(&db), true).unwrap()
@@ -211,7 +211,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
         }) else {
             continue;
         };
-        let range = node.as_ref().original_file_range(&db);
+        let range = node.as_ref().original_file_range_rooted(&db);
         let actual = format!(
             "expected {}, got {}",
             mismatch.expected.display_test(&db),
@@ -293,20 +293,29 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
     let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();
     let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
+    if let Some(self_param) = body.self_param {
+        let ty = &inference_result.type_of_binding[self_param];
+        if let Some(syntax_ptr) = body_source_map.self_param_syntax() {
+            let root = db.parse_or_expand(syntax_ptr.file_id);
+            let node = syntax_ptr.map(|ptr| ptr.to_node(&root).syntax().clone());
+            types.push((node.clone(), ty));
+        }
+    }
     for (pat, mut ty) in inference_result.type_of_pat.iter() {
         if let Pat::Bind { id, .. } = body.pats[pat] {
             ty = &inference_result.type_of_binding[id];
         }
-        let syntax_ptr = match body_source_map.pat_syntax(pat) {
+        let node = match body_source_map.pat_syntax(pat) {
             Ok(sp) => {
                 let root = db.parse_or_expand(sp.file_id);
                 sp.map(|ptr| ptr.to_node(&root).syntax().clone())
             }
             Err(SyntheticSyntax) => continue,
         };
-        types.push((syntax_ptr.clone(), ty));
+        types.push((node.clone(), ty));
         if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat) {
-            mismatches.push((syntax_ptr, mismatch));
+            mismatches.push((node, mismatch));
         }
     }
@@ -575,7 +584,7 @@ fn salsa_bug() {
     }
 ";

-    db.set_file_text(pos.file_id, Arc::from(new_text));
+    db.set_file_text(pos.file_id, new_text);

     let module = db.module_for_file(pos.file_id);
     let crate_def_map = module.def_map(&db);
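
These tests (and the incremental ones below) drop the `Arc::from(new_text)` dance because the newly imported `SourceDatabaseExt2` provides a `set_file_text` that accepts `&str`. A guessed sketch of such a convenience wrapper — the real trait lives in `base_db`, and the exact method shape here is an assumption:

use std::sync::Arc;

trait SetFileText {
    fn set_file_text_arc(&mut self, file_id: u32, text: Arc<str>);

    // Convenience: accept &str and centralize the Arc allocation in one place.
    fn set_file_text(&mut self, file_id: u32, text: &str) {
        self.set_file_text_arc(file_id, Arc::from(text));
    }
}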


@@ -1,6 +1,5 @@
-use base_db::SourceDatabaseExt;
+use base_db::SourceDatabaseExt2 as _;
 use test_fixture::WithFixture;
-use triomphe::Arc;

 use crate::{db::HirDatabase, test_db::TestDB};

@@ -33,7 +32,7 @@ fn foo() -> i32 {
     1
 }";
-    db.set_file_text(pos.file_id, Arc::from(new_text));
+    db.set_file_text(pos.file_id, new_text);

     {
         let events = db.log_executed(|| {
@@ -85,7 +84,7 @@ fn baz() -> i32 {
 }
 ";
-    db.set_file_text(pos.file_id, Arc::from(new_text));
+    db.set_file_text(pos.file_id, new_text);

     {
         let events = db.log_executed(|| {


@@ -1461,28 +1461,6 @@ fn f() {
     );
 }

-#[test]
-fn trait_impl_in_synstructure_const() {
-    check_types(
-        r#"
-struct S;
-trait Tr {
-    fn method(&self) -> u16;
-}
-
-const _DERIVE_Tr_: () = {
-    impl Tr for S {}
-};
-
-fn f() {
-    S.method();
-  //^^^^^^^^^^ u16
-}
-"#,
-    );
-}
-
 #[test]
 fn inherent_impl_in_unnamed_const() {
     check_types(
@@ -1795,6 +1773,21 @@ fn test() {
     );
 }

+#[test]
+fn deref_into_inference_var() {
+    check_types(
+        r#"
+//- minicore:deref
+struct A<T>(T);
+impl core::ops::Deref for A<u32> {}
+impl A<i32> { fn foo(&self) {} }
+fn main() {
+    A(0).foo();
+  //^^^^^^^^^^ ()
+}
+"#,
+    );
+}
+
 #[test]
 fn receiver_adjustment_autoref() {
     check(


@@ -2121,6 +2121,7 @@ async fn main() {
     "#,
         expect![[r#"
             16..193 '{ ...2 }; }': ()
+            16..193 '{ ...2 }; }': impl Future<Output = ()>
             26..27 'x': i32
             30..43 'unsafe { 92 }': i32
             39..41 '92': i32
@@ -2131,6 +2132,8 @@ async fn main() {
             73..75 '()': ()
             95..96 'z': ControlFlow<(), ()>
             130..140 'try { () }': ControlFlow<(), ()>
+            130..140 'try { () }': fn from_output<ControlFlow<(), ()>>(<ControlFlow<(), ()> as Try>::Output) -> ControlFlow<(), ()>
+            130..140 'try { () }': ControlFlow<(), ()>
             136..138 '()': ()
             150..151 'w': i32
             154..166 'const { 92 }': i32


@@ -204,7 +204,7 @@ pub struct NoSuchField {
 #[derive(Debug)]
 pub struct PrivateAssocItem {
-    pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, Either<ast::Pat, ast::SelfParam>>>>,
+    pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
     pub item: AssocItem,
 }

@@ -240,7 +240,7 @@ pub struct UnresolvedMethodCall {
 #[derive(Debug)]
 pub struct UnresolvedAssocItem {
-    pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, Either<ast::Pat, ast::SelfParam>>>>,
+    pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
 }

 #[derive(Debug)]
#[derive(Debug)] #[derive(Debug)]


@@ -159,6 +159,7 @@ impl HirDisplay for Adt {
 impl HirDisplay for Struct {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         let module_id = self.module(f.db).id;
+        // FIXME: Render repr if its set explicitly?
         write_visibility(module_id, self.visibility(f.db), f)?;
         f.write_str("struct ")?;
         write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
@@ -166,37 +167,40 @@ impl HirDisplay for Struct {
         write_generic_params(def_id, f)?;

         let variant_data = self.variant_data(f.db);
-        if let StructKind::Tuple = variant_data.kind() {
-            f.write_char('(')?;
-            let mut it = variant_data.fields().iter().peekable();
+        match variant_data.kind() {
+            StructKind::Tuple => {
+                f.write_char('(')?;
+                let mut it = variant_data.fields().iter().peekable();
                 while let Some((id, _)) = it.next() {
                     let field = Field { parent: (*self).into(), id };
                     write_visibility(module_id, field.visibility(f.db), f)?;
                     field.ty(f.db).hir_fmt(f)?;
                     if it.peek().is_some() {
                         f.write_str(", ")?;
                     }
                 }
-            f.write_str(");")?;
-        }
-        write_where_clause(def_id, f)?;
-        if let StructKind::Record = variant_data.kind() {
-            let fields = self.fields(f.db);
-            if fields.is_empty() {
-                f.write_str(" {}")?;
-            } else {
-                f.write_str(" {\n")?;
-                for field in self.fields(f.db) {
-                    f.write_str("    ")?;
-                    field.hir_fmt(f)?;
-                    f.write_str(",\n")?;
-                }
-                f.write_str("}")?;
-            }
-        }
+                f.write_char(')')?;
+                write_where_clause(def_id, f)?;
+            }
+            StructKind::Record => {
+                let has_where_clause = write_where_clause(def_id, f)?;
+                let fields = self.fields(f.db);
+                f.write_char(if !has_where_clause { ' ' } else { '\n' })?;
+                if fields.is_empty() {
+                    f.write_str("{}")?;
+                } else {
+                    f.write_str("{\n")?;
+                    for field in self.fields(f.db) {
+                        f.write_str("    ")?;
+                        field.hir_fmt(f)?;
+                        f.write_str(",\n")?;
+                    }
+                    f.write_str("}")?;
+                }
+            }
+            StructKind::Unit => _ = write_where_clause(def_id, f)?,
         }

         Ok(())
@@ -210,11 +214,12 @@ impl HirDisplay for Enum {
         write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
         let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
         write_generic_params(def_id, f)?;
-        write_where_clause(def_id, f)?;
+        let has_where_clause = write_where_clause(def_id, f)?;

         let variants = self.variants(f.db);
         if !variants.is_empty() {
-            f.write_str(" {\n")?;
+            f.write_char(if !has_where_clause { ' ' } else { '\n' })?;
+            f.write_str("{\n")?;
             for variant in variants {
                 f.write_str("    ")?;
                 variant.hir_fmt(f)?;
@@ -234,11 +239,12 @@ impl HirDisplay for Union {
         write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
         let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
         write_generic_params(def_id, f)?;
-        write_where_clause(def_id, f)?;
+        let has_where_clause = write_where_clause(def_id, f)?;

         let fields = self.fields(f.db);
         if !fields.is_empty() {
-            f.write_str(" {\n")?;
+            f.write_char(if !has_where_clause { ' ' } else { '\n' })?;
+            f.write_str("{\n")?;
             for field in self.fields(f.db) {
                 f.write_str("    ")?;
                 field.hir_fmt(f)?;
@@ -446,7 +452,10 @@ fn write_generic_params(
     Ok(())
 }

-fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+fn write_where_clause(
+    def: GenericDefId,
+    f: &mut HirFormatter<'_>,
+) -> Result<bool, HirDisplayError> {
     let params = f.db.generic_params(def);

     // unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`.
@@ -465,7 +474,7 @@ fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(),
     });

     if !has_displayable_predicate {
-        return Ok(());
+        return Ok(false);
     }

     let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
@@ -543,7 +552,7 @@ fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(),
     // End of final predicate. There must be at least one predicate here.
     f.write_char(',')?;

-    Ok(())
+    Ok(true)
 }

 impl HirDisplay for Const {
@@ -594,19 +603,20 @@ impl HirDisplay for Trait {
         write!(f, "trait {}", data.name.display(f.db.upcast()))?;
         let def_id = GenericDefId::TraitId(self.id);
         write_generic_params(def_id, f)?;
-        write_where_clause(def_id, f)?;
+        let has_where_clause = write_where_clause(def_id, f)?;

         if let Some(limit) = f.entity_limit {
             let assoc_items = self.items(f.db);
             let count = assoc_items.len().min(limit);
+            f.write_char(if !has_where_clause { ' ' } else { '\n' })?;
             if count == 0 {
                 if assoc_items.is_empty() {
-                    f.write_str(" {}")?;
+                    f.write_str("{}")?;
                 } else {
-                    f.write_str(" { /* … */ }")?;
+                    f.write_str("{ /* … */ }")?;
                 }
             } else {
-                f.write_str(" {\n")?;
+                f.write_str("{\n")?;
                 for item in &assoc_items[..count] {
                     f.write_str("    ")?;
                     match item {
@@ -651,7 +661,6 @@ impl HirDisplay for TypeAlias {
         write!(f, "type {}", data.name.display(f.db.upcast()))?;
         let def_id = GenericDefId::TypeAliasId(self.id);
         write_generic_params(def_id, f)?;
-        write_where_clause(def_id, f)?;
         if !data.bounds.is_empty() {
             f.write_str(": ")?;
             f.write_joined(data.bounds.iter(), " + ")?;
@@ -660,6 +669,7 @@ impl HirDisplay for TypeAlias {
             f.write_str(" = ")?;
             ty.hir_fmt(f)?;
         }
+        write_where_clause(def_id, f)?;
         Ok(())
     }
 }
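
`write_where_clause` now reports whether it emitted anything, and every caller uses that to decide what separates the item header from the following `{`: a plain space when there is no where clause, a newline when there is one (so the brace drops to its own line after the final `,`). A compact illustration of that contract, using a plain `String` writer instead of `HirFormatter`:

fn render(name: &str, where_clause: Option<&str>) -> String {
    let mut out = format!("struct {name}");
    // Stand-in for write_where_clause: returns true if it wrote one.
    let has_where_clause = match where_clause {
        Some(wc) => {
            out.push_str("\nwhere\n    ");
            out.push_str(wc);
            out.push(',');
            true
        }
        None => false,
    };
    out.push(if !has_where_clause { ' ' } else { '\n' });
    out.push_str("{ /* … */ }");
    out
}

fn main() {
    assert_eq!(render("Foo", None), "struct Foo { /* … */ }");
    assert_eq!(
        render("Bar", Some("T: Clone")),
        "struct Bar\nwhere\n    T: Clone,\n{ /* … */ }"
    );
}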


@@ -9,6 +9,7 @@ use hir_def::{
 };
 use hir_expand::{HirFileId, InFile};
 use syntax::ast;
+use tt::TextRange;

 use crate::{
     db::HirDatabase, Adt, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
@@ -37,6 +38,12 @@ impl Module {
         def_map[self.id.local_id].definition_source(db.upcast())
     }

+    /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
+    pub fn definition_source_range(self, db: &dyn HirDatabase) -> InFile<TextRange> {
+        let def_map = self.id.def_map(db.upcast());
+        def_map[self.id.local_id].definition_source_range(db.upcast())
+    }
+
     pub fn definition_source_file_id(self, db: &dyn HirDatabase) -> HirFileId {
         let def_map = self.id.def_map(db.upcast());
         def_map[self.id.local_id].definition_source_file_id()
@@ -71,6 +78,13 @@ impl Module {
         let def_map = self.id.def_map(db.upcast());
         def_map[self.id.local_id].declaration_source(db.upcast())
     }
+
+    /// Returns a text range which declares this module, either a `mod foo;` or a `mod foo {}`.
+    /// `None` for the crate root.
+    pub fn declaration_source_range(self, db: &dyn HirDatabase) -> Option<InFile<TextRange>> {
+        let def_map = self.id.def_map(db.upcast());
+        def_map[self.id.local_id].declaration_source_range(db.upcast())
+    }
 }

 impl HasSource for Field {


@@ -56,8 +56,8 @@ use hir_def::{
     AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
     EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule,
     ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander,
-    MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TupleId, TypeAliasId,
-    TypeOrConstParamId, TypeParamId, UnionId,
+    ModuleId, StaticId, StructId, TraitAliasId, TraitId, TupleId, TypeAliasId, TypeOrConstParamId,
+    TypeParamId, UnionId,
 };
 use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, MacroCallKind};
 use hir_ty::{
@@ -122,7 +122,7 @@ pub use {
         visibility::Visibility,
         // FIXME: This is here since some queries take it as input that are used
         // outside of hir.
-        {AdtId, ModuleDefId},
+        {AdtId, MacroId, ModuleDefId},
     },
     hir_expand::{
         attrs::{Attr, AttrId},
@@ -754,7 +754,7 @@ impl Module {
         scope
             .declarations()
             .map(ModuleDef::from)
-            .chain(scope.unnamed_consts(db.upcast()).map(|id| ModuleDef::Const(Const::from(id))))
+            .chain(scope.unnamed_consts().map(|id| ModuleDef::Const(Const::from(id))))
             .collect()
     }
@@ -1725,6 +1725,10 @@ impl DefWithBody {
                             Ok(s) => s.map(|it| it.into()),
                             Err(_) => continue,
                         },
+                        mir::MirSpan::SelfParam => match source_map.self_param_syntax() {
+                            Some(s) => s.map(|it| it.into()),
+                            None => continue,
+                        },
                         mir::MirSpan::Unknown => continue,
                     };
                     acc.push(
@@ -1776,6 +1780,11 @@ impl DefWithBody {
                                 Ok(s) => s.map(|it| it.into()),
                                 Err(_) => continue,
                             },
+                            mir::MirSpan::SelfParam => match source_map.self_param_syntax()
+                            {
+                                Some(s) => s.map(|it| it.into()),
+                                None => continue,
+                            },
                             mir::MirSpan::Unknown => continue,
                         };
                         acc.push(NeedMut { local, span }.into());
@@ -2127,8 +2136,11 @@ impl Param {
     pub fn as_local(&self, db: &dyn HirDatabase) -> Option<Local> {
         let parent = DefWithBodyId::FunctionId(self.func.into());
         let body = db.body(parent);
-        let pat_id = body.params[self.idx];
-        if let Pat::Bind { id, .. } = &body[pat_id] {
+        if let Some(self_param) = body.self_param.filter(|_| self.idx == 0) {
+            Some(Local { parent, binding_id: self_param })
+        } else if let Pat::Bind { id, .. } =
+            &body[body.params[self.idx - body.self_param.is_some() as usize]]
+        {
             Some(Local { parent, binding_id: *id })
         } else {
             None
@@ -2143,7 +2155,7 @@ impl Param {
         let InFile { file_id, value } = self.func.source(db)?;
         let params = value.param_list()?;
         if params.self_param().is_some() {
-            params.params().nth(self.idx.checked_sub(1)?)
+            params.params().nth(self.idx.checked_sub(params.self_param().is_some() as usize)?)
         } else {
             params.params().nth(self.idx)
         }
@@ -2605,6 +2617,15 @@ impl Macro {
         }
     }

+    pub fn is_env_or_option_env(&self, db: &dyn HirDatabase) -> bool {
+        match self.id {
+            MacroId::Macro2Id(it) => {
+                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env())
+            }
+            MacroId::MacroRulesId(_) | MacroId::ProcMacroId(_) => false,
+        }
+    }
+
     pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
         matches!(self.kind(db), MacroKind::Attr)
     }
@@ -3134,35 +3155,59 @@ impl Local {
     /// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = it;`
     pub fn sources(self, db: &dyn HirDatabase) -> Vec<LocalSource> {
         let (body, source_map) = db.body_with_source_map(self.parent);
-        self.sources_(db, &body, &source_map).collect()
+        match body.self_param.zip(source_map.self_param_syntax()) {
+            Some((param, source)) if param == self.binding_id => {
+                let root = source.file_syntax(db.upcast());
+                vec![LocalSource {
+                    local: self,
+                    source: source.map(|ast| Either::Right(ast.to_node(&root))),
+                }]
+            }
+            _ => body[self.binding_id]
+                .definitions
+                .iter()
+                .map(|&definition| {
+                    let src = source_map.pat_syntax(definition).unwrap(); // Hmm...
+                    let root = src.file_syntax(db.upcast());
+                    LocalSource {
+                        local: self,
+                        source: src.map(|ast| match ast.to_node(&root) {
+                            ast::Pat::IdentPat(it) => Either::Left(it),
+                            _ => unreachable!("local with non ident-pattern"),
+                        }),
+                    }
+                })
+                .collect(),
+        }
     }

     /// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = it;`
     pub fn primary_source(self, db: &dyn HirDatabase) -> LocalSource {
         let (body, source_map) = db.body_with_source_map(self.parent);
-        let src = self.sources_(db, &body, &source_map).next().unwrap();
-        src
-    }
-
-    fn sources_<'a>(
-        self,
-        db: &'a dyn HirDatabase,
-        body: &'a hir_def::body::Body,
-        source_map: &'a hir_def::body::BodySourceMap,
-    ) -> impl Iterator<Item = LocalSource> + 'a {
-        body[self.binding_id]
-            .definitions
-            .iter()
-            .map(|&definition| {
-                let src = source_map.pat_syntax(definition).unwrap(); // Hmm...
-                let root = src.file_syntax(db.upcast());
-                src.map(|ast| match ast.to_node(&root) {
-                    Either::Left(ast::Pat::IdentPat(it)) => Either::Left(it),
-                    Either::Left(_) => unreachable!("local with non ident-pattern"),
-                    Either::Right(it) => Either::Right(it),
-                })
-            })
-            .map(move |source| LocalSource { local: self, source })
+        match body.self_param.zip(source_map.self_param_syntax()) {
+            Some((param, source)) if param == self.binding_id => {
+                let root = source.file_syntax(db.upcast());
+                LocalSource {
+                    local: self,
+                    source: source.map(|ast| Either::Right(ast.to_node(&root))),
+                }
+            }
+            _ => body[self.binding_id]
+                .definitions
+                .first()
+                .map(|&definition| {
+                    let src = source_map.pat_syntax(definition).unwrap(); // Hmm...
+                    let root = src.file_syntax(db.upcast());
+                    LocalSource {
+                        local: self,
+                        source: src.map(|ast| match ast.to_node(&root) {
+                            ast::Pat::IdentPat(it) => Either::Left(it),
+                            _ => unreachable!("local with non ident-pattern"),
+                        }),
+                    }
+                })
+                .unwrap(),
+        }
     }
 }

@@ -4037,7 +4082,7 @@ impl Type {
         let canonical_ty =
             Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
-        method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), trait_)
+        method_resolution::implements_trait(&canonical_ty, db, &self.env, trait_)
     }

     /// Checks that particular type `ty` implements `std::ops::FnOnce`.
@@ -4052,12 +4097,7 @@ impl Type {
         let canonical_ty =
             Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
-        method_resolution::implements_trait_unique(
-            &canonical_ty,
-            db,
-            self.env.clone(),
-            fnonce_trait,
-        )
+        method_resolution::implements_trait_unique(&canonical_ty, db, &self.env, fnonce_trait)
     }

     // FIXME: Find better API that also handles const generics
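
The `Param` hunks above reflect that `self`, when present, occupies index 0 and positional parameters follow it, so pattern lookups subtract `body.self_param.is_some() as usize`. A toy version of that mapping — a hypothetical helper, not rust-analyzer's API:

/// Map a Param index to an index into the positional-pattern list.
/// Returns None for `self`, which has a binding but no pattern.
fn positional_index(idx: usize, has_self_param: bool) -> Option<usize> {
    if has_self_param && idx == 0 {
        return None; // Param 0 is `self`
    }
    Some(idx - has_self_param as usize)
}

fn main() {
    // fn method(&self, a: A, b: B): params are [self, a, b]
    assert_eq!(positional_index(0, true), None);
    assert_eq!(positional_index(1, true), Some(0));
    // fn free(a: A, b: B): params are [a, b]
    assert_eq!(positional_index(0, false), Some(0));
}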


@@ -681,28 +681,29 @@ impl<'db> SemanticsImpl<'db> {
             .filter(|&(_, include_file_id)| include_file_id == file_id)
         {
             let macro_file = invoc.as_macro_file();
-            let expansion_info = cache
-                .entry(macro_file)
-                .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
+            let expansion_info = cache.entry(macro_file).or_insert_with(|| {
+                let exp_info = macro_file.expansion_info(self.db.upcast());
+
+                let InMacroFile { file_id, value } = exp_info.expanded();
+                self.cache(value, file_id.into());
+
+                exp_info
+            });

             // Create the source analyzer for the macro call scope
             let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
             else {
                 continue;
             };
-            {
-                let InMacroFile { file_id: macro_file, value } = expansion_info.expanded();
-                self.cache(value, macro_file.into());
-            }

             // get mapped token in the include! macro file
-            let span = span::SpanData {
+            let span = span::Span {
                 range: token.text_range(),
                 anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
                 ctx: SyntaxContextId::ROOT,
             };

             let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
-                expansion_info.map_range_down(span)
+                expansion_info.map_range_down_exact(span)
             else {
                 continue;
             };
@@ -753,22 +754,20 @@ impl<'db> SemanticsImpl<'db> {
         let def_map = sa.resolver.def_map();
         let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];

         let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
-            let expansion_info = cache
-                .entry(macro_file)
-                .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
-            {
-                let InMacroFile { file_id, value } = expansion_info.expanded();
-                self.cache(value, file_id.into());
-            }
-            let InMacroFile { file_id, value: mapped_tokens } =
-                expansion_info.map_range_down(span)?;
+            let exp_info = cache.entry(macro_file).or_insert_with(|| {
+                let exp_info = macro_file.expansion_info(self.db.upcast());
+
+                let InMacroFile { file_id, value } = exp_info.expanded();
+                self.cache(value, file_id.into());
+
+                exp_info
+            });
+
+            let InMacroFile { file_id, value: mapped_tokens } = exp_info.map_range_down(span)?;
             let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
-            // if the length changed we have found a mapping for the token
+            // we have found a mapping for the token if the vec is non-empty
             let res = mapped_tokens.is_empty().not().then_some(());
             // requeue the tokens we got from mapping our current token down
             stack.push((HirFileId::from(file_id), mapped_tokens));
@@ -851,7 +850,13 @@ impl<'db> SemanticsImpl<'db> {
                         // remove any other token in this macro input, all their mappings are the
                         // same as this one
                         tokens.retain(|t| !text_range.contains_range(t.text_range()));
-                        process_expansion_for_token(&mut stack, file_id)
+
+                        process_expansion_for_token(&mut stack, file_id).or(file_id
+                            .eager_arg(self.db.upcast())
+                            .and_then(|arg| {
+                                // also descend into eager expansions
+                                process_expansion_for_token(&mut stack, arg.as_macro_file())
+                            }))
                     } else if let Some(meta) = ast::Meta::cast(parent) {
                         // attribute we failed expansion for earlier, this might be a derive invocation
                         // or derive helper attribute
@@ -960,7 +965,7 @@ impl<'db> SemanticsImpl<'db> {
     /// macro file the node resides in.
     pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
         let node = self.find_file(node);
-        node.original_file_range(self.db.upcast())
+        node.original_file_range_rooted(self.db.upcast())
     }

     /// Attempts to map the node out of macro expanded files returning the original file range.
@@ -984,9 +989,9 @@ impl<'db> SemanticsImpl<'db> {
     /// Attempts to map the node out of macro expanded files.
     /// This only work for attribute expansions, as other ones do not have nodes as input.
-    pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
+    pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
         let InFile { file_id, .. } = self.find_file(node);
-        InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
+        InFile::new(file_id, node).original_syntax_node_rooted(self.db.upcast()).map(
             |InRealFile { file_id, value }| {
                 self.cache(find_root(&value), file_id.into());
                 value
@@ -997,7 +1002,7 @@ impl<'db> SemanticsImpl<'db> {
     pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
         let root = self.parse_or_expand(src.file_id);
         let node = src.map(|it| it.to_node(&root));
-        node.as_ref().original_file_range(self.db.upcast())
+        node.as_ref().original_file_range_rooted(self.db.upcast())
     }

     fn token_ancestors_with_macros(
@@ -1236,6 +1241,11 @@ impl<'db> SemanticsImpl<'db> {
         sa.resolve_macro_call(self.db, macro_call)
     }

+    pub fn is_proc_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+        self.resolve_macro_call(macro_call)
+            .map_or(false, |m| matches!(m.id, MacroId::ProcMacroId(..)))
+    }
+
     pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
         let sa = match self.analyze(macro_call.syntax()) {
             Some(it) => it,
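
Both expansion-info call sites above now do their one-time work inside `or_insert_with`, so the expanded file is registered exactly when the cache entry is first created rather than on every lookup. The same shape with `std::collections::HashMap` (toy value type; the real code caches `ExpansionInfo` keyed by macro file):

use std::collections::HashMap;

fn expansion_info<'a>(cache: &'a mut HashMap<u32, String>, macro_file: u32) -> &'a String {
    cache.entry(macro_file).or_insert_with(|| {
        let exp_info = format!("expansion of {macro_file}");
        // one-time side effects (e.g. caching the expanded syntax tree)
        // belong here, where the entry is first built
        exp_info
    })
}

fn main() {
    let mut cache = HashMap::new();
    assert_eq!(expansion_info(&mut cache, 1), "expansion of 1");
    assert_eq!(expansion_info(&mut cache, 1), "expansion of 1"); // cached
}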


@@ -101,7 +101,7 @@ use hir_def::{
 use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use stdx::{impl_from, never};
+use stdx::impl_from;
 use syntax::{
     ast::{self, HasName},
     AstNode, SyntaxNode,
@@ -253,14 +253,8 @@ impl SourceToDefCtx<'_, '_> {
         src: InFile<ast::SelfParam>,
     ) -> Option<(DefWithBodyId, BindingId)> {
         let container = self.find_pat_or_label_container(src.syntax())?;
-        let (body, source_map) = self.db.body_with_source_map(container);
-        let pat_id = source_map.node_self_param(src.as_ref())?;
-        if let crate::Pat::Bind { id, .. } = body[pat_id] {
-            Some((container, id))
-        } else {
-            never!();
-            None
-        }
+        let body = self.db.body(container);
+        Some((container, body.self_param?))
     }

     pub(super) fn label_to_def(
         &mut self,


@@ -219,11 +219,10 @@ impl SourceAnalyzer {
     pub(crate) fn type_of_self(
         &self,
         db: &dyn HirDatabase,
-        param: &ast::SelfParam,
+        _param: &ast::SelfParam,
     ) -> Option<Type> {
-        let src = InFile { file_id: self.file_id, value: param };
-        let pat_id = self.body_source_map()?.node_self_param(src)?;
-        let ty = self.infer.as_ref()?[pat_id].clone();
+        let binding = self.body()?.self_param?;
+        let ty = self.infer.as_ref()?[binding].clone();
         Some(Type::new_with_resolver(db, &self.resolver, ty))
     }


@@ -49,7 +49,7 @@ impl DeclarationLocation {
             return FileRange { file_id, range: self.ptr.text_range() };
         }
         let node = resolve_node(db, self.hir_file_id, &self.ptr);
-        node.as_ref().original_file_range(db.upcast())
+        node.as_ref().original_file_range_rooted(db.upcast())
     }
 }

@@ -165,7 +165,6 @@ impl<'a> SymbolCollector<'a> {
         // Record renamed imports.
         // FIXME: In case it imports multiple items under different namespaces we just pick one arbitrarily
         // for now.
-        // FIXME: This parses!
         for id in scope.imports() {
             let source = id.import.child_source(self.db.upcast());
             let Some(use_tree_src) = source.value.get(id.idx) else { continue };
@@ -196,7 +195,7 @@ impl<'a> SymbolCollector<'a> {
         });
     }

-        for const_id in scope.unnamed_consts(self.db.upcast()) {
+        for const_id in scope.unnamed_consts() {
             self.collect_from_body(const_id);
         }

File diff suppressed because it is too large.


@@ -198,7 +198,7 @@ fn get_adt_source(
     adt: &hir::Adt,
     fn_name: &str,
 ) -> Option<(Option<ast::Impl>, FileId)> {
-    let range = adt.source(ctx.sema.db)?.syntax().original_file_range(ctx.sema.db);
+    let range = adt.source(ctx.sema.db)?.syntax().original_file_range_rooted(ctx.sema.db);
     let file = ctx.sema.parse(range.file_id);
     let adt_source =
         ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;


@@ -206,7 +206,7 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
     let fn_body = fn_source.value.body()?;
     let param_list = fn_source.value.param_list()?;

-    let FileRange { file_id, range } = fn_source.syntax().original_file_range(ctx.sema.db);
+    let FileRange { file_id, range } = fn_source.syntax().original_file_range_rooted(ctx.sema.db);
     if file_id == ctx.file_id() && range.contains(ctx.offset()) {
         cov_mark::hit!(inline_call_recursive);
         return None;


@ -1,7 +1,10 @@
//! Completes environment variables defined by Cargo (https://doc.rust-lang.org/cargo/reference/environment-variables.html) //! Completes environment variables defined by Cargo (https://doc.rust-lang.org/cargo/reference/environment-variables.html)
use hir::Semantics; use hir::MacroFileIdExt;
use ide_db::{syntax_helpers::node_ext::macro_call_for_string_token, RootDatabase}; use ide_db::syntax_helpers::node_ext::macro_call_for_string_token;
use syntax::ast::{self, IsString}; use syntax::{
ast::{self, IsString},
AstToken,
};
use crate::{ use crate::{
completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind, completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind,
@ -32,10 +35,24 @@ const CARGO_DEFINED_VARS: &[(&str, &str)] = &[
pub(crate) fn complete_cargo_env_vars( pub(crate) fn complete_cargo_env_vars(
acc: &mut Completions, acc: &mut Completions,
ctx: &CompletionContext<'_>, ctx: &CompletionContext<'_>,
original: &ast::String,
expanded: &ast::String, expanded: &ast::String,
) -> Option<()> { ) -> Option<()> {
guard_env_macro(expanded, &ctx.sema)?; let is_in_env_expansion = ctx
let range = expanded.text_range_between_quotes()?; .sema
.hir_file_for(&expanded.syntax().parent()?)
.macro_file()
.map_or(false, |it| it.is_env_or_option_env(ctx.sema.db));
if !is_in_env_expansion {
let call = macro_call_for_string_token(expanded)?;
let makro = ctx.sema.resolve_macro_call(&call)?;
// We won't map into `option_env` as that generates `None` for non-existent env vars
// so fall back to this lookup
if !makro.is_env_or_option_env(ctx.sema.db) {
return None;
}
}
let range = original.text_range_between_quotes()?;
CARGO_DEFINED_VARS.iter().for_each(|&(var, detail)| { CARGO_DEFINED_VARS.iter().for_each(|&(var, detail)| {
let mut item = CompletionItem::new(CompletionItemKind::Keyword, range, var); let mut item = CompletionItem::new(CompletionItemKind::Keyword, range, var);
@ -46,18 +63,6 @@ pub(crate) fn complete_cargo_env_vars(
Some(()) Some(())
} }
fn guard_env_macro(string: &ast::String, semantics: &Semantics<'_, RootDatabase>) -> Option<()> {
let call = macro_call_for_string_token(string)?;
let name = call.path()?.segment()?.name_ref()?;
let makro = semantics.resolve_macro_call(&call)?;
let db = semantics.db;
match name.text().as_str() {
"env" | "option_env" if makro.kind(db) == hir::MacroKind::BuiltIn => Some(()),
_ => None,
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::tests::{check_edit, completion_list}; use crate::tests::{check_edit, completion_list};
@ -68,7 +73,7 @@ mod tests {
&format!( &format!(
r#" r#"
#[rustc_builtin_macro] #[rustc_builtin_macro]
macro_rules! {macro_name} {{ macro {macro_name} {{
($var:literal) => {{ 0 }} ($var:literal) => {{ 0 }}
}} }}
@ -80,7 +85,7 @@ mod tests {
&format!( &format!(
r#" r#"
#[rustc_builtin_macro] #[rustc_builtin_macro]
macro_rules! {macro_name} {{ macro {macro_name} {{
($var:literal) => {{ 0 }} ($var:literal) => {{ 0 }}
}} }}
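A quick illustration of the behavior this file's hunks enable (the variable choices below are just examples): Cargo-defined variables now complete inside both `env!` and `option_env!` string literals. The fallback that resolves the macro call directly exists because, as the comment above notes, `option_env!` expands to `None` for unset variables, so the expansion alone cannot be mapped back.

```rust
fn main() {
    // `env!` fails the build when the variable is unset, so completion can map
    // straight through its expansion.
    let dir: &str = env!("CARGO_MANIFEST_DIR");
    // `option_env!` expands to `None` for unset variables, losing the mapping;
    // that is why the fallback resolves the macro call itself.
    let name: Option<&str> = option_env!("CARGO_PKG_NAME");
    println!("{dir} {name:?}");
}
```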

View file

@ -96,7 +96,7 @@ fn complete_trait_impl_name(
.parent() .parent()
} }
}?; }?;
let item = ctx.sema.original_syntax_node(&item)?; let item = ctx.sema.original_syntax_node_rooted(&item)?;
// item -> ASSOC_ITEM_LIST -> IMPL // item -> ASSOC_ITEM_LIST -> IMPL
let impl_def = ast::Impl::cast(item.parent()?.parent()?)?; let impl_def = ast::Impl::cast(item.parent()?.parent()?)?;
let replacement_range = { let replacement_range = {

View file

@ -2,7 +2,7 @@
use std::iter; use std::iter;
use hir::{HirFileIdExt, Module, ModuleSource}; use hir::{HirFileIdExt, Module};
use ide_db::{ use ide_db::{
base_db::{SourceDatabaseExt, VfsPath}, base_db::{SourceDatabaseExt, VfsPath},
FxHashSet, RootDatabase, SymbolKind, FxHashSet, RootDatabase, SymbolKind,
@ -57,7 +57,7 @@ pub(crate) fn complete_mod(
.collect::<FxHashSet<_>>(); .collect::<FxHashSet<_>>();
let module_declaration_file = let module_declaration_file =
current_module.declaration_source(ctx.db).map(|module_declaration_source_file| { current_module.declaration_source_range(ctx.db).map(|module_declaration_source_file| {
module_declaration_source_file.file_id.original_file(ctx.db) module_declaration_source_file.file_id.original_file(ctx.db)
}); });
@ -148,9 +148,7 @@ fn module_chain_to_containing_module_file(
) -> Vec<Module> { ) -> Vec<Module> {
let mut path = let mut path =
iter::successors(Some(current_module), |current_module| current_module.parent(db)) iter::successors(Some(current_module), |current_module| current_module.parent(db))
.take_while(|current_module| { .take_while(|current_module| current_module.is_inline(db))
matches!(current_module.definition_source(db).value, ModuleSource::Module(_))
})
.collect::<Vec<_>>(); .collect::<Vec<_>>();
path.reverse(); path.reverse();
path path
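For context, a minimal sketch of what `Module::is_inline` distinguishes; swapping it in for the `ModuleSource::Module(_)` match is a pure simplification, since only inline modules continue the chain within the current file:

```rust
mod inline {                // is_inline(db) == true: the body is right here
    pub fn f() {}
}
// mod out_of_line;         // is_inline(db) == false: body lives in out_of_line.rs

fn main() {
    inline::f();
}
```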

View file

@ -369,6 +369,7 @@ impl CompletionItemKind {
SymbolKind::LifetimeParam => "lt", SymbolKind::LifetimeParam => "lt",
SymbolKind::Local => "lc", SymbolKind::Local => "lc",
SymbolKind::Macro => "ma", SymbolKind::Macro => "ma",
SymbolKind::ProcMacro => "pm",
SymbolKind::Module => "md", SymbolKind::Module => "md",
SymbolKind::SelfParam => "sp", SymbolKind::SelfParam => "sp",
SymbolKind::SelfType => "sy", SymbolKind::SelfType => "sy",

View file

@ -207,7 +207,7 @@ pub fn completions(
CompletionAnalysis::String { original, expanded: Some(expanded) } => { CompletionAnalysis::String { original, expanded: Some(expanded) } => {
completions::extern_abi::complete_extern_abi(acc, ctx, expanded); completions::extern_abi::complete_extern_abi(acc, ctx, expanded);
completions::format_string::format_string(acc, ctx, original, expanded); completions::format_string::format_string(acc, ctx, original, expanded);
completions::env_vars::complete_cargo_env_vars(acc, ctx, expanded); completions::env_vars::complete_cargo_env_vars(acc, ctx, original, expanded);
} }
CompletionAnalysis::UnexpandedAttrTT { CompletionAnalysis::UnexpandedAttrTT {
colon_prefix, colon_prefix,

View file

@ -205,6 +205,7 @@ impl RootDatabase {
// SourceDatabaseExt // SourceDatabaseExt
base_db::FileTextQuery base_db::FileTextQuery
base_db::CompressedFileTextQuery
base_db::FileSourceRootQuery base_db::FileSourceRootQuery
base_db::SourceRootQuery base_db::SourceRootQuery
base_db::SourceRootCratesQuery base_db::SourceRootCratesQuery
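The new `CompressedFileTextQuery` suggests file texts are now held compressed at rest and decompressed on access, with the `DEFAULT_FILE_TEXT_LRU_CAP` constant in a later hunk bounding how many decoded texts stay live. A sketch of the round-trip, assuming the `lz4_flex` convenience API (an illustration of the idea, not the actual query implementation):

```rust
use lz4_flex::{compress_prepend_size, decompress_size_prepended};

fn main() {
    let text = "fn main() { /* source text */ }".repeat(64);
    // Store compressed; the uncompressed text is recreated on demand.
    let stored = compress_prepend_size(text.as_bytes());
    let loaded = decompress_size_prepended(&stored).unwrap();
    assert_eq!(loaded, text.as_bytes());
}
```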

View file

@ -407,7 +407,7 @@ impl NameClass {
} }
pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<NameClass> { pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<NameClass> {
let _p = tracing::span!(tracing::Level::INFO, "classify_name").entered(); let _p = tracing::span!(tracing::Level::INFO, "NameClass::classify").entered();
let parent = name.syntax().parent()?; let parent = name.syntax().parent()?;
@ -499,7 +499,8 @@ impl NameClass {
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
lifetime: &ast::Lifetime, lifetime: &ast::Lifetime,
) -> Option<NameClass> { ) -> Option<NameClass> {
let _p = tracing::span!(tracing::Level::INFO, "classify_lifetime", ?lifetime).entered(); let _p = tracing::span!(tracing::Level::INFO, "NameClass::classify_lifetime", ?lifetime)
.entered();
let parent = lifetime.syntax().parent()?; let parent = lifetime.syntax().parent()?;
if let Some(it) = ast::LifetimeParam::cast(parent.clone()) { if let Some(it) = ast::LifetimeParam::cast(parent.clone()) {
@ -590,7 +591,8 @@ impl NameRefClass {
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
name_ref: &ast::NameRef, name_ref: &ast::NameRef,
) -> Option<NameRefClass> { ) -> Option<NameRefClass> {
let _p = tracing::span!(tracing::Level::INFO, "classify_name_ref", ?name_ref).entered(); let _p =
tracing::span!(tracing::Level::INFO, "NameRefClass::classify", ?name_ref).entered();
let parent = name_ref.syntax().parent()?; let parent = name_ref.syntax().parent()?;
@ -689,7 +691,8 @@ impl NameRefClass {
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
lifetime: &ast::Lifetime, lifetime: &ast::Lifetime,
) -> Option<NameRefClass> { ) -> Option<NameRefClass> {
let _p = tracing::span!(tracing::Level::INFO, "classify_lifetime_ref", ?lifetime).entered(); let _p = tracing::span!(tracing::Level::INFO, "NameRefClass::classify_lifetime", ?lifetime)
.entered();
let parent = lifetime.syntax().parent()?; let parent = lifetime.syntax().parent()?;
match parent.kind() { match parent.kind() {
SyntaxKind::BREAK_EXPR | SyntaxKind::CONTINUE_EXPR => { SyntaxKind::BREAK_EXPR | SyntaxKind::CONTINUE_EXPR => {

View file

@ -71,7 +71,7 @@ pub fn visit_file_defs(
let mut defs: VecDeque<_> = module.declarations(db).into(); let mut defs: VecDeque<_> = module.declarations(db).into();
while let Some(def) = defs.pop_front() { while let Some(def) = defs.pop_front() {
if let ModuleDef::Module(submodule) = def { if let ModuleDef::Module(submodule) = def {
if let hir::ModuleSource::Module(_) = submodule.definition_source(db).value { if submodule.is_inline(db) {
defs.extend(submodule.declarations(db)); defs.extend(submodule.declarations(db));
submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into())); submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
} }

View file

@ -51,6 +51,7 @@ use std::{fmt, mem::ManuallyDrop};
use base_db::{ use base_db::{
salsa::{self, Durability}, salsa::{self, Durability},
AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
DEFAULT_FILE_TEXT_LRU_CAP,
}; };
use hir::db::{DefDatabase, ExpandDatabase, HirDatabase}; use hir::db::{DefDatabase, ExpandDatabase, HirDatabase};
use triomphe::Arc; use triomphe::Arc;
@ -157,6 +158,7 @@ impl RootDatabase {
pub fn update_base_query_lru_capacities(&mut self, lru_capacity: Option<usize>) { pub fn update_base_query_lru_capacities(&mut self, lru_capacity: Option<usize>) {
let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP); let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP);
base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
// macro expansions are usually rather small, so we can afford to keep more of them alive // macro expansions are usually rather small, so we can afford to keep more of them alive
hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity); hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
@ -166,6 +168,7 @@ impl RootDatabase {
pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, usize>) { pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, usize>) {
use hir::db as hir_db; use hir::db as hir_db;
base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
base_db::ParseQuery.in_db_mut(self).set_lru_capacity( base_db::ParseQuery.in_db_mut(self).set_lru_capacity(
lru_capacities lru_capacities
.get(stringify!(ParseQuery)) .get(stringify!(ParseQuery))
@ -199,7 +202,7 @@ impl RootDatabase {
// base_db::ProcMacrosQuery // base_db::ProcMacrosQuery
// SourceDatabaseExt // SourceDatabaseExt
// base_db::FileTextQuery base_db::FileTextQuery
// base_db::FileSourceRootQuery // base_db::FileSourceRootQuery
// base_db::SourceRootQuery // base_db::SourceRootQuery
base_db::SourceRootCratesQuery base_db::SourceRootCratesQuery
@ -348,6 +351,7 @@ pub enum SymbolKind {
LifetimeParam, LifetimeParam,
Local, Local,
Macro, Macro,
ProcMacro,
Module, Module,
SelfParam, SelfParam,
SelfType, SelfType,
@ -366,9 +370,8 @@ pub enum SymbolKind {
impl From<hir::MacroKind> for SymbolKind { impl From<hir::MacroKind> for SymbolKind {
fn from(it: hir::MacroKind) -> Self { fn from(it: hir::MacroKind) -> Self {
match it { match it {
hir::MacroKind::Declarative | hir::MacroKind::BuiltIn | hir::MacroKind::ProcMacro => { hir::MacroKind::Declarative | hir::MacroKind::BuiltIn => SymbolKind::Macro,
SymbolKind::Macro hir::MacroKind::ProcMacro => SymbolKind::ProcMacro,
}
hir::MacroKind::Derive => SymbolKind::Derive, hir::MacroKind::Derive => SymbolKind::Derive,
hir::MacroKind::Attr => SymbolKind::Attribute, hir::MacroKind::Attr => SymbolKind::Attribute,
} }
@ -381,6 +384,7 @@ impl From<hir::ModuleDefId> for SymbolKind {
hir::ModuleDefId::ConstId(..) => SymbolKind::Const, hir::ModuleDefId::ConstId(..) => SymbolKind::Const,
hir::ModuleDefId::EnumVariantId(..) => SymbolKind::Variant, hir::ModuleDefId::EnumVariantId(..) => SymbolKind::Variant,
hir::ModuleDefId::FunctionId(..) => SymbolKind::Function, hir::ModuleDefId::FunctionId(..) => SymbolKind::Function,
hir::ModuleDefId::MacroId(hir::MacroId::ProcMacroId(..)) => SymbolKind::ProcMacro,
hir::ModuleDefId::MacroId(..) => SymbolKind::Macro, hir::ModuleDefId::MacroId(..) => SymbolKind::Macro,
hir::ModuleDefId::ModuleId(..) => SymbolKind::Module, hir::ModuleDefId::ModuleId(..) => SymbolKind::Module,
hir::ModuleDefId::StaticId(..) => SymbolKind::Static, hir::ModuleDefId::StaticId(..) => SymbolKind::Static,
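Taken together, these hunks split function-like procedural macros out of the catch-all `Macro` symbol kind. A condensed sketch of the new mapping, with the enums defined locally here so it runs standalone (the `pm` tag comes from the completion hunk earlier; the `Derive`/`Attr` arms are unchanged):

```rust
#[derive(Clone, Copy)]
enum MacroKind { Declarative, BuiltIn, ProcMacro, Derive, Attr }

#[derive(Clone, Copy, Debug, PartialEq)]
enum SymbolKind { Macro, ProcMacro, Derive, Attribute }

fn symbol_kind(it: MacroKind) -> SymbolKind {
    match it {
        // Declarative and built-in macros keep the generic kind ("ma" tag)...
        MacroKind::Declarative | MacroKind::BuiltIn => SymbolKind::Macro,
        // ...while function-like proc macros get their own kind ("pm" tag).
        MacroKind::ProcMacro => SymbolKind::ProcMacro,
        MacroKind::Derive => SymbolKind::Derive,
        MacroKind::Attr => SymbolKind::Attribute,
    }
}

fn main() {
    assert_eq!(symbol_kind(MacroKind::ProcMacro), SymbolKind::ProcMacro);
}
```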

View file

@ -190,22 +190,15 @@ impl SearchScope {
let mut entries = IntMap::default(); let mut entries = IntMap::default();
let (file_id, range) = { let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db); let InFile { file_id, value } = module.definition_source_range(db);
if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db)
{ {
(file_id, Some(call_source.text_range())) (file_id, Some(call_source.text_range()))
} else { } else {
( (file_id.original_file(db), Some(value))
file_id.original_file(db),
match value {
ModuleSource::SourceFile(_) => None,
ModuleSource::Module(it) => Some(it.syntax().text_range()),
ModuleSource::BlockExpr(it) => Some(it.syntax().text_range()),
},
)
} }
}; };
entries.insert(file_id, range); entries.entry(file_id).or_insert(range);
let mut to_visit: Vec<_> = module.children(db).collect(); let mut to_visit: Vec<_> = module.children(db).collect();
while let Some(module) = to_visit.pop() { while let Some(module) = to_visit.pop() {

View file

@ -38,7 +38,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option<Vec<Ass
let def = NameClass::classify(&ctx.sema, &name_node)?.defined()?; let def = NameClass::classify(&ctx.sema, &name_node)?.defined()?;
let name_node = InFile::new(d.file, name_node.syntax()); let name_node = InFile::new(d.file, name_node.syntax());
let frange = name_node.original_file_range(ctx.sema.db); let frange = name_node.original_file_range_rooted(ctx.sema.db);
let label = format!("Rename to {}", d.suggested_text); let label = format!("Rename to {}", d.suggested_text);
let mut res = unresolved_fix("change_case", &label, frange.range); let mut res = unresolved_fix("change_case", &label, frange.range);

View file

@ -413,7 +413,7 @@ fn main() {
fn main() { fn main() {
return; return;
let mut x = 2; let mut x = 2;
//^^^^^ warn: unused variable //^^^^^ 💡 warn: unused variable
&mut x; &mut x;
} }
"#, "#,
@ -423,7 +423,7 @@ fn main() {
fn main() { fn main() {
loop {} loop {}
let mut x = 2; let mut x = 2;
//^^^^^ warn: unused variable //^^^^^ 💡 warn: unused variable
&mut x; &mut x;
} }
"#, "#,
@ -444,7 +444,7 @@ fn main(b: bool) {
g(); g();
} }
let mut x = 2; let mut x = 2;
//^^^^^ warn: unused variable //^^^^^ 💡 warn: unused variable
&mut x; &mut x;
} }
"#, "#,
@ -459,7 +459,7 @@ fn main(b: bool) {
return; return;
} }
let mut x = 2; let mut x = 2;
//^^^^^ warn: unused variable //^^^^^ 💡 warn: unused variable
&mut x; &mut x;
} }
"#, "#,
@ -789,7 +789,7 @@ fn f() {
//^^ 💡 error: cannot mutate immutable variable `x` //^^ 💡 error: cannot mutate immutable variable `x`
_ = (x, y); _ = (x, y);
let x = Foo; let x = Foo;
//^ warn: unused variable //^ 💡 warn: unused variable
let x = Foo; let x = Foo;
let y: &mut (i32, u8) = &mut x; let y: &mut (i32, u8) = &mut x;
//^^^^^^ 💡 error: cannot mutate immutable variable `x` //^^^^^^ 💡 error: cannot mutate immutable variable `x`

View file

@ -1,11 +1,22 @@
use hir::{db::ExpandDatabase, HirDisplay, InFile}; use std::iter;
use hir::{db::ExpandDatabase, Adt, HasSource, HirDisplay, InFile, Struct, Union};
use ide_db::{ use ide_db::{
assists::{Assist, AssistId, AssistKind}, assists::{Assist, AssistId, AssistKind},
base_db::FileRange, base_db::FileRange,
helpers::is_editable_crate,
label::Label, label::Label,
source_change::SourceChange, source_change::{SourceChange, SourceChangeBuilder},
};
use syntax::{
algo,
ast::{self, edit::IndentLevel, make, FieldList, Name, Visibility},
AstNode, AstPtr, Direction, SyntaxKind, TextSize,
};
use syntax::{
ast::{edit::AstNodeEdit, Type},
SyntaxNode,
}; };
use syntax::{ast, AstNode, AstPtr};
use text_edit::TextEdit; use text_edit::TextEdit;
use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
@ -46,24 +57,206 @@ pub(crate) fn unresolved_field(
} }
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<Assist>> { fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<Assist>> {
let mut fixes = Vec::new();
if d.method_with_same_name_exists { if d.method_with_same_name_exists {
method_fix(ctx, &d.expr) fixes.extend(method_fix(ctx, &d.expr));
} else {
// FIXME: add quickfix
None
} }
fixes.extend(field_fix(ctx, d));
if fixes.is_empty() {
None
} else {
Some(fixes)
}
}
// FIXME: Add Snippet Support
fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Assist> {
// Get the FileRange of the invalid field access
let root = ctx.sema.db.parse_or_expand(d.expr.file_id);
let expr = d.expr.value.to_node(&root);
let error_range = ctx.sema.original_range_opt(expr.syntax())?;
let field_name = d.name.as_str()?;
// Convert the receiver to an ADT
let adt = d.receiver.strip_references().as_adt()?;
let target_module = adt.module(ctx.sema.db);
let suggested_type =
if let Some(new_field_type) = ctx.sema.type_of_expr(&expr).map(|v| v.adjusted()) {
let display =
new_field_type.display_source_code(ctx.sema.db, target_module.into(), false).ok();
make::ty(display.as_deref().unwrap_or("()"))
} else {
make::ty("()")
};
if !is_editable_crate(target_module.krate(), ctx.sema.db) {
return None;
}
match adt {
Adt::Struct(adt_struct) => {
add_field_to_struct_fix(ctx, adt_struct, field_name, suggested_type, error_range)
}
Adt::Union(adt_union) => {
add_variant_to_union(ctx, adt_union, field_name, suggested_type, error_range)
}
_ => None,
}
}
fn add_variant_to_union(
ctx: &DiagnosticsContext<'_>,
adt_union: Union,
field_name: &str,
suggested_type: Type,
error_range: FileRange,
) -> Option<Assist> {
let adt_source = adt_union.source(ctx.sema.db)?;
let adt_syntax = adt_source.syntax();
let field_list = adt_source.value.record_field_list()?;
let range = adt_syntax.original_file_range_rooted(ctx.sema.db);
let field_name = make::name(field_name);
let (offset, record_field) =
record_field_layout(None, field_name, suggested_type, field_list, adt_syntax.value)?;
let mut src_change_builder = SourceChangeBuilder::new(range.file_id);
src_change_builder.insert(offset, record_field);
Some(Assist {
id: AssistId("add-variant-to-union", AssistKind::QuickFix),
label: Label::new("Add field to union".to_owned()),
group: None,
target: error_range.range,
source_change: Some(src_change_builder.finish()),
trigger_signature_help: false,
})
}
fn add_field_to_struct_fix(
ctx: &DiagnosticsContext<'_>,
adt_struct: Struct,
field_name: &str,
suggested_type: Type,
error_range: FileRange,
) -> Option<Assist> {
let struct_source = adt_struct.source(ctx.sema.db)?;
let struct_syntax = struct_source.syntax();
let struct_range = struct_syntax.original_file_range_rooted(ctx.sema.db);
let field_list = struct_source.value.field_list();
match field_list {
Some(FieldList::RecordFieldList(field_list)) => {
// Get range of final field in the struct
let visibility = if error_range.file_id == struct_range.file_id {
None
} else {
Some(make::visibility_pub_crate())
};
let field_name = make::name(field_name);
let (offset, record_field) = record_field_layout(
visibility,
field_name,
suggested_type,
field_list,
struct_syntax.value,
)?;
let mut src_change_builder = SourceChangeBuilder::new(struct_range.file_id);
// FIXME: Allow for choosing a visibility modifier see https://github.com/rust-lang/rust-analyzer/issues/11563
src_change_builder.insert(offset, record_field);
Some(Assist {
id: AssistId("add-field-to-record-struct", AssistKind::QuickFix),
label: Label::new("Add field to Record Struct".to_owned()),
group: None,
target: error_range.range,
source_change: Some(src_change_builder.finish()),
trigger_signature_help: false,
})
}
None => {
// Add a field list to the Unit Struct
let mut src_change_builder = SourceChangeBuilder::new(struct_range.file_id);
let field_name = make::name(field_name);
let visibility = if error_range.file_id == struct_range.file_id {
None
} else {
Some(make::visibility_pub_crate())
};
// FIXME: Allow for choosing a visibility modifier see https://github.com/rust-lang/rust-analyzer/issues/11563
let indent = IndentLevel::from_node(struct_syntax.value) + 1;
let field = make::record_field(visibility, field_name, suggested_type).indent(indent);
let record_field_list = make::record_field_list(iter::once(field));
// A Unit Struct with no `;` is invalid syntax. We should not suggest this fix.
let semi_colon =
algo::skip_trivia_token(struct_syntax.value.last_token()?, Direction::Prev)?;
if semi_colon.kind() != SyntaxKind::SEMICOLON {
return None;
}
src_change_builder.replace(semi_colon.text_range(), record_field_list.to_string());
Some(Assist {
id: AssistId("convert-unit-struct-to-record-struct", AssistKind::QuickFix),
label: Label::new("Convert Unit Struct to Record Struct and add field".to_owned()),
group: None,
target: error_range.range,
source_change: Some(src_change_builder.finish()),
trigger_signature_help: false,
})
}
Some(FieldList::TupleFieldList(_tuple)) => {
// FIXME: Add support for Tuple Structs. Tuple Structs are not sent to this diagnostic
None
}
}
}
/// Used to determine the layout of the record field in the struct.
fn record_field_layout(
visibility: Option<Visibility>,
name: Name,
suggested_type: Type,
field_list: ast::RecordFieldList,
struct_syntax: &SyntaxNode,
) -> Option<(TextSize, String)> {
let (offset, needs_comma, trailing_new_line, indent) = match field_list.fields().last() {
Some(record_field) => {
let syntax = algo::skip_trivia_token(field_list.r_curly_token()?, Direction::Prev)?;
let last_field_syntax = record_field.syntax();
let last_field_indent = IndentLevel::from_node(last_field_syntax);
(
last_field_syntax.text_range().end(),
syntax.kind() != SyntaxKind::COMMA,
false,
last_field_indent,
)
}
// Empty Struct. Add a field right before the closing brace
None => {
let indent = IndentLevel::from_node(struct_syntax) + 1;
let offset = field_list.r_curly_token()?.text_range().start();
(offset, false, true, indent)
}
};
let comma = if needs_comma { ",\n" } else { "" };
let trailing_new_line = if trailing_new_line { "\n" } else { "" };
let record_field = make::record_field(visibility, name, suggested_type);
Some((offset, format!("{comma}{indent}{record_field}{trailing_new_line}")))
} }
// FIXME: We should fill out the call here, move the cursor and trigger signature help // FIXME: We should fill out the call here, move the cursor and trigger signature help
fn method_fix( fn method_fix(
ctx: &DiagnosticsContext<'_>, ctx: &DiagnosticsContext<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>, expr_ptr: &InFile<AstPtr<ast::Expr>>,
) -> Option<Vec<Assist>> { ) -> Option<Assist> {
let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id); let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
let expr = expr_ptr.value.to_node(&root); let expr = expr_ptr.value.to_node(&root);
let FileRange { range, file_id } = ctx.sema.original_range_opt(expr.syntax())?; let FileRange { range, file_id } = ctx.sema.original_range_opt(expr.syntax())?;
Some(vec![Assist { Some(Assist {
id: AssistId("expected-field-found-method-call-fix", AssistKind::QuickFix), id: AssistId("expected-field-found-method-call-fix", AssistKind::QuickFix),
label: Label::new("Use parentheses to call the method".to_owned()), label: Label::new("Use parentheses to call the method".to_owned()),
group: None, group: None,
@ -73,13 +266,15 @@ fn method_fix(
TextEdit::insert(range.end(), "()".to_owned()), TextEdit::insert(range.end(), "()".to_owned()),
)), )),
trigger_signature_help: false, trigger_signature_help: false,
}]) })
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::{ use crate::{
tests::{ tests::{
check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled, check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled,
check_fix,
}, },
DiagnosticsConfig, DiagnosticsConfig,
}; };
@ -168,4 +363,100 @@ fn foo() {
config.disabled.insert("syntax-error".to_owned()); config.disabled.insert("syntax-error".to_owned());
check_diagnostics_with_config(config, "fn foo() { (). }"); check_diagnostics_with_config(config, "fn foo() { (). }");
} }
#[test]
fn unresolved_field_fix_on_unit() {
check_fix(
r#"
struct Foo;
fn foo() {
Foo.bar$0;
}
"#,
r#"
struct Foo{ bar: () }
fn foo() {
Foo.bar;
}
"#,
);
}
#[test]
fn unresolved_field_fix_on_empty() {
check_fix(
r#"
struct Foo{
}
fn foo() {
let foo = Foo{};
foo.bar$0;
}
"#,
r#"
struct Foo{
bar: ()
}
fn foo() {
let foo = Foo{};
foo.bar;
}
"#,
);
}
#[test]
fn unresolved_field_fix_on_struct() {
check_fix(
r#"
struct Foo{
a: i32
}
fn foo() {
let foo = Foo{a: 0};
foo.bar$0;
}
"#,
r#"
struct Foo{
a: i32,
bar: ()
}
fn foo() {
let foo = Foo{a: 0};
foo.bar;
}
"#,
);
}
#[test]
fn unresolved_field_fix_on_union() {
check_fix(
r#"
union Foo{
a: i32
}
fn foo() {
let foo = Foo{a: 0};
foo.bar$0;
}
"#,
r#"
union Foo{
a: i32,
bar: ()
}
fn foo() {
let foo = Foo{a: 0};
foo.bar;
}
"#,
);
}
} }
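To make the string assembly in `record_field_layout` concrete, here is a stripped-down sketch of just the formatting decision (function name hypothetical, logic mirroring the code above): a non-empty field list without a trailing comma gets `",\n"` prepended, while an empty struct gets the field inserted right before `}` with a trailing newline.

```rust
fn render_insertion(needs_comma: bool, trailing_new_line: bool, indent: &str, field: &str) -> String {
    let comma = if needs_comma { ",\n" } else { "" };
    let newline = if trailing_new_line { "\n" } else { "" };
    format!("{comma}{indent}{field}{newline}")
}

fn main() {
    // Last existing field has no trailing comma: prepend ",\n" plus the indent.
    assert_eq!(render_insertion(true, false, "    ", "bar: ()"), ",\n    bar: ()");
    // Empty struct: insert before `}` and keep the brace on its own line.
    assert_eq!(render_insertion(false, true, "    ", "bar: ()"), "    bar: ()\n");
}
```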

View file

@ -1,3 +1,11 @@
use ide_db::{
assists::{Assist, AssistId, AssistKind},
base_db::FileRange,
label::Label,
source_change::SourceChange,
};
use text_edit::TextEdit;
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unused-variables // Diagnostic: unused-variables
@ -8,18 +16,38 @@ pub(crate) fn unused_variables(
d: &hir::UnusedVariable, d: &hir::UnusedVariable,
) -> Diagnostic { ) -> Diagnostic {
let ast = d.local.primary_source(ctx.sema.db).syntax_ptr(); let ast = d.local.primary_source(ctx.sema.db).syntax_ptr();
let diagnostic_range = ctx.sema.diagnostics_display_range(ast);
let var_name = d.local.primary_source(ctx.sema.db).syntax().to_string();
Diagnostic::new_with_syntax_node_ptr( Diagnostic::new_with_syntax_node_ptr(
ctx, ctx,
DiagnosticCode::RustcLint("unused_variables"), DiagnosticCode::RustcLint("unused_variables"),
"unused variable", "unused variable",
ast, ast,
) )
.with_fixes(fixes(&var_name, diagnostic_range, ast.file_id.is_macro()))
.experimental() .experimental()
} }
fn fixes(var_name: &str, diagnostic_range: FileRange, is_in_macro: bool) -> Option<Vec<Assist>> {
if is_in_macro {
return None;
}
Some(vec![Assist {
id: AssistId("unscore_unused_variable_name", AssistKind::QuickFix),
label: Label::new(format!("Rename unused {} to _{}", var_name, var_name)),
group: None,
target: diagnostic_range.range,
source_change: Some(SourceChange::from_text_edit(
diagnostic_range.file_id,
TextEdit::replace(diagnostic_range.range, format!("_{}", var_name)),
)),
trigger_signature_help: false,
}])
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::tests::check_diagnostics; use crate::tests::{check_diagnostics, check_fix, check_no_fix};
#[test] #[test]
fn unused_variables_simple() { fn unused_variables_simple() {
@ -29,23 +57,23 @@ mod tests {
struct Foo { f1: i32, f2: i64 } struct Foo { f1: i32, f2: i64 }
fn f(kkk: i32) {} fn f(kkk: i32) {}
//^^^ warn: unused variable //^^^ 💡 warn: unused variable
fn main() { fn main() {
let a = 2; let a = 2;
//^ warn: unused variable //^ 💡 warn: unused variable
let b = 5; let b = 5;
// note: `unused variable` implies `unused mut`, so we should not emit both at the same time. // note: `unused variable` implies `unused mut`, so we should not emit both at the same time.
let mut c = f(b); let mut c = f(b);
//^^^^^ warn: unused variable //^^^^^ 💡 warn: unused variable
let (d, e) = (3, 5); let (d, e) = (3, 5);
//^ warn: unused variable //^ 💡 warn: unused variable
let _ = e; let _ = e;
let f1 = 2; let f1 = 2;
let f2 = 5; let f2 = 5;
let f = Foo { f1, f2 }; let f = Foo { f1, f2 };
match f { match f {
Foo { f1, f2 } => { Foo { f1, f2 } => {
//^^ warn: unused variable //^^ 💡 warn: unused variable
_ = f2; _ = f2;
} }
} }
@ -53,7 +81,7 @@ fn main() {
if g {} if g {}
let h: fn() -> i32 = || 2; let h: fn() -> i32 = || 2;
let i = h(); let i = h();
//^ warn: unused variable //^ 💡 warn: unused variable
} }
"#, "#,
); );
@ -67,11 +95,11 @@ struct S {
} }
impl S { impl S {
fn owned_self(self, u: i32) {} fn owned_self(self, u: i32) {}
//^ warn: unused variable //^ 💡 warn: unused variable
fn ref_self(&self, u: i32) {} fn ref_self(&self, u: i32) {}
//^ warn: unused variable //^ 💡 warn: unused variable
fn ref_mut_self(&mut self, u: i32) {} fn ref_mut_self(&mut self, u: i32) {}
//^ warn: unused variable //^ 💡 warn: unused variable
fn owned_mut_self(mut self) {} fn owned_mut_self(mut self) {}
//^^^^^^^^ 💡 warn: variable does not need to be mutable //^^^^^^^^ 💡 warn: variable does not need to be mutable
@ -103,7 +131,78 @@ fn main() {
#[deny(unused)] #[deny(unused)]
fn main2() { fn main2() {
let x = 2; let x = 2;
//^ error: unused variable //^ 💡 error: unused variable
}
"#,
);
}
#[test]
fn fix_unused_variable() {
check_fix(
r#"
fn main() {
let x$0 = 2;
}
"#,
r#"
fn main() {
let _x = 2;
}
"#,
);
check_fix(
r#"
fn main() {
let ($0d, _e) = (3, 5);
}
"#,
r#"
fn main() {
let (_d, _e) = (3, 5);
}
"#,
);
check_fix(
r#"
struct Foo { f1: i32, f2: i64 }
fn main() {
let f = Foo { f1: 0, f2: 0 };
match f {
Foo { f1$0, f2 } => {
_ = f2;
}
}
}
"#,
r#"
struct Foo { f1: i32, f2: i64 }
fn main() {
let f = Foo { f1: 0, f2: 0 };
match f {
Foo { _f1, f2 } => {
_ = f2;
}
}
}
"#,
);
}
#[test]
fn no_fix_for_macro() {
check_no_fix(
r#"
macro_rules! my_macro {
() => {
let x = 3;
};
}
fn main() {
$0my_macro!();
} }
"#, "#,
); );
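The fix simply prepends an underscore, which opts the binding out of the `unused_variables` lint; it is withheld inside macro expansions because the rewrite would land in the macro definition rather than at the call site. In plain Rust terms:

```rust
fn main() {
    // Before the fix this warns: `let x = 2;` (unused variable).
    // After "Rename unused x to _x" is applied:
    let _x = 2; // the leading underscore opts out of the unused_variables lint
}
```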

View file

@ -125,9 +125,12 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
let match_state = Matcher { sema, restrict_range: *restrict_range, rule }; let match_state = Matcher { sema, restrict_range: *restrict_range, rule };
// First pass at matching, where we check that node types and idents match. // First pass at matching, where we check that node types and idents match.
match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?; match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
match_state.validate_range(&sema.original_range(code))?; let file_range = sema
.original_range_opt(code)
.ok_or(MatchFailed { reason: Some("def site definition".to_owned()) })?;
match_state.validate_range(&file_range)?;
let mut the_match = Match { let mut the_match = Match {
range: sema.original_range(code), range: file_range,
matched_node: code.clone(), matched_node: code.clone(),
placeholder_values: FxHashMap::default(), placeholder_values: FxHashMap::default(),
ignored_comments: Vec::new(), ignored_comments: Vec::new(),
@ -175,7 +178,10 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
self.check_constraint(constraint, code)?; self.check_constraint(constraint, code)?;
} }
if let Phase::Second(matches_out) = phase { if let Phase::Second(matches_out) = phase {
let original_range = self.sema.original_range(code); let original_range = self
.sema
.original_range_opt(code)
.ok_or(MatchFailed { reason: Some("def site definition".to_owned()) })?;
// We validated the range for the node when we started the match, so the placeholder // We validated the range for the node when we started the match, so the placeholder
// probably can't fail range validation, but just to be safe... // probably can't fail range validation, but just to be safe...
self.validate_range(&original_range)?; self.validate_range(&original_range)?;
@ -487,7 +493,13 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
match_out.placeholder_values.insert( match_out.placeholder_values.insert(
placeholder.ident.clone(), placeholder.ident.clone(),
PlaceholderMatch::from_range(FileRange { PlaceholderMatch::from_range(FileRange {
file_id: self.sema.original_range(code).file_id, file_id: self
.sema
.original_range_opt(code)
.ok_or(MatchFailed {
reason: Some("def site definition".to_owned()),
})?
.file_id,
range: first_matched_token range: first_matched_token
.text_range() .text_range()
.cover(last_matched_token.text_range()), .cover(last_matched_token.text_range()),
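All three call sites move from the panicking `original_range` to `original_range_opt`, turning "this node has no call-site range because it comes from the macro definition" into an ordinary match failure. The error-propagation shape in isolation, with the range type simplified to a tuple:

```rust
struct MatchFailed { reason: Option<String> }

// A node that maps only to def-site code yields no original range (None);
// that now becomes a MatchFailed instead of a panic.
fn validate_range(original: Option<(u32, u32)>) -> Result<(u32, u32), MatchFailed> {
    original.ok_or(MatchFailed { reason: Some("def site definition".to_owned()) })
}

fn main() {
    let err = validate_range(None).unwrap_err();
    assert_eq!(err.reason.as_deref(), Some("def site definition"));
    assert!(validate_range(Some((0, 10))).is_ok());
}
```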

View file

@ -190,12 +190,9 @@ impl MatchFinder<'_> {
// When matching within a macro expansion, we only want to allow matches of // When matching within a macro expansion, we only want to allow matches of
// nodes that originated entirely from within the token tree of the macro call. // nodes that originated entirely from within the token tree of the macro call.
// i.e. we don't want to match something that came from the macro itself. // i.e. we don't want to match something that came from the macro itself.
self.slow_scan_node( if let Some(range) = self.sema.original_range_opt(tt.syntax()) {
&expanded, self.slow_scan_node(&expanded, rule, &Some(range), matches_out);
rule, }
&Some(self.sema.original_range(tt.syntax())),
matches_out,
);
} }
} }
} }
@ -227,7 +224,7 @@ impl MatchFinder<'_> {
// There is no range restriction. // There is no range restriction.
return true; return true;
} }
let node_range = self.sema.original_range(code); let Some(node_range) = self.sema.original_range_opt(code) else { return false };
for range in &self.restrict_ranges { for range in &self.restrict_ranges {
if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) { if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
return true; return true;

View file

@ -1,10 +1,10 @@
use std::mem::discriminant; use std::{iter, mem::discriminant};
use crate::{ use crate::{
doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget, doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget,
RangeInfo, TryToNav, RangeInfo, TryToNav,
}; };
use hir::{AsAssocItem, AssocItem, DescendPreference, ModuleDef, Semantics}; use hir::{AsAssocItem, AssocItem, DescendPreference, MacroFileIdExt, ModuleDef, Semantics};
use ide_db::{ use ide_db::{
base_db::{AnchoredPath, FileId, FileLoader}, base_db::{AnchoredPath, FileId, FileLoader},
defs::{Definition, IdentClass}, defs::{Definition, IdentClass},
@ -74,11 +74,13 @@ pub(crate) fn goto_definition(
.filter_map(|token| { .filter_map(|token| {
let parent = token.parent()?; let parent = token.parent()?;
if let Some(tt) = ast::TokenTree::cast(parent.clone()) { if let Some(token) = ast::String::cast(token.clone()) {
if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) { if let Some(x) = try_lookup_include_path(sema, token, file_id) {
return Some(vec![x]); return Some(vec![x]);
} }
}
if ast::TokenTree::can_cast(parent.kind()) {
if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token) { if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token) {
return Some(vec![x]); return Some(vec![x]);
} }
@ -111,24 +113,17 @@ pub(crate) fn goto_definition(
fn try_lookup_include_path( fn try_lookup_include_path(
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
tt: ast::TokenTree, token: ast::String,
token: SyntaxToken,
file_id: FileId, file_id: FileId,
) -> Option<NavigationTarget> { ) -> Option<NavigationTarget> {
let token = ast::String::cast(token)?; let file = sema.hir_file_for(&token.syntax().parent()?).macro_file()?;
let path = token.value()?.into_owned(); if !iter::successors(Some(file), |file| file.parent(sema.db).macro_file())
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?; // Check that we are in the eager argument expansion of an include macro
let name = macro_call.path()?.segment()?.name_ref()?; .any(|file| file.is_include_like_macro(sema.db) && file.eager_arg(sema.db).is_none())
if !matches!(&*name.text(), "include" | "include_str" | "include_bytes") { {
return None; return None;
} }
let path = token.value()?;
// Ignore non-built-in macros to account for shadowing
if let Some(it) = sema.resolve_macro_call(&macro_call) {
if !matches!(it.kind(sema.db), hir::MacroKind::BuiltIn) {
return None;
}
}
let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?; let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
let size = sema.db.file_text(file_id).len().try_into().ok()?; let size = sema.db.file_text(file_id).len().try_into().ok()?;
@ -1531,6 +1526,26 @@ fn main() {
); );
} }
#[test]
fn goto_include_has_eager_input() {
check(
r#"
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include_str {}
#[rustc_builtin_macro]
macro_rules! concat {}
fn main() {
let str = include_str!(concat!("foo", ".tx$0t"));
}
//- /foo.txt
// empty
//^file
"#,
);
}
#[test] #[test]
fn goto_doc_include_str() { fn goto_doc_include_str() {
check( check(
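The new check walks the chain of macro files upward to confirm the string token originates in the argument of an include-like macro, rather than pattern-matching the macro name. Per the new test, the practical effect is that goto-definition works on path fragments assembled by eager macros. A hypothetical example (the file path is made up):

```rust
fn main() {
    // Goto-definition on the ".txt" string fragment resolves to foo.txt even
    // though the final path only exists after the eager `concat!` expansion.
    let contents = include_str!(concat!("foo", ".txt"));
    println!("{contents}");
}
```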

View file

@ -510,7 +510,7 @@ fn render_notable_trait_comment(
let mut needs_impl_header = true; let mut needs_impl_header = true;
for (trait_, assoc_types) in notable_traits { for (trait_, assoc_types) in notable_traits {
desc.push_str(if mem::take(&mut needs_impl_header) { desc.push_str(if mem::take(&mut needs_impl_header) {
" // Implements notable traits: " "// Implements notable traits: "
} else { } else {
", " ", "
}); });
@ -661,7 +661,7 @@ fn closure_ty(
if let Some(layout) = if let Some(layout) =
render_memory_layout(config.memory_layout, || original.layout(sema.db), |_| None, |_| None) render_memory_layout(config.memory_layout, || original.layout(sema.db), |_| None, |_| None)
{ {
format_to!(markup, "{layout}"); format_to!(markup, " {layout}");
} }
if let Some(trait_) = c.fn_trait(sema.db).get_id(sema.db, original.krate(sema.db).into()) { if let Some(trait_) = c.fn_trait(sema.db).get_id(sema.db, original.krate(sema.db).into()) {
push_new_def(hir::Trait::from(trait_).into()) push_new_def(hir::Trait::from(trait_).into())
@ -730,7 +730,7 @@ fn render_memory_layout(
let config = config?; let config = config?;
let layout = layout().ok()?; let layout = layout().ok()?;
let mut label = String::from(" // "); let mut label = String::from("// ");
if let Some(render) = config.size { if let Some(render) = config.size {
let size = match tag(&layout) { let size = match tag(&layout) {
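A small but visible rendering fix: `render_memory_layout` used to bake a leading space into its `" // "` prefix, so hovers that begin with the layout comment started with stray whitespace. The separator is now the caller's responsibility (`format_to!(markup, " {layout}")`), and the test updates below drop the leading space accordingly. A sketch of the contract change:

```rust
fn render_memory_layout() -> String {
    String::from("// size = 4, align = 4") // no leading space baked in anymore
}

fn main() {
    // Standalone use (e.g. the first line of a hover) starts flush-left:
    assert_eq!(render_memory_layout(), "// size = 4, align = 4");
    // Callers that append mid-line now supply their own separator:
    let markup = format!("{} {}", "impl Fn(i32)", render_memory_layout());
    assert_eq!(markup, "impl Fn(i32) // size = 4, align = 4");
}
```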

View file

@ -180,7 +180,7 @@ fn foo() {
*local* *local*
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
let local: i32 let local: i32
``` ```
"#]], "#]],
@ -471,7 +471,7 @@ fn main() {
*iter* *iter*
```rust ```rust
// size = 8, align = 4 // size = 8, align = 4
let mut iter: Iter<Scan<OtherStruct<OtherStruct<i32>>, impl Fn(&mut u32, &u32, &mut u32) -> Option<u32>, u32>> let mut iter: Iter<Scan<OtherStruct<OtherStruct<i32>>, impl Fn(&mut u32, &u32, &mut u32) -> Option<u32>, u32>>
``` ```
"#]], "#]],
@ -713,7 +713,7 @@ struct Foo { fiel$0d_a: u8, field_b: i32, field_c: i16 }
``` ```
```rust ```rust
// size = 1, align = 1, offset = 6 // size = 1, align = 1, offset = 6
field_a: u8 field_a: u8
``` ```
"#]], "#]],
@ -739,7 +739,7 @@ fn main() {
``` ```
```rust ```rust
// size = 4, align = 4, offset = 0 // size = 4, align = 4, offset = 0
pub field_a: u32 pub field_a: u32
``` ```
"#]], "#]],
@ -762,7 +762,7 @@ fn main() {
``` ```
```rust ```rust
// size = 4, align = 4, offset = 0 // size = 4, align = 4, offset = 0
pub field_a: u32 pub field_a: u32
``` ```
"#]], "#]],
@ -787,7 +787,7 @@ fn main() {
``` ```
```rust ```rust
// size = 4, align = 4, offset = 0 // size = 4, align = 4, offset = 0
pub 0: u32 pub 0: u32
``` ```
"#]], "#]],
@ -808,7 +808,7 @@ fn foo(foo: Foo) {
``` ```
```rust ```rust
// size = 4, align = 4, offset = 0 // size = 4, align = 4, offset = 0
pub 0: u32 pub 0: u32
``` ```
"#]], "#]],
@ -819,7 +819,7 @@ fn foo(foo: Foo) {
fn hover_tuple_struct() { fn hover_tuple_struct() {
check( check(
r#" r#"
struct Foo$0(pub u32) struct Foo$0(pub u32) where u32: Copy;
"#, "#,
expect![[r#" expect![[r#"
*Foo* *Foo*
@ -829,8 +829,100 @@ struct Foo$0(pub u32)
``` ```
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
struct Foo(pub u32); struct Foo(pub u32)
where
u32: Copy,
```
"#]],
);
}
#[test]
fn hover_record_struct() {
check(
r#"
struct Foo$0 { field: u32 }
"#,
expect![[r#"
*Foo*
```rust
test
```
```rust
// size = 4, align = 4
struct Foo {
field: u32,
}
```
"#]],
);
check(
r#"
struct Foo$0 where u32: Copy { field: u32 }
"#,
expect![[r#"
*Foo*
```rust
test
```
```rust
// size = 4, align = 4
struct Foo
where
u32: Copy,
{
field: u32,
}
```
"#]],
);
}
#[test]
fn hover_unit_struct() {
check(
r#"
struct Foo$0 where u32: Copy;
"#,
expect![[r#"
*Foo*
```rust
test
```
```rust
// size = 0, align = 1
struct Foo
where
u32: Copy,
```
"#]],
);
}
#[test]
fn hover_type_alias() {
check(
r#"
type Fo$0o: Trait = S where T: Trait;
"#,
expect![[r#"
*Foo*
```rust
test
```
```rust
type Foo: Trait = S
where
T: Trait,
``` ```
"#]], "#]],
); );
@ -957,7 +1049,7 @@ fn main() {
*zz* *zz*
```rust ```rust
// size = 8, align = 4 // size = 8, align = 4
let zz: Test<i32> let zz: Test<i32>
``` ```
"#]], "#]],
@ -1009,7 +1101,7 @@ fn main() { let b$0ar = Some(12); }
*bar* *bar*
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
let bar: Option<i32> let bar: Option<i32>
``` ```
"#]], "#]],
@ -1079,7 +1171,7 @@ fn hover_for_local_variable() {
*foo* *foo*
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
foo: i32 foo: i32
``` ```
"#]], "#]],
@ -1094,7 +1186,7 @@ fn hover_for_local_variable_pat() {
*foo* *foo*
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
foo: i32 foo: i32
``` ```
"#]], "#]],
@ -1109,7 +1201,7 @@ fn hover_local_var_edge() {
*foo* *foo*
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
foo: i32 foo: i32
``` ```
"#]], "#]],
@ -1124,7 +1216,7 @@ fn hover_for_param_edge() {
*foo* *foo*
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
foo: i32 foo: i32
``` ```
"#]], "#]],
@ -1169,7 +1261,7 @@ fn main() { let foo_$0test = Thing::new(); }
*foo_test* *foo_test*
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
let foo_test: Thing let foo_test: Thing
``` ```
"#]], "#]],
@ -1374,7 +1466,7 @@ fn y() {
*x* *x*
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
let x: i32 let x: i32
``` ```
"#]], "#]],
@ -1505,7 +1597,7 @@ fn foo(bar:u32) { let a = id!(ba$0r); }
*bar* *bar*
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
bar: u32 bar: u32
``` ```
"#]], "#]],
@ -1524,7 +1616,7 @@ fn foo(bar:u32) { let a = id!(ba$0r); }
*bar* *bar*
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
bar: u32 bar: u32
``` ```
"#]], "#]],
@ -1760,7 +1852,7 @@ fn test_hover_function_pointer_show_identifiers() {
``` ```
```rust ```rust
// size = 8, align = 8, niches = 1 // size = 8, align = 8, niches = 1
type foo = fn(a: i32, b: i32) -> i32 type foo = fn(a: i32, b: i32) -> i32
``` ```
"#]], "#]],
@ -1779,7 +1871,7 @@ fn test_hover_function_pointer_no_identifier() {
``` ```
```rust ```rust
// size = 8, align = 8, niches = 1 // size = 8, align = 8, niches = 1
type foo = fn(i32, i32) -> i32 type foo = fn(i32, i32) -> i32
``` ```
"#]], "#]],
@ -1926,7 +2018,7 @@ fn foo() { let bar = Ba$0r; }
``` ```
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
struct Bar struct Bar
``` ```
@ -1963,7 +2055,7 @@ fn foo() { let bar = Ba$0r; }
``` ```
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
struct Bar struct Bar
``` ```
@ -1993,7 +2085,7 @@ fn foo() { let bar = Ba$0r; }
``` ```
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
struct Bar struct Bar
``` ```
@ -2022,7 +2114,7 @@ pub struct B$0ar
``` ```
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
pub struct Bar pub struct Bar
``` ```
@ -2050,7 +2142,7 @@ pub struct B$0ar
``` ```
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
pub struct Bar pub struct Bar
``` ```
@ -2140,7 +2232,7 @@ fn test_hover_layout_of_variant() {
``` ```
```rust ```rust
// size = 4, align = 2 // size = 4, align = 2
Variant1(u8, u16) Variant1(u8, u16)
``` ```
"#]], "#]],
@ -2162,7 +2254,7 @@ fn test_hover_layout_of_enum() {
``` ```
```rust ```rust
// size = 16 (0x10), align = 8, niches = 254 // size = 16 (0x10), align = 8, niches = 254
enum Foo { enum Foo {
Variant1(u8, u16), Variant1(u8, u16),
Variant2(i32, u8, i64), Variant2(i32, u8, i64),
@ -2540,7 +2632,7 @@ fn main() { let s$0t = S{ f1:Arg(0) }; }
focus_range: 7..10, focus_range: 7..10,
name: "Arg", name: "Arg",
kind: Struct, kind: Struct,
description: "struct Arg(u32);", description: "struct Arg(u32)",
}, },
}, },
HoverGotoTypeData { HoverGotoTypeData {
@ -2599,7 +2691,7 @@ fn main() { let s$0t = S{ f1: S{ f1: Arg(0) } }; }
focus_range: 7..10, focus_range: 7..10,
name: "Arg", name: "Arg",
kind: Struct, kind: Struct,
description: "struct Arg(u32);", description: "struct Arg(u32)",
}, },
}, },
HoverGotoTypeData { HoverGotoTypeData {
@ -2648,7 +2740,7 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
focus_range: 7..8, focus_range: 7..8,
name: "A", name: "A",
kind: Struct, kind: Struct,
description: "struct A(u32);", description: "struct A(u32)",
}, },
}, },
HoverGotoTypeData { HoverGotoTypeData {
@ -2661,7 +2753,7 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
focus_range: 22..23, focus_range: 22..23,
name: "B", name: "B",
kind: Struct, kind: Struct,
description: "struct B(u32);", description: "struct B(u32)",
}, },
}, },
HoverGotoTypeData { HoverGotoTypeData {
@ -2675,7 +2767,7 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
name: "C", name: "C",
kind: Struct, kind: Struct,
container_name: "M", container_name: "M",
description: "pub struct C(u32);", description: "pub struct C(u32)",
}, },
}, },
], ],
@ -3331,26 +3423,26 @@ struct Foo<const BAR: Bar>;
impl<const BAR: Bar> Foo<BAR$0> {} impl<const BAR: Bar> Foo<BAR$0> {}
"#, "#,
expect![[r#" expect![[r#"
[ [
GoToType( GoToType(
[ [
HoverGotoTypeData { HoverGotoTypeData {
mod_path: "test::Bar", mod_path: "test::Bar",
nav: NavigationTarget { nav: NavigationTarget {
file_id: FileId( file_id: FileId(
0, 0,
), ),
full_range: 0..11, full_range: 0..11,
focus_range: 7..10, focus_range: 7..10,
name: "Bar", name: "Bar",
kind: Struct, kind: Struct,
description: "struct Bar", description: "struct Bar",
},
}, },
], },
), ],
] ),
"#]], ]
"#]],
); );
} }
@ -3396,26 +3488,26 @@ impl Foo {
} }
"#, "#,
expect![[r#" expect![[r#"
[ [
GoToType( GoToType(
[ [
HoverGotoTypeData { HoverGotoTypeData {
mod_path: "test::Foo", mod_path: "test::Foo",
nav: NavigationTarget { nav: NavigationTarget {
file_id: FileId( file_id: FileId(
0, 0,
), ),
full_range: 0..11, full_range: 0..11,
focus_range: 7..10, focus_range: 7..10,
name: "Foo", name: "Foo",
kind: Struct, kind: Struct,
description: "struct Foo", description: "struct Foo",
},
}, },
], },
), ],
] ),
"#]], ]
"#]],
); );
} }
@ -3466,7 +3558,7 @@ fn main() {
*f* *f*
```rust ```rust
// size = 8, align = 8, niches = 1 // size = 8, align = 8, niches = 1
let f: &i32 let f: &i32
``` ```
--- ---
@ -3476,7 +3568,7 @@ fn main() {
``` ```
```rust ```rust
// size = 4, align = 4, offset = 0 // size = 4, align = 4, offset = 0
f: i32 f: i32
``` ```
"#]], "#]],
@ -3498,7 +3590,7 @@ struct S$0T<const C: usize = 1, T = Foo>(T);
``` ```
```rust ```rust
struct ST<const C: usize = 1, T = Foo>(T); struct ST<const C: usize = 1, T = Foo>(T)
``` ```
"#]], "#]],
); );
@ -3519,7 +3611,7 @@ struct S$0T<const C: usize = {40 + 2}, T = Foo>(T);
``` ```
```rust ```rust
struct ST<const C: usize = {const}, T = Foo>(T); struct ST<const C: usize = {const}, T = Foo>(T)
``` ```
"#]], "#]],
); );
@ -3541,7 +3633,7 @@ struct S$0T<const C: usize = VAL, T = Foo>(T);
``` ```
```rust ```rust
struct ST<const C: usize = VAL, T = Foo>(T); struct ST<const C: usize = VAL, T = Foo>(T)
``` ```
"#]], "#]],
); );
@ -3561,7 +3653,7 @@ fn main() {
*value* *value*
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
let value: Const<1> let value: Const<1>
``` ```
"#]], "#]],
@ -3582,7 +3674,7 @@ fn main() {
*value* *value*
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
let value: Const<0> let value: Const<0>
``` ```
"#]], "#]],
@ -3603,7 +3695,7 @@ fn main() {
*value* *value*
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
let value: Const<-1> let value: Const<-1>
``` ```
"#]], "#]],
@ -3624,7 +3716,7 @@ fn main() {
*value* *value*
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
let value: Const<true> let value: Const<true>
``` ```
"#]], "#]],
@ -3645,7 +3737,7 @@ fn main() {
*value* *value*
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
let value: Const<'🦀'> let value: Const<'🦀'>
``` ```
"#]], "#]],
@ -3665,7 +3757,7 @@ impl Foo {
*self* *self*
```rust ```rust
// size = 8, align = 8, niches = 1 // size = 8, align = 8, niches = 1
self: &Foo self: &Foo
``` ```
"#]], "#]],
@ -3686,7 +3778,7 @@ impl Foo {
*self* *self*
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
self: Arc<Foo> self: Arc<Foo>
``` ```
"#]], "#]],
@ -4072,7 +4164,7 @@ type Fo$0o2 = Foo<2>;
``` ```
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
type Foo2 = Foo<2> type Foo2 = Foo<2>
``` ```
"#]], "#]],
@ -4115,7 +4207,7 @@ enum E {
``` ```
```rust ```rust
// size = 1, align = 1 // size = 1, align = 1
A = 8 A = 8
``` ```
@ -4141,7 +4233,7 @@ enum E {
``` ```
```rust ```rust
// size = 1, align = 1 // size = 1, align = 1
A = 12 (0xC) A = 12 (0xC)
``` ```
@ -4168,7 +4260,7 @@ enum E {
``` ```
```rust ```rust
// size = 1, align = 1 // size = 1, align = 1
B = 2 B = 2
``` ```
@ -4195,7 +4287,7 @@ enum E {
``` ```
```rust ```rust
// size = 1, align = 1 // size = 1, align = 1
B = 5 B = 5
``` ```
@ -5002,7 +5094,7 @@ fn foo(e: E) {
``` ```
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
A = 3 A = 3
``` ```
@ -5025,7 +5117,7 @@ fn main() {
*tile4* *tile4*
```rust ```rust
// size = 32 (0x20), align = 4 // size = 32 (0x20), align = 4
let tile4: [u32; 8] let tile4: [u32; 8]
``` ```
"#]], "#]],
@ -5262,7 +5354,7 @@ pub fn gimme() -> theitem::TheItem {
``` ```
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
pub struct TheItem pub struct TheItem
``` ```
@ -5411,7 +5503,7 @@ mod string {
``` ```
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
struct String struct String
``` ```
@ -5931,26 +6023,26 @@ fn foo() {
} }
"#, "#,
expect![[r#" expect![[r#"
[ [
GoToType( GoToType(
[ [
HoverGotoTypeData { HoverGotoTypeData {
mod_path: "test::Foo", mod_path: "test::Foo",
nav: NavigationTarget { nav: NavigationTarget {
file_id: FileId( file_id: FileId(
0, 0,
), ),
full_range: 0..11, full_range: 0..11,
focus_range: 7..10, focus_range: 7..10,
name: "Foo", name: "Foo",
kind: Struct, kind: Struct,
description: "struct Foo", description: "struct Foo",
},
}, },
], },
), ],
] ),
"#]], ]
"#]],
); );
} }
@ -6139,7 +6231,7 @@ foo_macro!(
``` ```
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
pub struct Foo pub struct Foo
``` ```
@ -6165,8 +6257,8 @@ pub struct Foo(i32);
``` ```
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
pub struct Foo(i32); pub struct Foo(i32)
``` ```
--- ---
@ -6191,7 +6283,7 @@ pub struct Foo<T>(T);
``` ```
```rust ```rust
pub struct Foo<T>(T); pub struct Foo<T>(T)
``` ```
--- ---
@ -6290,7 +6382,7 @@ enum Enum {
``` ```
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
RecordV { field: u32 } RecordV { field: u32 }
``` ```
"#]], "#]],
@ -6313,7 +6405,7 @@ enum Enum {
``` ```
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
field: u32 field: u32
``` ```
"#]], "#]],
@ -6961,7 +7053,7 @@ fn test() {
``` ```
```rust ```rust
// size = 4, align = 4, offset = 0 // size = 4, align = 4, offset = 0
f: u32 f: u32
``` ```
"#]], "#]],
@ -6981,7 +7073,7 @@ fn test() {
*s* *s*
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
let s: S let s: S
``` ```
"#]], "#]],
@ -7002,7 +7094,7 @@ fn test() {
*foo* *foo*
```rust ```rust
// size = 4, align = 4 // size = 4, align = 4
let foo: i32 let foo: i32
``` ```
"#]], "#]],
@ -7023,7 +7115,7 @@ format_args!("{aaaaa$0}");
*aaaaa* *aaaaa*
```rust ```rust
// size = 16 (0x10), align = 8, niches = 1 // size = 16 (0x10), align = 8, niches = 1
let aaaaa: &str let aaaaa: &str
``` ```
"#]], "#]],
@ -7044,7 +7136,7 @@ format_args!("{$0aaaaa}");
*aaaaa* *aaaaa*
```rust ```rust
// size = 16 (0x10), align = 8, niches = 1 // size = 16 (0x10), align = 8, niches = 1
let aaaaa: &str let aaaaa: &str
``` ```
"#]], "#]],
@ -7065,7 +7157,7 @@ format_args!(r"{$0aaaaa}");
*aaaaa* *aaaaa*
```rust ```rust
// size = 16 (0x10), align = 8, niches = 1 // size = 16 (0x10), align = 8, niches = 1
let aaaaa: &str let aaaaa: &str
``` ```
"#]], "#]],
@ -7091,7 +7183,7 @@ foo!(r"{$0aaaaa}");
*aaaaa* *aaaaa*
```rust ```rust
// size = 16 (0x10), align = 8, niches = 1 // size = 16 (0x10), align = 8, niches = 1
let aaaaa: &str let aaaaa: &str
``` ```
"#]], "#]],
@ -7440,8 +7532,8 @@ fn main(notable$0: u32) {}
*notable* *notable*
```rust ```rust
// Implements notable traits: Notable<Assoc = &str, Assoc2 = char> // Implements notable traits: Notable<Assoc = &str, Assoc2 = char>
// size = 4, align = 4 // size = 4, align = 4
notable: u32 notable: u32
``` ```
"#]], "#]],
@ -7472,8 +7564,8 @@ impl Iterator for S {
``` ```
```rust ```rust
// Implements notable traits: Notable, Future<Output = u32>, Iterator<Item = S> // Implements notable traits: Notable, Future<Output = u32>, Iterator<Item = S>
// size = 0, align = 1 // size = 0, align = 1
struct S struct S
``` ```
"#]], "#]],
@ -7532,7 +7624,7 @@ extern "C" {
``` ```
```rust ```rust
// size = 0, align = 1 // size = 0, align = 1
type Ty type Ty
``` ```
"#]], "#]],
@ -7560,7 +7652,7 @@ fn main() {
"#, "#,
expect![[r#" expect![[r#"
```rust ```rust
// Implements notable traits: Notable, Future<Output = u32>, Iterator<Item = S> // Implements notable traits: Notable, Future<Output = u32>, Iterator<Item = S>
S S
```"#]], ```"#]],
); );
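The expectation churn above reflects two hover-rendering changes: ADTs and type aliases now print their `where` clauses on separate indented lines, and tuple and unit struct hovers drop the trailing semicolon. For example, hovering `Foo` in

```rust
struct Foo(pub u32) where u32: Copy;
```

now renders the signature as

```rust
// size = 4, align = 4
struct Foo(pub u32)
where
    u32: Copy,
```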

View file

@ -74,6 +74,10 @@ pub(super) fn hints(
Ok(s) => s.value.text_range(), Ok(s) => s.value.text_range(),
Err(_) => continue, Err(_) => continue,
}, },
MirSpan::SelfParam => match source_map.self_param_syntax() {
Some(s) => s.value.text_range(),
None => continue,
},
MirSpan::Unknown => continue, MirSpan::Unknown => continue,
}; };
let binding = &hir.bindings[*binding]; let binding = &hir.bindings[*binding];

View file

@ -259,7 +259,7 @@ impl Analysis {
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
); );
change.change_file(file_id, Some(Arc::from(text))); change.change_file(file_id, Some(text));
change.set_crate_graph(crate_graph); change.set_crate_graph(crate_graph);
change.set_target_data_layouts(vec![Err("fixture has no layout".into())]); change.set_target_data_layouts(vec![Err("fixture has no layout".into())]);
change.set_toolchains(vec![None]); change.set_toolchains(vec![None]);

View file

@@ -176,14 +176,12 @@ impl NavigationTarget {
 impl TryToNav for FileSymbol {
     fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
-        let root = db.parse_or_expand(self.loc.hir_file_id);
-        self.loc.ptr.to_node(&root);
         Some(
-            orig_range_with_focus(
+            orig_range_with_focus_r(
                 db,
                 self.loc.hir_file_id,
-                &self.loc.ptr.to_node(&root),
-                Some(self.loc.name_ptr.to_node(&root)),
+                self.loc.ptr.text_range(),
+                Some(self.loc.name_ptr.text_range()),
             )
             .map(|(FileRange { file_id, range: full_range }, focus_range)| {
                 NavigationTarget {
@@ -722,7 +720,21 @@ fn orig_range_with_focus(
     value: &SyntaxNode,
     name: Option<impl AstNode>,
 ) -> UpmappingResult<(FileRange, Option<TextRange>)> {
-    let Some(name) = name else { return orig_range(db, hir_file, value) };
+    orig_range_with_focus_r(
+        db,
+        hir_file,
+        value.text_range(),
+        name.map(|it| it.syntax().text_range()),
+    )
+}
+
+fn orig_range_with_focus_r(
+    db: &RootDatabase,
+    hir_file: HirFileId,
+    value: TextRange,
+    name: Option<TextRange>,
+) -> UpmappingResult<(FileRange, Option<TextRange>)> {
+    let Some(name) = name else { return orig_range_r(db, hir_file, value) };
     let call_kind =
         || db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id).kind;
@@ -733,9 +745,9 @@ fn orig_range_with_focus(
             .definition_range(db)
     };
-    let value_range = InFile::new(hir_file, value).original_file_range_opt(db);
+    let value_range = InFile::new(hir_file, value).original_node_file_range_opt(db);
     let ((call_site_range, call_site_focus), def_site) =
-        match InFile::new(hir_file, name.syntax()).original_file_range_opt(db) {
+        match InFile::new(hir_file, name).original_node_file_range_opt(db) {
             // call site name
             Some((focus_range, ctxt)) if ctxt.is_root() => {
                 // Try to upmap the node as well, if it ends up in the def site, go back to the call site
@@ -802,7 +814,7 @@ fn orig_range_with_focus(
             }
         }
         // lost name? can't happen for single tokens
-        None => return orig_range_r(db, hir_file, value),
+        None => return orig_range_r(db, hir_file, value),
     };

     UpmappingResult {
@@ -840,7 +852,18 @@ fn orig_range(
     value: &SyntaxNode,
 ) -> UpmappingResult<(FileRange, Option<TextRange>)> {
     UpmappingResult {
-        call_site: (InFile::new(hir_file, value).original_file_range(db), None),
+        call_site: (InFile::new(hir_file, value).original_file_range_rooted(db), None),
+        def_site: None,
+    }
+}
+
+fn orig_range_r(
+    db: &RootDatabase,
+    hir_file: HirFileId,
+    value: TextRange,
+) -> UpmappingResult<(FileRange, Option<TextRange>)> {
+    UpmappingResult {
+        call_site: (InFile::new(hir_file, value).original_node_file_range(db).0, None),
         def_site: None,
     }
 }
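
The theme of this file's changes: `FileSymbol::try_to_nav` used to re-parse (`db.parse_or_expand`) and instantiate syntax nodes only to read their ranges, and the new `*_r` helpers take `TextRange` values instead, so pointer-carrying callers skip the parse entirely. A sketch of the cheap path, using only names that appear in the hunks above (the `loc` binding stands in for `self.loc`):

```rust
// Navigation ranges straight from stored pointers, with no re-parse:
// SyntaxNodePtr-style pointers already know their text_range().
let ranges = orig_range_with_focus_r(
    db,
    loc.hir_file_id,
    loc.ptr.text_range(),            // full range of the symbol
    Some(loc.name_ptr.text_range()), // focus range (the name itself)
);
```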

View file

@@ -1710,7 +1710,7 @@ use proc_macros::mirror;
 mirror$0! {}
 "#,
 expect![[r#"
-    mirror Macro FileId(1) 1..77 22..28
+    mirror ProcMacro FileId(1) 1..77 22..28
     FileId(0) 26..32
 "#]],

View file

@@ -138,7 +138,9 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
     }) {
         if let Some(def) = def {
             let file_id = match def {
-                Definition::Module(it) => it.declaration_source(db).map(|src| src.file_id),
+                Definition::Module(it) => {
+                    it.declaration_source_range(db).map(|src| src.file_id)
+                }
                 Definition::Function(it) => it.source(db).map(|src| src.file_id),
                 _ => None,
             };
@@ -269,15 +271,10 @@ fn find_related_tests_in_module(
         Some(it) => it,
         _ => return,
    };
-    let mod_source = parent_module.definition_source(sema.db);
-    let range = match &mod_source.value {
-        hir::ModuleSource::Module(m) => m.syntax().text_range(),
-        hir::ModuleSource::BlockExpr(b) => b.syntax().text_range(),
-        hir::ModuleSource::SourceFile(f) => f.syntax().text_range(),
-    };
+    let mod_source = parent_module.definition_source_range(sema.db);
     let file_id = mod_source.file_id.original_file(sema.db);
-    let mod_scope = SearchScope::file_range(FileRange { file_id, range });
+    let mod_scope = SearchScope::file_range(FileRange { file_id, range: mod_source.value });
     let fn_pos = FilePosition { file_id, offset: fn_name.syntax().text_range().start() };
     find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests)
 }
@@ -405,14 +402,15 @@ fn runnable_mod_outline_definition(
     let attrs = def.attrs(sema.db);
     let cfg = attrs.cfg();

-    match def.definition_source(sema.db).value {
-        hir::ModuleSource::SourceFile(_) => Some(Runnable {
+    if def.as_source_file_id(sema.db).is_some() {
+        Some(Runnable {
             use_name_in_title: false,
             nav: def.to_nav(sema.db).call_site(),
             kind: RunnableKind::TestMod { path },
             cfg,
-        }),
-        _ => None,
+        })
+    } else {
+        None
     }
 }
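
Both runnables hunks lean on new range-returning accessors in `hir` (`declaration_source_range`, `definition_source_range`, `as_source_file_id`), so callers no longer materialize a `ModuleSource` just to throw it away. A condensed before/after of the second hunk, assembled from the diff itself:

```rust
// Before: build the module's ModuleSource, then match on it for a range.
let mod_source = parent_module.definition_source(sema.db);
let range = match &mod_source.value {
    hir::ModuleSource::Module(m) => m.syntax().text_range(),
    hir::ModuleSource::BlockExpr(b) => b.syntax().text_range(),
    hir::ModuleSource::SourceFile(f) => f.syntax().text_range(),
};

// After: ask for the range directly; no syntax tree is touched.
let mod_source = parent_module.definition_source_range(sema.db);
let range = mod_source.value;
```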

View file

@@ -248,6 +248,7 @@ fn traverse(
     // an attribute nested in a macro call will not emit `inside_attribute`
     let mut inside_attribute = false;
     let mut inside_macro_call = false;
+    let mut inside_proc_macro_call = false;

     // Walk all nodes, keeping track of whether we are inside a macro or not.
     // If in macro, expand it first and highlight the expanded code.
@@ -298,8 +299,9 @@ fn traverse(
                 ast::Item::Fn(_) | ast::Item::Const(_) | ast::Item::Static(_) => {
                     bindings_shadow_count.clear()
                 }
-                ast::Item::MacroCall(_) => {
+                ast::Item::MacroCall(ref macro_call) => {
                     inside_macro_call = true;
+                    inside_proc_macro_call = sema.is_proc_macro_call(macro_call);
                 }
                 _ => (),
             }
@@ -344,6 +346,7 @@ fn traverse(
                 }
                 Some(ast::Item::MacroCall(_)) => {
                     inside_macro_call = false;
+                    inside_proc_macro_call = false;
                 }
                 _ => (),
             }
@@ -519,6 +522,9 @@ fn traverse(
             highlight |= HlMod::Attribute
         }
         if inside_macro_call && tt_level > 0 {
+            if inside_proc_macro_call {
+                highlight |= HlMod::ProcMacro
+            }
             highlight |= HlMod::Macro
         }
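
Net effect of the four highlighting hunks: tokens inside a proc-macro invocation now carry `HlMod::ProcMacro` in addition to `HlMod::Macro`, so editors can style the two kinds of macro calls differently. The state machine in miniature, assembled from the hunks (the surrounding traversal loop is elided):

```rust
// On entering an item, remember whether the macro call is a proc macro...
ast::Item::MacroCall(ref macro_call) => {
    inside_macro_call = true;
    inside_proc_macro_call = sema.is_proc_macro_call(macro_call);
}

// ...and when emitting a highlight for a token inside a token tree,
// layer the extra modifier on top of the plain macro modifier.
if inside_macro_call && tt_level > 0 {
    if inside_proc_macro_call {
        highlight |= HlMod::ProcMacro
    }
    highlight |= HlMod::Macro
}
```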

Some files were not shown because too many files have changed in this diff