mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-29 06:23:25 +00:00)

Commit 883439d42b: Auto merge of #129817 - matthiaskrgr:rollup-ll2ld5m, r=matthiaskrgr

Rollup of 12 pull requests

Successful merges:
- #129659 (const fn stability checking: also check declared language features)
- #129711 (Expand NLL MIR dumps)
- #129730 (f32 docs: define 'arithmetic' operations)
- #129733 (Subtree update of `rust-analyzer`)
- #129749 (llvm-wrapper: adapt for LLVM API changes)
- #129757 (Add a test for trait solver overflow in MIR inliner cycle detection)
- #129760 (Make the "detect-old-time" UI test more representative)
- #129767 (Remove `#[macro_use] extern crate tracing`, round 4)
- #129774 (Remove `#[macro_use] extern crate tracing` from rustdoc and rustfmt)
- #129785 (Miri subtree update)
- #129791 (mark joboet as on vacation)
- #129812 (interpret, codegen: tweak some comments and checks regarding Box with custom allocator)

Failed merges:
- #129777 (Add `unreachable_pub`, round 4)

r? `@ghost`
`@rustbot` modify labels: rollup

303 changed files with 9220 additions and 2905 deletions
@@ -7,9 +7,10 @@
 # prettier format
 f247090558c9ba3c551566eae5882b7ca865225f
 
-# subtree syncs
-932d85b52946d917deab2c23ead552f7f713b828
+# pre-josh subtree syncs
 3e358a6827d83e8d6473913a5e304734aadfed04
+932d85b52946d917deab2c23ead552f7f713b828
 9d2cb42a413e51deb50b36794a2e1605381878fc
-f532576ac53ddcc666bc8d59e0b6437065e2f599
+b2f6fd4f961fc7e4fbfdb80cae2e6065f8436f15
 c48062fe2ab9a2d913d1985a6b0aec4bf936bfc1
+f532576ac53ddcc666bc8d59e0b6437065e2f599
Cargo.lock (generated): 7 changes

@@ -495,7 +495,6 @@ dependencies = [
  "hir-ty",
  "intern",
  "itertools",
- "once_cell",
  "rustc-hash",
  "smallvec",
  "span",
@@ -528,7 +527,6 @@ dependencies = [
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "limit",
  "mbe",
- "once_cell",
  "ra-ap-rustc_abi",
  "ra-ap-rustc_parse_format",
  "rustc-hash",
@@ -595,7 +593,6 @@ dependencies = [
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "limit",
  "nohash-hasher",
- "once_cell",
  "oorandom",
  "project-model",
  "ra-ap-rustc_abi",
@@ -691,7 +688,6 @@ dependencies = [
  "hir",
  "ide-db",
  "itertools",
- "once_cell",
  "smallvec",
  "stdx",
  "syntax",
@@ -720,7 +716,6 @@ dependencies = [
  "line-index 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "memchr",
  "nohash-hasher",
- "once_cell",
  "parser",
  "profile",
  "rayon",
@@ -746,7 +741,6 @@ dependencies = [
  "hir",
  "ide-db",
  "itertools",
- "once_cell",
  "paths",
  "serde_json",
  "stdx",
@@ -1933,7 +1927,6 @@ dependencies = [
  "expect-test",
  "indexmap",
  "itertools",
- "once_cell",
  "parser",
  "ra-ap-rustc_lexer",
  "rayon",
@@ -19,9 +19,10 @@ rowan.opt-level = 3
 rustc-hash.opt-level = 3
 smol_str.opt-level = 3
 text-size.opt-level = 3
+serde.opt-level = 3
+salsa.opt-level = 3
 # This speeds up `cargo xtask dist`.
 miniz_oxide.opt-level = 3
-salsa.opt-level = 3
 
 [profile.release]
 incremental = true
@@ -184,6 +185,8 @@ style = { level = "warn", priority = -1 }
 suspicious = { level = "warn", priority = -1 }
 
 ## allow following lints
+# subjective
+single_match = "allow"
 # () makes a fine error in most cases
 result_unit_err = "allow"
 # We don't expose public APIs that matter like this
@@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
 of your accepting any such warranty or additional liability.
 
 END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
-To apply the Apache License to your work, attach the following
-boilerplate notice, with the fields enclosed by brackets "[]"
-replaced with your own identifying information. (Don't include
-the brackets!) The text should be enclosed in the appropriate
-comment syntax for the file format. We also recommend that a
-file or class name and description of purpose be included on the
-same "printed page" as the copyright notice for easier
-identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
@@ -23,7 +23,6 @@ fst = { version = "0.4.7", default-features = false }
 indexmap.workspace = true
 itertools.workspace = true
 la-arena.workspace = true
-once_cell = "1.17.0"
 rustc-hash.workspace = true
 tracing.workspace = true
 smallvec.workspace = true
@@ -14,7 +14,7 @@ use hir_expand::{name::Name, ExpandError, InFile};
 use la_arena::{Arena, ArenaMap, Idx, RawIdx};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::MacroFileId;
+use span::{Edition, MacroFileId};
 use syntax::{ast, AstPtr, SyntaxNodePtr};
 use triomphe::Arc;
 
@@ -201,8 +201,13 @@ impl Body {
         self.block_scopes.iter().map(move |&block| (block, db.block_def_map(block)))
     }
 
-    pub fn pretty_print(&self, db: &dyn DefDatabase, owner: DefWithBodyId) -> String {
-        pretty::print_body_hir(db, self, owner)
+    pub fn pretty_print(
+        &self,
+        db: &dyn DefDatabase,
+        owner: DefWithBodyId,
+        edition: Edition,
+    ) -> String {
+        pretty::print_body_hir(db, self, owner, edition)
     }
 
     pub fn pretty_print_expr(
@@ -210,8 +215,9 @@ impl Body {
         db: &dyn DefDatabase,
         owner: DefWithBodyId,
         expr: ExprId,
+        edition: Edition,
     ) -> String {
-        pretty::print_expr_hir(db, self, owner, expr)
+        pretty::print_expr_hir(db, self, owner, expr, edition)
     }
 
     fn new(
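The recurring pattern in this commit is visible in the `Body::pretty_print` hunks above: an `Edition` value is threaded from the caller down into the printer so that every name-rendering call can be edition-aware. A minimal, self-contained sketch of that shape, using made-up `Edition`, `Name`, and `Printer` types rather than rust-analyzer's real ones:

```rust
// Illustrative stand-ins only; the point is that the printer stores the
// edition once and forwards it to every display call.
#[derive(Clone, Copy)]
enum Edition {
    Edition2015,
    Edition2021,
}

struct Name(String);

impl Name {
    // Rendering needs the edition, e.g. to decide raw-identifier escaping
    // (hypothetical rule here: `async` must be escaped on newer editions).
    fn display(&self, edition: Edition) -> String {
        match edition {
            Edition::Edition2015 => self.0.clone(),
            Edition::Edition2021 if self.0 == "async" => format!("r#{}", self.0),
            Edition::Edition2021 => self.0.clone(),
        }
    }
}

struct Printer {
    buf: String,
    edition: Edition, // new field, set once at construction
}

impl Printer {
    fn print_field(&mut self, name: &Name) {
        // Every call site forwards the stored edition instead of a global default.
        self.buf.push_str(&name.display(self.edition));
        self.buf.push_str(": ");
    }
}

fn main() {
    let mut p = Printer { buf: String::new(), edition: Edition::Edition2021 };
    p.print_field(&Name("async".into()));
    assert_eq!(p.buf, "r#async: ");
}
```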
@@ -3,6 +3,7 @@
 use std::fmt::{self, Write};
 
 use itertools::Itertools;
+use span::Edition;
 
 use crate::{
     hir::{
@@ -15,20 +16,26 @@ use crate::{
 
 use super::*;
 
-pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBodyId) -> String {
+pub(super) fn print_body_hir(
+    db: &dyn DefDatabase,
+    body: &Body,
+    owner: DefWithBodyId,
+    edition: Edition,
+) -> String {
     let header = match owner {
-        DefWithBodyId::FunctionId(it) => {
-            it.lookup(db).id.resolved(db, |it| format!("fn {}", it.name.display(db.upcast())))
-        }
+        DefWithBodyId::FunctionId(it) => it
+            .lookup(db)
+            .id
+            .resolved(db, |it| format!("fn {}", it.name.display(db.upcast(), edition))),
         DefWithBodyId::StaticId(it) => it
            .lookup(db)
            .id
-           .resolved(db, |it| format!("static {} = ", it.name.display(db.upcast()))),
+           .resolved(db, |it| format!("static {} = ", it.name.display(db.upcast(), edition))),
         DefWithBodyId::ConstId(it) => it.lookup(db).id.resolved(db, |it| {
             format!(
                 "const {} = ",
                 match &it.name {
-                    Some(name) => name.display(db.upcast()).to_string(),
+                    Some(name) => name.display(db.upcast(), edition).to_string(),
                     None => "_".to_owned(),
                 }
             )
@@ -39,13 +46,13 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
             let enum_loc = loc.parent.lookup(db);
             format!(
                 "enum {}::{}",
-                enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast()),
-                loc.id.item_tree(db)[loc.id.value].name.display(db.upcast()),
+                enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
+                loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
             )
         }
     };
 
-    let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false };
+    let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false, edition };
     if let DefWithBodyId::FunctionId(it) = owner {
         p.buf.push('(');
         let function_data = &db.function_data(it);
@@ -86,8 +93,10 @@ pub(super) fn print_expr_hir(
     body: &Body,
     _owner: DefWithBodyId,
     expr: ExprId,
+    edition: Edition,
 ) -> String {
-    let mut p = Printer { db, body, buf: String::new(), indent_level: 0, needs_indent: false };
+    let mut p =
+        Printer { db, body, buf: String::new(), indent_level: 0, needs_indent: false, edition };
     p.print_expr(expr);
     p.buf
 }
@@ -113,6 +122,7 @@ struct Printer<'a> {
     buf: String,
     indent_level: usize,
     needs_indent: bool,
+    edition: Edition,
 }
 
 impl Write for Printer<'_> {
@@ -173,13 +183,14 @@ impl Printer<'_> {
             Expr::OffsetOf(offset_of) => {
                 w!(self, "builtin#offset_of(");
                 self.print_type_ref(&offset_of.container);
+                let edition = self.edition;
                 w!(
                     self,
                     ", {})",
                     offset_of
                         .fields
                         .iter()
-                        .format_with(".", |field, f| f(&field.display(self.db.upcast())))
+                        .format_with(".", |field, f| f(&field.display(self.db.upcast(), edition)))
                 );
             }
             Expr::Path(path) => self.print_path(path),
@@ -201,7 +212,7 @@ impl Printer<'_> {
             }
             Expr::Loop { body, label } => {
                 if let Some(lbl) = label {
-                    w!(self, "{}: ", self.body[*lbl].name.display(self.db.upcast()));
+                    w!(self, "{}: ", self.body[*lbl].name.display(self.db.upcast(), self.edition));
                 }
                 w!(self, "loop ");
                 self.print_expr(*body);
@@ -221,10 +232,11 @@ impl Printer<'_> {
             }
             Expr::MethodCall { receiver, method_name, args, generic_args } => {
                 self.print_expr(*receiver);
-                w!(self, ".{}", method_name.display(self.db.upcast()));
+                w!(self, ".{}", method_name.display(self.db.upcast(), self.edition));
                 if let Some(args) = generic_args {
                     w!(self, "::<");
-                    print_generic_args(self.db, args, self).unwrap();
+                    let edition = self.edition;
+                    print_generic_args(self.db, args, self, edition).unwrap();
                     w!(self, ">");
                 }
                 w!(self, "(");
@@ -259,13 +271,13 @@ impl Printer<'_> {
             Expr::Continue { label } => {
                 w!(self, "continue");
                 if let Some(lbl) = label {
-                    w!(self, " {}", self.body[*lbl].name.display(self.db.upcast()));
+                    w!(self, " {}", self.body[*lbl].name.display(self.db.upcast(), self.edition));
                 }
             }
             Expr::Break { expr, label } => {
                 w!(self, "break");
                 if let Some(lbl) = label {
-                    w!(self, " {}", self.body[*lbl].name.display(self.db.upcast()));
+                    w!(self, " {}", self.body[*lbl].name.display(self.db.upcast(), self.edition));
                 }
                 if let Some(expr) = expr {
                     self.whitespace();
@@ -307,9 +319,10 @@ impl Printer<'_> {
                 }
 
                 w!(self, "{{");
+                let edition = self.edition;
                 self.indented(|p| {
                     for field in &**fields {
-                        w!(p, "{}: ", field.name.display(self.db.upcast()));
+                        w!(p, "{}: ", field.name.display(self.db.upcast(), edition));
                         p.print_expr(field.expr);
                         wln!(p, ",");
                     }
@@ -326,7 +339,7 @@ impl Printer<'_> {
             }
             Expr::Field { expr, name } => {
                 self.print_expr(*expr);
-                w!(self, ".{}", name.display(self.db.upcast()));
+                w!(self, ".{}", name.display(self.db.upcast(), self.edition));
             }
             Expr::Await { expr } => {
                 self.print_expr(*expr);
@@ -464,8 +477,9 @@ impl Printer<'_> {
             }
             Expr::Literal(lit) => self.print_literal(lit),
             Expr::Block { id: _, statements, tail, label } => {
-                let label =
-                    label.map(|lbl| format!("{}: ", self.body[lbl].name.display(self.db.upcast())));
+                let label = label.map(|lbl| {
+                    format!("{}: ", self.body[lbl].name.display(self.db.upcast(), self.edition))
+                });
                 self.print_block(label.as_deref(), statements, tail);
             }
             Expr::Unsafe { id: _, statements, tail } => {
@@ -539,9 +553,10 @@ impl Printer<'_> {
         }
 
         w!(self, " {{");
+        let edition = self.edition;
         self.indented(|p| {
             for arg in args.iter() {
-                w!(p, "{}: ", arg.name.display(self.db.upcast()));
+                w!(p, "{}: ", arg.name.display(self.db.upcast(), edition));
                 p.print_pat(arg.pat);
                 wln!(p, ",");
             }
@@ -686,11 +701,13 @@ impl Printer<'_> {
     }
 
     fn print_type_ref(&mut self, ty: &TypeRef) {
-        print_type_ref(self.db, ty, self).unwrap();
+        let edition = self.edition;
+        print_type_ref(self.db, ty, self, edition).unwrap();
     }
 
     fn print_path(&mut self, path: &Path) {
-        print_path(self.db, path, self).unwrap();
+        let edition = self.edition;
+        print_path(self.db, path, self, edition).unwrap();
    }
 
     fn print_binding(&mut self, id: BindingId) {
@@ -701,6 +718,6 @@ impl Printer<'_> {
             BindingAnnotation::Ref => "ref ",
             BindingAnnotation::RefMut => "ref mut ",
         };
-        w!(self, "{}{}", mode, name.display(self.db.upcast()));
+        w!(self, "{}{}", mode, name.display(self.db.upcast(), self.edition));
     }
 }
@@ -219,7 +219,7 @@ fn main() {
             },
         );
     }"#]]
-    .assert_eq(&body.pretty_print(&db, def))
+    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
 }
 
 #[test]
@@ -285,7 +285,7 @@ impl SsrError {
             ),
         );
     }"#]]
-    .assert_eq(&body.pretty_print(&db, def))
+    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
 }
 
 #[test]
@@ -333,5 +333,5 @@ fn f(a: i32, b: u32) -> String {
         );
     };
 }"#]]
-    .assert_eq(&body.pretty_print(&db, def))
+    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
 }
@@ -651,6 +651,7 @@ mod tests {
     use expect_test::{expect, Expect};
     use hir_expand::db::ExpandDatabase;
     use itertools::Itertools;
+    use span::Edition;
     use stdx::format_to;
     use syntax::ast::AstNode;
     use test_fixture::WithFixture;
@@ -717,8 +718,10 @@ mod tests {
                 "{:7}(imports {}): {}\n",
                 format!("{:?}", prefix),
                 if ignore_local_imports { '✖' } else { '✔' },
-                found_path
-                    .map_or_else(|| "<unresolvable>".to_owned(), |it| it.display(&db).to_string()),
+                found_path.map_or_else(
+                    || "<unresolvable>".to_owned(),
+                    |it| it.display(&db, Edition::CURRENT).to_string()
+                ),
             );
         }
         expect.assert_eq(&res);
@@ -12,7 +12,6 @@ use hir_expand::{
 };
 use intern::Interned;
 use la_arena::{Arena, RawIdx};
-use once_cell::unsync::Lazy;
 use stdx::impl_from;
 use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds};
 use triomphe::Arc;
@@ -394,11 +393,16 @@ impl GenericParams {
 
                 // Don't create an `Expander` if not needed since this
                 // could cause a reparse after the `ItemTree` has been created due to the spanmap.
-                let mut expander = Lazy::new(|| {
-                    (module.def_map(db), Expander::new(db, loc.id.file_id(), module))
-                });
+                let mut expander = None;
                 for param in func_data.params.iter() {
-                    generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
+                    generic_params.fill_implicit_impl_trait_args(
+                        db,
+                        &mut expander,
+                        &mut || {
+                            (module.def_map(db), Expander::new(db, loc.id.file_id(), module))
+                        },
+                        param,
+                    );
                 }
                 Interned::new(generic_params.finish())
             }
@@ -597,7 +601,9 @@ impl GenericParamsCollector {
     fn fill_implicit_impl_trait_args(
         &mut self,
         db: &dyn DefDatabase,
-        exp: &mut Lazy<(Arc<DefMap>, Expander), impl FnOnce() -> (Arc<DefMap>, Expander)>,
+        // FIXME: Change this back to `LazyCell` if https://github.com/rust-lang/libs-team/issues/429 is accepted.
+        exp: &mut Option<(Arc<DefMap>, Expander)>,
+        exp_fill: &mut dyn FnMut() -> (Arc<DefMap>, Expander),
         type_ref: &TypeRef,
     ) {
         type_ref.walk(&mut |type_ref| {
@@ -617,7 +623,7 @@ impl GenericParamsCollector {
             }
             if let TypeRef::Macro(mc) = type_ref {
                 let macro_call = mc.to_node(db.upcast());
-                let (def_map, expander) = &mut **exp;
+                let (def_map, expander) = exp.get_or_insert_with(&mut *exp_fill);
 
                 let module = expander.module.local_id;
                 let resolver = |path: &_| {
@@ -637,8 +643,8 @@ impl GenericParamsCollector {
                 {
                     let ctx = expander.ctx(db);
                     let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
-                    self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref);
-                    exp.1.exit(mark);
+                    self.fill_implicit_impl_trait_args(db, &mut *exp, exp_fill, &type_ref);
+                    exp.get_or_insert_with(&mut *exp_fill).1.exit(mark);
                 }
             }
         });
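The `GenericParams` hunks above replace a `once_cell::unsync::Lazy` with an `Option` plus a separate fill closure, initializing on first use via `Option::get_or_insert_with`. A small stand-alone sketch of that idiom, with illustrative names rather than rust-analyzer's own:

```rust
// Expensive state that should only be built if it is actually needed.
struct Expander {
    file: u32,
}

fn use_expander(exp: &mut Option<Expander>, fill: &mut dyn FnMut() -> Expander) {
    // The first call runs `fill` and stores the result; later calls reuse it.
    let expander = exp.get_or_insert_with(&mut *fill);
    println!("expanding in file {}", expander.file);
}

fn main() {
    let mut built = 0;
    let mut exp = None;
    let mut fill = || {
        built += 1; // count how often the expensive setup actually runs
        Expander { file: 1 }
    };
    use_expander(&mut exp, &mut fill);
    use_expander(&mut exp, &mut fill);
    assert_eq!(built, 1); // initialized exactly once
}
```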
@@ -250,7 +250,7 @@ pub(crate) fn parse(
             }
         }
         ArgRef::Name(name, span) => {
-            let name = Name::new(name, tt::IdentIsRaw::No, call_ctx);
+            let name = Name::new(name, call_ctx);
             if let Some((index, _)) = args.by_name(&name) {
                 record_usage(name, span);
                 // Name found in `args`, so we resolve it to its index.
@@ -10,6 +10,7 @@ use hir_expand::{
     AstId,
 };
 use intern::{sym, Interned, Symbol};
+use span::Edition;
 use syntax::ast::{self, HasGenericArgs, HasName, IsString};
 
 use crate::{
@@ -419,18 +420,22 @@ impl ConstRef {
         param.default_val().map(|default| Self::from_const_arg(lower_ctx, Some(default)))
     }
 
-    pub fn display<'a>(&'a self, db: &'a dyn ExpandDatabase) -> impl fmt::Display + 'a {
-        struct Display<'a>(&'a dyn ExpandDatabase, &'a ConstRef);
+    pub fn display<'a>(
+        &'a self,
+        db: &'a dyn ExpandDatabase,
+        edition: Edition,
+    ) -> impl fmt::Display + 'a {
+        struct Display<'a>(&'a dyn ExpandDatabase, &'a ConstRef, Edition);
         impl fmt::Display for Display<'_> {
             fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                 match self.1 {
                     ConstRef::Scalar(s) => s.fmt(f),
-                    ConstRef::Path(n) => n.display(self.0).fmt(f),
+                    ConstRef::Path(n) => n.display(self.0, self.2).fmt(f),
                     ConstRef::Complex(_) => f.write_str("{const}"),
                 }
             }
         }
-        Display(db, self)
+        Display(db, self, edition)
     }
 
     // We special case literals and single identifiers, to speed up things.
@@ -8,6 +8,7 @@ use hir_expand::name::Name;
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
 use smallvec::SmallVec;
+use span::Edition;
 use stdx::{format_to, TupleExt};
 use syntax::ToSmolStr;
 use triomphe::Arc;
@@ -66,7 +67,12 @@ impl ImportMap {
         for (k, v) in self.item_to_info_map.iter() {
             format_to!(out, "{:?} ({:?}) -> ", k, v.1);
             for v in &v.0 {
-                format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container);
+                format_to!(
+                    out,
+                    "{}:{:?}, ",
+                    v.name.display(db.upcast(), Edition::CURRENT),
+                    v.container
+                );
             }
             format_to!(out, "\n");
         }
@@ -83,7 +89,7 @@ impl ImportMap {
             // We've only collected items, whose name cannot be tuple field so unwrapping is fine.
             .flat_map(|(&item, (info, _))| {
                 info.iter().enumerate().map(move |(idx, info)| {
-                    (item, info.name.display(db.upcast()).to_smolstr(), idx as u32)
+                    (item, info.name.unescaped().display(db.upcast()).to_smolstr(), idx as u32)
                 })
             })
             .collect();
@@ -461,7 +467,7 @@ fn search_maps(
             query.search_mode.check(
                 &query.query,
                 query.case_sensitive,
-                &info.name.display(db.upcast()).to_smolstr(),
+                &info.name.unescaped().display(db.upcast()).to_smolstr(),
             )
         });
         res.extend(iter.map(TupleExt::head));
@@ -577,7 +583,7 @@ mod tests {
             Some(format!(
                 "{}::{}",
                 render_path(db, &trait_info[0]),
-                assoc_item_name.display(db.upcast())
+                assoc_item_name.display(db.upcast(), Edition::CURRENT)
             ))
         }
 
@@ -616,7 +622,7 @@ mod tests {
             module = parent;
         }
 
-        segments.iter().rev().map(|it| it.display(db.upcast())).join("::")
+        segments.iter().rev().map(|it| it.display(db.upcast(), Edition::CURRENT)).join("::")
     }
 
     #[test]
@@ -1,14 +1,16 @@
 //! Describes items defined or visible (ie, imported) in a certain scope.
 //! This is shared between modules and blocks.
 
+use std::sync::LazyLock;
+
 use base_db::CrateId;
 use hir_expand::{attrs::AttrId, db::ExpandDatabase, name::Name, AstId, MacroCallId};
 use indexmap::map::Entry;
 use itertools::Itertools;
 use la_arena::Idx;
-use once_cell::sync::Lazy;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
+use span::Edition;
 use stdx::format_to;
 use syntax::ast;
 
@@ -129,7 +131,7 @@ struct DeriveMacroInvocation {
     derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
 }
 
-pub(crate) static BUILTIN_SCOPE: Lazy<FxIndexMap<Name, PerNs>> = Lazy::new(|| {
+pub(crate) static BUILTIN_SCOPE: LazyLock<FxIndexMap<Name, PerNs>> = LazyLock::new(|| {
     BuiltinType::all_builtin_types()
         .iter()
         .map(|(name, ty)| (name.clone(), PerNs::types((*ty).into(), Visibility::Public, None)))
@@ -706,7 +708,7 @@ impl ItemScope {
             format_to!(
                 buf,
                 "{}:",
-                name.map_or("_".to_owned(), |name| name.display(db).to_string())
+                name.map_or("_".to_owned(), |name| name.display(db, Edition::LATEST).to_string())
             );
 
             if let Some((.., i)) = def.types {
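The `BUILTIN_SCOPE` hunks above swap `once_cell::sync::Lazy` for the standard library's `std::sync::LazyLock` (stable since Rust 1.80), which has the same compute-once-on-first-access semantics. A minimal sketch of the replacement, with an illustrative map in place of the real scope table:

```rust
use std::collections::HashMap;
use std::sync::LazyLock;

// Before: static MAP: once_cell::sync::Lazy<...> = Lazy::new(|| ...);
// After: the drop-in std equivalent.
static BUILTIN_TYPES: LazyLock<HashMap<&'static str, u8>> = LazyLock::new(|| {
    // The closure runs at most once, on first dereference, even across threads.
    [("bool", 1), ("u8", 1), ("u32", 4)].into_iter().collect()
});

fn main() {
    // First access initializes the map; later accesses just read the cached value.
    assert_eq!(BUILTIN_TYPES.get("u32"), Some(&4));
    assert_eq!(BUILTIN_TYPES.len(), 3);
}
```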
@@ -40,6 +40,7 @@ use std::{
     fmt::{self, Debug},
     hash::{Hash, Hasher},
     ops::{Index, Range},
+    sync::OnceLock,
 };
 
 use ast::{AstNode, StructKind};
@@ -48,10 +49,9 @@ use either::Either;
 use hir_expand::{attrs::RawAttrs, name::Name, ExpandTo, HirFileId, InFile};
 use intern::{Interned, Symbol};
 use la_arena::{Arena, Idx, RawIdx};
-use once_cell::sync::OnceCell;
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::{AstIdNode, FileAstId, SyntaxContextId};
+use span::{AstIdNode, Edition, FileAstId, SyntaxContextId};
 use stdx::never;
 use syntax::{ast, match_ast, SyntaxKind};
 use triomphe::Arc;
@@ -101,7 +101,7 @@ pub struct ItemTree {
 impl ItemTree {
     pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
         let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered();
-        static EMPTY: OnceCell<Arc<ItemTree>> = OnceCell::new();
+        static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
 
         let syntax = db.parse_or_expand(file_id);
 
@@ -152,7 +152,7 @@ impl ItemTree {
 
     pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
         let _p = tracing::info_span!("block_item_tree_query", ?block).entered();
-        static EMPTY: OnceCell<Arc<ItemTree>> = OnceCell::new();
+        static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
 
         let loc = block.lookup(db);
         let block = loc.ast_id.to_node(db.upcast());
@@ -199,8 +199,8 @@ impl ItemTree {
         Attrs::filter(db, krate, self.raw_attrs(of).clone())
     }
 
-    pub fn pretty_print(&self, db: &dyn DefDatabase) -> String {
-        pretty::print_item_tree(db, self)
+    pub fn pretty_print(&self, db: &dyn DefDatabase, edition: Edition) -> String {
+        pretty::print_item_tree(db, self, edition)
     }
 
     fn data(&self) -> &ItemTreeData {
@@ -626,9 +626,9 @@ impl Index<RawVisibilityId> for ItemTree {
     type Output = RawVisibility;
     fn index(&self, index: RawVisibilityId) -> &Self::Output {
         static VIS_PUB: RawVisibility = RawVisibility::Public;
-        static VIS_PRIV_IMPLICIT: OnceCell<RawVisibility> = OnceCell::new();
-        static VIS_PRIV_EXPLICIT: OnceCell<RawVisibility> = OnceCell::new();
-        static VIS_PUB_CRATE: OnceCell<RawVisibility> = OnceCell::new();
+        static VIS_PRIV_IMPLICIT: OnceLock<RawVisibility> = OnceLock::new();
+        static VIS_PRIV_EXPLICIT: OnceLock<RawVisibility> = OnceLock::new();
+        static VIS_PUB_CRATE: OnceLock<RawVisibility> = OnceLock::new();
 
         match index {
             RawVisibilityId::PRIV_IMPLICIT => VIS_PRIV_IMPLICIT.get_or_init(|| {
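Similarly, the `ItemTree` hunks above move from `once_cell::sync::OnceCell` to `std::sync::OnceLock`, whose `get_or_init` has the same shape. A hedged sketch of the cached-static pattern shown there; the `Config` type and "empty" value are made up for illustration:

```rust
use std::sync::{Arc, OnceLock};

#[derive(Debug)]
struct Config {
    name: &'static str,
}

fn shared_default() -> Arc<Config> {
    // One lazily created, process-wide value: `get_or_init` runs the closure
    // only on the first call and returns the cached Arc afterwards.
    static DEFAULT: OnceLock<Arc<Config>> = OnceLock::new();
    DEFAULT.get_or_init(|| Arc::new(Config { name: "empty" })).clone()
}

fn main() {
    let a = shared_default();
    let b = shared_default();
    // Both calls hand out clones of the same allocation.
    assert!(Arc::ptr_eq(&a, &b));
    println!("{:?}", a.name);
}
```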
@@ -3,7 +3,7 @@
 use std::fmt::{self, Write};
 
 use la_arena::{Idx, RawIdx};
-use span::ErasedFileAstId;
+use span::{Edition, ErasedFileAstId};
 
 use crate::{
     generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget},
@@ -18,8 +18,9 @@ use crate::{
     visibility::RawVisibility,
 };
 
-pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree) -> String {
-    let mut p = Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true };
+pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree, edition: Edition) -> String {
+    let mut p =
+        Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true, edition };
 
     if let Some(attrs) = tree.attrs.get(&AttrOwner::TopLevel) {
         p.print_attrs(attrs, true, "\n");
@@ -56,6 +57,7 @@ struct Printer<'a> {
     buf: String,
     indent_level: usize,
     needs_indent: bool,
+    edition: Edition,
 }
 
 impl Printer<'_> {
@@ -97,7 +99,7 @@ impl Printer<'_> {
                 self,
                 "#{}[{}{}]{}",
                 inner,
-                attr.path.display(self.db.upcast()),
+                attr.path.display(self.db.upcast(), self.edition),
                 attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(),
                 separated_by,
             );
@@ -113,13 +115,14 @@ impl Printer<'_> {
     fn print_visibility(&mut self, vis: RawVisibilityId) {
         match &self.tree[vis] {
             RawVisibility::Module(path, _expl) => {
-                w!(self, "pub({}) ", path.display(self.db.upcast()))
+                w!(self, "pub({}) ", path.display(self.db.upcast(), self.edition))
             }
             RawVisibility::Public => w!(self, "pub "),
         };
     }
 
     fn print_fields(&mut self, parent: FieldParent, kind: FieldsShape, fields: &[Field]) {
+        let edition = self.edition;
         match kind {
             FieldsShape::Record => {
                 self.whitespace();
@@ -131,7 +134,7 @@ impl Printer<'_> {
                         "\n",
                     );
                     this.print_visibility(*visibility);
-                    w!(this, "{}: ", name.display(self.db.upcast()));
+                    w!(this, "{}: ", name.display(self.db.upcast(), edition));
                     this.print_type_ref(type_ref);
                     wln!(this, ",");
                 }
@@ -147,7 +150,7 @@ impl Printer<'_> {
                         "\n",
                     );
                     this.print_visibility(*visibility);
-                    w!(this, "{}: ", name.display(self.db.upcast()));
+                    w!(this, "{}: ", name.display(self.db.upcast(), edition));
                    this.print_type_ref(type_ref);
                    wln!(this, ",");
                }
@@ -186,20 +189,20 @@ impl Printer<'_> {
     fn print_use_tree(&mut self, use_tree: &UseTree) {
         match &use_tree.kind {
             UseTreeKind::Single { path, alias } => {
-                w!(self, "{}", path.display(self.db.upcast()));
+                w!(self, "{}", path.display(self.db.upcast(), self.edition));
                 if let Some(alias) = alias {
-                    w!(self, " as {}", alias);
+                    w!(self, " as {}", alias.display(self.edition));
                 }
             }
             UseTreeKind::Glob { path } => {
                 if let Some(path) = path {
-                    w!(self, "{}::", path.display(self.db.upcast()));
+                    w!(self, "{}::", path.display(self.db.upcast(), self.edition));
                 }
                 w!(self, "*");
             }
             UseTreeKind::Prefixed { prefix, list } => {
                 if let Some(prefix) = prefix {
-                    w!(self, "{}::", prefix.display(self.db.upcast()));
+                    w!(self, "{}::", prefix.display(self.db.upcast(), self.edition));
                 }
                 w!(self, "{{");
                 for (i, tree) in list.iter().enumerate() {
@@ -229,9 +232,9 @@ impl Printer<'_> {
                 let ExternCrate { name, alias, visibility, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "extern crate {}", name.display(self.db.upcast()));
+                w!(self, "extern crate {}", name.display(self.db.upcast(), self.edition));
                 if let Some(alias) = alias {
-                    w!(self, " as {}", alias);
+                    w!(self, " as {}", alias.display(self.edition));
                 }
                 wln!(self, ";");
             }
@@ -278,7 +281,7 @@ impl Printer<'_> {
                 if let Some(abi) = abi {
                     w!(self, "extern \"{}\" ", abi);
                 }
-                w!(self, "fn {}", name.display(self.db.upcast()));
+                w!(self, "fn {}", name.display(self.db.upcast(), self.edition));
                 self.print_generic_params(explicit_generic_params, it.into());
                 w!(self, "(");
                 if !params.is_empty() {
@@ -314,7 +317,7 @@ impl Printer<'_> {
                     &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "struct {}", name.display(self.db.upcast()));
+                w!(self, "struct {}", name.display(self.db.upcast(), self.edition));
                 self.print_generic_params(generic_params, it.into());
                 self.print_fields_and_where_clause(
                     FieldParent::Struct(it),
@@ -332,7 +335,7 @@ impl Printer<'_> {
                 let Union { name, visibility, fields, generic_params, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "union {}", name.display(self.db.upcast()));
+                w!(self, "union {}", name.display(self.db.upcast(), self.edition));
                 self.print_generic_params(generic_params, it.into());
                 self.print_fields_and_where_clause(
                     FieldParent::Union(it),
@@ -346,15 +349,16 @@ impl Printer<'_> {
                 let Enum { name, visibility, variants, generic_params, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "enum {}", name.display(self.db.upcast()));
+                w!(self, "enum {}", name.display(self.db.upcast(), self.edition));
                 self.print_generic_params(generic_params, it.into());
                 self.print_where_clause_and_opening_brace(generic_params);
+                let edition = self.edition;
                 self.indented(|this| {
                     for variant in FileItemTreeId::range_iter(variants.clone()) {
                         let Variant { name, fields, shape: kind, ast_id } = &this.tree[variant];
                         this.print_ast_id(ast_id.erase());
                         this.print_attrs_of(variant, "\n");
-                        w!(this, "{}", name.display(self.db.upcast()));
+                        w!(this, "{}", name.display(self.db.upcast(), edition));
                         this.print_fields(FieldParent::Variant(variant), *kind, fields);
                         wln!(this, ",");
                     }
@@ -367,7 +371,7 @@ impl Printer<'_> {
                 self.print_visibility(*visibility);
                 w!(self, "const ");
                 match name {
-                    Some(name) => w!(self, "{}", name.display(self.db.upcast())),
+                    Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)),
                     None => w!(self, "_"),
                 }
                 w!(self, ": ");
@@ -382,7 +386,7 @@ impl Printer<'_> {
                 if *mutable {
                     w!(self, "mut ");
                 }
-                w!(self, "{}: ", name.display(self.db.upcast()));
+                w!(self, "{}: ", name.display(self.db.upcast(), self.edition));
                 self.print_type_ref(type_ref);
                 w!(self, " = _;");
                 wln!(self);
@@ -398,7 +402,7 @@ impl Printer<'_> {
                 if *is_auto {
                     w!(self, "auto ");
                 }
-                w!(self, "trait {}", name.display(self.db.upcast()));
+                w!(self, "trait {}", name.display(self.db.upcast(), self.edition));
                 self.print_generic_params(generic_params, it.into());
                 self.print_where_clause_and_opening_brace(generic_params);
                 self.indented(|this| {
@@ -412,7 +416,7 @@ impl Printer<'_> {
                 let TraitAlias { name, visibility, generic_params, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "trait {}", name.display(self.db.upcast()));
+                w!(self, "trait {}", name.display(self.db.upcast(), self.edition));
                 self.print_generic_params(generic_params, it.into());
                 w!(self, " = ");
                 self.print_where_clause(generic_params);
@@ -457,7 +461,7 @@ impl Printer<'_> {
                     &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "type {}", name.display(self.db.upcast()));
+                w!(self, "type {}", name.display(self.db.upcast(), self.edition));
                 self.print_generic_params(generic_params, it.into());
                 if !bounds.is_empty() {
                     w!(self, ": ");
@@ -475,7 +479,7 @@ impl Printer<'_> {
                 let Mod { name, visibility, kind, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "mod {}", name.display(self.db.upcast()));
+                w!(self, "mod {}", name.display(self.db.upcast(), self.edition));
                 match kind {
                     ModKind::Inline { items } => {
                         w!(self, " {{");
@@ -500,18 +504,22 @@ impl Printer<'_> {
                     ctxt,
                     expand_to
                 );
-                wln!(self, "{}!(...);", path.display(self.db.upcast()));
+                wln!(self, "{}!(...);", path.display(self.db.upcast(), self.edition));
             }
             ModItem::MacroRules(it) => {
                 let MacroRules { name, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
-                wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db.upcast()));
+                wln!(
+                    self,
+                    "macro_rules! {} {{ ... }}",
+                    name.display(self.db.upcast(), self.edition)
+                );
             }
             ModItem::Macro2(it) => {
                 let Macro2 { name, visibility, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast()));
+                wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast(), self.edition));
             }
         }
 
@@ -519,15 +527,18 @@ impl Printer<'_> {
     }
 
     fn print_type_ref(&mut self, type_ref: &TypeRef) {
-        print_type_ref(self.db, type_ref, self).unwrap();
+        let edition = self.edition;
+        print_type_ref(self.db, type_ref, self, edition).unwrap();
     }
 
     fn print_type_bounds(&mut self, bounds: &[Interned<TypeBound>]) {
-        print_type_bounds(self.db, bounds, self).unwrap();
+        let edition = self.edition;
+        print_type_bounds(self.db, bounds, self, edition).unwrap();
     }
 
     fn print_path(&mut self, path: &Path) {
-        print_path(self.db, path, self).unwrap();
+        let edition = self.edition;
+        print_path(self.db, path, self, edition).unwrap();
     }
 
     fn print_generic_params(&mut self, params: &GenericParams, parent: GenericModItem) {
@@ -543,7 +554,7 @@ impl Printer<'_> {
             }
             first = false;
             self.print_attrs_of(AttrOwner::LifetimeParamData(parent, idx), " ");
-            w!(self, "{}", lt.name.display(self.db.upcast()));
+            w!(self, "{}", lt.name.display(self.db.upcast(), self.edition));
         }
         for (idx, x) in params.iter_type_or_consts() {
             if !first {
@@ -553,11 +564,11 @@ impl Printer<'_> {
             self.print_attrs_of(AttrOwner::TypeOrConstParamData(parent, idx), " ");
             match x {
                 TypeOrConstParamData::TypeParamData(ty) => match &ty.name {
-                    Some(name) => w!(self, "{}", name.display(self.db.upcast())),
+                    Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)),
                     None => w!(self, "_anon_{}", idx.into_raw()),
                 },
                 TypeOrConstParamData::ConstParamData(konst) => {
-                    w!(self, "const {}: ", konst.name.display(self.db.upcast()));
+                    w!(self, "const {}: ", konst.name.display(self.db.upcast(), self.edition));
                     self.print_type_ref(&konst.ty);
                 }
             }
@@ -580,6 +591,7 @@ impl Printer<'_> {
         }
 
         w!(self, "\nwhere");
+        let edition = self.edition;
         self.indented(|this| {
             for (i, pred) in params.where_predicates().enumerate() {
                 if i != 0 {
@@ -592,8 +604,8 @@ impl Printer<'_> {
                     wln!(
                         this,
                         "{}: {},",
-                        target.name.display(self.db.upcast()),
-                        bound.name.display(self.db.upcast())
+                        target.name.display(self.db.upcast(), edition),
+                        bound.name.display(self.db.upcast(), edition)
                     );
                     continue;
                 }
@@ -603,7 +615,7 @@ impl Printer<'_> {
                         if i != 0 {
                             w!(this, ", ");
                         }
-                        w!(this, "{}", lt.display(self.db.upcast()));
+                        w!(this, "{}", lt.display(self.db.upcast(), edition));
                     }
                     w!(this, "> ");
                     (target, bound)
@@ -613,7 +625,7 @@ impl Printer<'_> {
                 match target {
                     WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty),
                     WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() {
-                        Some(name) => w!(this, "{}", name.display(self.db.upcast())),
+                        Some(name) => w!(this, "{}", name.display(self.db.upcast(), edition)),
                         None => w!(this, "_anon_{}", id.into_raw()),
                     },
                 }
@@ -1,4 +1,5 @@
 use expect_test::{expect, Expect};
+use span::Edition;
 use test_fixture::WithFixture;
 
 use crate::{db::DefDatabase, test_db::TestDB};
@@ -6,7 +7,7 @@ use crate::{db::DefDatabase, test_db::TestDB};
 fn check(ra_fixture: &str, expect: Expect) {
     let (db, file_id) = TestDB::with_single_file(ra_fixture);
     let item_tree = db.file_item_tree(file_id.into());
-    let pretty = item_tree.pretty_print(&db);
+    let pretty = item_tree.pretty_print(&db, Edition::CURRENT);
     expect.assert_eq(&pretty);
 }
 
@@ -74,6 +74,13 @@ impl LangItemTarget {
             _ => None,
         }
     }
+
+    pub fn as_type_alias(self) -> Option<TypeAliasId> {
+        match self {
+            LangItemTarget::TypeAlias(id) => Some(id),
+            _ => None,
+        }
+    }
 }
 
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
@@ -117,11 +124,19 @@ impl LangItems {
             match def {
                 ModuleDefId::TraitId(trait_) => {
                     lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait);
-                    db.trait_data(trait_).items.iter().for_each(|&(_, assoc_id)| {
-                        if let AssocItemId::FunctionId(f) = assoc_id {
-                            lang_items.collect_lang_item(db, f, LangItemTarget::Function);
-                        }
-                    });
+                    db.trait_data(trait_).items.iter().for_each(
+                        |&(_, assoc_id)| match assoc_id {
+                            AssocItemId::FunctionId(f) => {
+                                lang_items.collect_lang_item(db, f, LangItemTarget::Function);
+                            }
+                            AssocItemId::TypeAliasId(alias) => lang_items.collect_lang_item(
+                                db,
+                                alias,
+                                LangItemTarget::TypeAlias,
+                            ),
+                            AssocItemId::ConstId(_) => {}
+                        },
+                    );
                 }
                 ModuleDefId::AdtId(AdtId::EnumId(e)) => {
                     lang_items.collect_lang_item(db, e, LangItemTarget::EnumId);
@@ -453,6 +468,7 @@ language_item_table! {
 
     Context, sym::Context, context, Target::Struct, GenericRequirement::None;
     FuturePoll, sym::poll, future_poll_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
+    FutureOutput, sym::future_output, future_output, Target::TypeAlias, GenericRequirement::None;
 
     Option, sym::Option, option_type, Target::Enum, GenericRequirement::None;
     OptionSome, sym::Some, option_some_variant, Target::Variant, GenericRequirement::None;
@@ -467,6 +483,7 @@ language_item_table! {
     IntoFutureIntoFuture, sym::into_future, into_future_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
     IntoIterIntoIter, sym::into_iter, into_iter_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
     IteratorNext, sym::next, next_fn, Target::Method(MethodKind::Trait { body: false}), GenericRequirement::None;
+    Iterator, sym::iterator, iterator, Target::Trait, GenericRequirement::None;
 
     PinNewUnchecked, sym::new_unchecked, new_unchecked_fn, Target::Method(MethodKind::Inherent), GenericRequirement::None;
 
@ -241,7 +241,7 @@ pub type StaticLoc = AssocItemLoc<Static>;
|
||||||
impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static);
|
impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static);
|
||||||
impl_loc!(StaticLoc, id: Static, container: ItemContainerId);
|
impl_loc!(StaticLoc, id: Static, container: ItemContainerId);
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||||
pub struct TraitId(salsa::InternId);
|
pub struct TraitId(salsa::InternId);
|
||||||
pub type TraitLoc = ItemLoc<Trait>;
|
pub type TraitLoc = ItemLoc<Trait>;
|
||||||
impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
|
impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
|
||||||
|

@@ -25,7 +25,7 @@ use hir_expand::{
 InFile, MacroFileId, MacroFileIdExt,
 };
 use intern::Symbol;
-use span::Span;
+use span::{Edition, Span};
 use stdx::{format_to, format_to_acc};
 use syntax::{
 ast::{self, edit::IndentLevel},

@@ -257,21 +257,25 @@ fn pretty_print_macro_expansion(
 (T![;] | T!['{'] | T!['}'], _) => "\n",
 (_, T!['}']) => "\n",
 (IDENT | LIFETIME_IDENT, IDENT | LIFETIME_IDENT) => " ",
-_ if prev_kind.is_keyword() && curr_kind.is_keyword() => " ",
-(IDENT, _) if curr_kind.is_keyword() => " ",
-(_, IDENT) if prev_kind.is_keyword() => " ",
+_ if prev_kind.is_keyword(Edition::CURRENT)
+&& curr_kind.is_keyword(Edition::CURRENT) =>
+{
+" "
+}
+(IDENT, _) if curr_kind.is_keyword(Edition::CURRENT) => " ",
+(_, IDENT) if prev_kind.is_keyword(Edition::CURRENT) => " ",
 (T![>], IDENT) => " ",
-(T![>], _) if curr_kind.is_keyword() => " ",
+(T![>], _) if curr_kind.is_keyword(Edition::CURRENT) => " ",
 (T![->], _) | (_, T![->]) => " ",
 (T![&&], _) | (_, T![&&]) => " ",
 (T![,], _) => " ",
 (T![:], IDENT | T!['(']) => " ",
-(T![:], _) if curr_kind.is_keyword() => " ",
+(T![:], _) if curr_kind.is_keyword(Edition::CURRENT) => " ",
 (T![fn], T!['(']) => "",
-(T![']'], _) if curr_kind.is_keyword() => " ",
+(T![']'], _) if curr_kind.is_keyword(Edition::CURRENT) => " ",
 (T![']'], T![#]) => "\n",
 (T![Self], T![::]) => "",
-_ if prev_kind.is_keyword() => " ",
+_ if prev_kind.is_keyword(Edition::CURRENT) => " ",
 _ => "",
 };

@@ -328,6 +328,10 @@ impl DefMap {
 /// The module id of a crate or block root.
 pub const ROOT: LocalModuleId = LocalModuleId::from_raw(la_arena::RawIdx::from_u32(0));

+pub fn edition(&self) -> Edition {
+self.data.edition
+}
+
 pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, crate_id: CrateId) -> Arc<DefMap> {
 let crate_graph = db.crate_graph();
 let krate = &crate_graph[crate_id];

@@ -550,7 +554,7 @@ impl DefMap {
 for (name, child) in
 map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0))
 {
-let path = format!("{path}::{}", name.display(db.upcast()));
+let path = format!("{path}::{}", name.display(db.upcast(), Edition::LATEST));
 buf.push('\n');
 go(buf, db, map, &path, *child);
 }

@@ -548,7 +548,7 @@ impl DefCollector<'_> {
 types => {
 tracing::debug!(
 "could not resolve prelude path `{}` to module (resolved to {:?})",
-path.display(self.db.upcast()),
+path.display(self.db.upcast(), Edition::LATEST),
 types
 );
 }

@@ -768,7 +768,7 @@ impl DefCollector<'_> {
 }

 fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
-let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db.upcast()))
+let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db.upcast(), Edition::LATEST))
 .entered();
 tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
 match import.source {

@@ -1606,7 +1606,11 @@ impl ModCollector<'_, '_> {

 // Prelude module is always considered to be `#[macro_use]`.
 if let Some((prelude_module, _use)) = self.def_collector.def_map.prelude {
-if prelude_module.krate != krate && is_crate_root {
+// Don't insert macros from the prelude into blocks, as they can be shadowed by other macros.
+if prelude_module.krate != krate
+&& is_crate_root
+&& self.def_collector.def_map.block.is_none()
+{
 cov_mark::hit!(prelude_is_macro_use);
 self.def_collector.import_macros_from_extern_crate(
 prelude_module.krate,

@@ -2151,7 +2155,7 @@ impl ModCollector<'_, '_> {
 }
 tracing::debug!(
 "non-builtin attribute {}",
-attr.path.display(self.def_collector.db.upcast())
+attr.path.display(self.def_collector.db.upcast(), Edition::LATEST)
 );

 let ast_id = AstIdWithPath::new(

@@ -2286,8 +2290,8 @@ impl ModCollector<'_, '_> {
 stdx::always!(
 name == mac.name,
 "built-in macro {} has #[rustc_builtin_macro] which declares different name {}",
-mac.name.display(self.def_collector.db.upcast()),
-name.display(self.def_collector.db.upcast())
+mac.name.display(self.def_collector.db.upcast(), Edition::LATEST),
+name.display(self.def_collector.db.upcast(), Edition::LATEST),
 );
 helpers_opt = Some(helpers);
 }

@@ -1,6 +1,7 @@
 use expect_test::expect;

 use itertools::Itertools;
+use span::Edition;

 use super::*;

@@ -1100,7 +1101,7 @@ pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
 assert_eq!(def_map.data.exported_derives.len(), 1);
 match def_map.data.exported_derives.values().next() {
 Some(helpers) => match &**helpers {
-[attr] => assert_eq!(attr.display(&db).to_string(), "helper_attr"),
+[attr] => assert_eq!(attr.display(&db, Edition::CURRENT).to_string(), "helper_attr"),
 _ => unreachable!(),
 },
 _ => unreachable!(),

@@ -1456,7 +1457,7 @@ fn proc_attr(a: TokenStream, b: TokenStream) -> TokenStream { a }
 let actual = def_map
 .macro_use_prelude
 .keys()
-.map(|name| name.display(&db).to_string())
+.map(|name| name.display(&db, Edition::CURRENT).to_string())
 .sorted()
 .join("\n");

@@ -144,14 +144,14 @@ pub struct Baz;

 crate::r#async
 Bar: t v
-foo: t
 r#async: t
+foo: t
-crate::r#async::foo
-Foo: t v
-
 crate::r#async::r#async
 Baz: t v

+crate::r#async::foo
+Foo: t v
 "#]],
 );
 }

@@ -13,7 +13,8 @@ use crate::{
 };
 use hir_expand::name::Name;
 use intern::Interned;
-use syntax::{ast, ToSmolStr};
+use span::Edition;
+use syntax::ast;

 pub use hir_expand::mod_path::{path, ModPath, PathKind};

@@ -25,11 +26,21 @@ pub enum ImportAlias {
 Alias(Name),
 }

-impl Display for ImportAlias {
+impl ImportAlias {
+pub fn display(&self, edition: Edition) -> impl Display + '_ {
+ImportAliasDisplay { value: self, edition }
+}
+}
+
+struct ImportAliasDisplay<'a> {
+value: &'a ImportAlias,
+edition: Edition,
+}
+impl Display for ImportAliasDisplay<'_> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-match self {
+match self.value {
 ImportAlias::Underscore => f.write_str("_"),
-ImportAlias::Alias(name) => f.write_str(&name.display_no_db().to_smolstr()),
+ImportAlias::Alias(name) => Display::fmt(&name.display_no_db(self.edition), f),
 }
 }
 }

@@ -5,6 +5,7 @@ use std::fmt::{self, Write};
 use hir_expand::mod_path::PathKind;
 use intern::Interned;
 use itertools::Itertools;
+use span::Edition;

 use crate::{
 db::DefDatabase,

@@ -13,46 +14,51 @@ use crate::{
 type_ref::{Mutability, TraitBoundModifier, TypeBound, TypeRef},
 };

-pub(crate) fn print_path(db: &dyn DefDatabase, path: &Path, buf: &mut dyn Write) -> fmt::Result {
+pub(crate) fn print_path(
+db: &dyn DefDatabase,
+path: &Path,
+buf: &mut dyn Write,
+edition: Edition,
+) -> fmt::Result {
 if let Path::LangItem(it, s) = path {
 write!(buf, "builtin#lang(")?;
 match *it {
 LangItemTarget::ImplDef(it) => write!(buf, "{it:?}")?,
 LangItemTarget::EnumId(it) => {
-write!(buf, "{}", db.enum_data(it).name.display(db.upcast()))?
+write!(buf, "{}", db.enum_data(it).name.display(db.upcast(), edition))?
 }
 LangItemTarget::Function(it) => {
-write!(buf, "{}", db.function_data(it).name.display(db.upcast()))?
+write!(buf, "{}", db.function_data(it).name.display(db.upcast(), edition))?
 }
 LangItemTarget::Static(it) => {
-write!(buf, "{}", db.static_data(it).name.display(db.upcast()))?
+write!(buf, "{}", db.static_data(it).name.display(db.upcast(), edition))?
 }
 LangItemTarget::Struct(it) => {
-write!(buf, "{}", db.struct_data(it).name.display(db.upcast()))?
+write!(buf, "{}", db.struct_data(it).name.display(db.upcast(), edition))?
 }
 LangItemTarget::Union(it) => {
-write!(buf, "{}", db.union_data(it).name.display(db.upcast()))?
+write!(buf, "{}", db.union_data(it).name.display(db.upcast(), edition))?
 }
 LangItemTarget::TypeAlias(it) => {
-write!(buf, "{}", db.type_alias_data(it).name.display(db.upcast()))?
+write!(buf, "{}", db.type_alias_data(it).name.display(db.upcast(), edition))?
 }
 LangItemTarget::Trait(it) => {
-write!(buf, "{}", db.trait_data(it).name.display(db.upcast()))?
+write!(buf, "{}", db.trait_data(it).name.display(db.upcast(), edition))?
 }
 LangItemTarget::EnumVariant(it) => {
-write!(buf, "{}", db.enum_variant_data(it).name.display(db.upcast()))?
+write!(buf, "{}", db.enum_variant_data(it).name.display(db.upcast(), edition))?
 }
 }

 if let Some(s) = s {
-write!(buf, "::{}", s.display(db.upcast()))?;
+write!(buf, "::{}", s.display(db.upcast(), edition))?;
 }
 return write!(buf, ")");
 }
 match path.type_anchor() {
 Some(anchor) => {
 write!(buf, "<")?;
-print_type_ref(db, anchor, buf)?;
+print_type_ref(db, anchor, buf, edition)?;
 write!(buf, ">::")?;
 }
 None => match path.kind() {

@@ -78,10 +84,10 @@ pub(crate) fn print_path(db: &dyn DefDatabase, path: &Path, buf: &mut dyn Write)
 write!(buf, "::")?;
 }

-write!(buf, "{}", segment.name.display(db.upcast()))?;
+write!(buf, "{}", segment.name.display(db.upcast(), edition))?;
 if let Some(generics) = segment.args_and_bindings {
 write!(buf, "::<")?;
-print_generic_args(db, generics, buf)?;
+print_generic_args(db, generics, buf, edition)?;

 write!(buf, ">")?;
 }

@@ -94,12 +100,13 @@ pub(crate) fn print_generic_args(
 db: &dyn DefDatabase,
 generics: &GenericArgs,
 buf: &mut dyn Write,
+edition: Edition,
 ) -> fmt::Result {
 let mut first = true;
 let args = if generics.has_self_type {
 let (self_ty, args) = generics.args.split_first().unwrap();
 write!(buf, "Self=")?;
-print_generic_arg(db, self_ty, buf)?;
+print_generic_arg(db, self_ty, buf, edition)?;
 first = false;
 args
 } else {

@@ -110,21 +117,21 @@ pub(crate) fn print_generic_args(
 write!(buf, ", ")?;
 }
 first = false;
-print_generic_arg(db, arg, buf)?;
+print_generic_arg(db, arg, buf, edition)?;
 }
 for binding in generics.bindings.iter() {
 if !first {
 write!(buf, ", ")?;
 }
 first = false;
-write!(buf, "{}", binding.name.display(db.upcast()))?;
+write!(buf, "{}", binding.name.display(db.upcast(), edition))?;
 if !binding.bounds.is_empty() {
 write!(buf, ": ")?;
-print_type_bounds(db, &binding.bounds, buf)?;
+print_type_bounds(db, &binding.bounds, buf, edition)?;
 }
 if let Some(ty) = &binding.type_ref {
 write!(buf, " = ")?;
-print_type_ref(db, ty, buf)?;
+print_type_ref(db, ty, buf, edition)?;
 }
 }
 Ok(())

@@ -134,11 +141,12 @@ pub(crate) fn print_generic_arg(
 db: &dyn DefDatabase,
 arg: &GenericArg,
 buf: &mut dyn Write,
+edition: Edition,
 ) -> fmt::Result {
 match arg {
-GenericArg::Type(ty) => print_type_ref(db, ty, buf),
-GenericArg::Const(c) => write!(buf, "{}", c.display(db.upcast())),
-GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast())),
+GenericArg::Type(ty) => print_type_ref(db, ty, buf, edition),
+GenericArg::Const(c) => write!(buf, "{}", c.display(db.upcast(), edition)),
+GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast(), edition)),
 }
 }

@@ -146,6 +154,7 @@ pub(crate) fn print_type_ref(
 db: &dyn DefDatabase,
 type_ref: &TypeRef,
 buf: &mut dyn Write,
+edition: Edition,
 ) -> fmt::Result {
 // FIXME: deduplicate with `HirDisplay` impl
 match type_ref {

@@ -157,18 +166,18 @@ pub(crate) fn print_type_ref(
 if i != 0 {
 write!(buf, ", ")?;
 }
-print_type_ref(db, field, buf)?;
+print_type_ref(db, field, buf, edition)?;
 }
 write!(buf, ")")?;
 }
-TypeRef::Path(path) => print_path(db, path, buf)?,
+TypeRef::Path(path) => print_path(db, path, buf, edition)?,
 TypeRef::RawPtr(pointee, mtbl) => {
 let mtbl = match mtbl {
 Mutability::Shared => "*const",
 Mutability::Mut => "*mut",
 };
 write!(buf, "{mtbl} ")?;
-print_type_ref(db, pointee, buf)?;
+print_type_ref(db, pointee, buf, edition)?;
 }
 TypeRef::Reference(pointee, lt, mtbl) => {
 let mtbl = match mtbl {

@@ -177,19 +186,19 @@ pub(crate) fn print_type_ref(
 };
 write!(buf, "&")?;
 if let Some(lt) = lt {
-write!(buf, "{} ", lt.name.display(db.upcast()))?;
+write!(buf, "{} ", lt.name.display(db.upcast(), edition))?;
 }
 write!(buf, "{mtbl}")?;
-print_type_ref(db, pointee, buf)?;
+print_type_ref(db, pointee, buf, edition)?;
 }
 TypeRef::Array(elem, len) => {
 write!(buf, "[")?;
-print_type_ref(db, elem, buf)?;
-write!(buf, "; {}]", len.display(db.upcast()))?;
+print_type_ref(db, elem, buf, edition)?;
+write!(buf, "; {}]", len.display(db.upcast(), edition))?;
 }
 TypeRef::Slice(elem) => {
 write!(buf, "[")?;
-print_type_ref(db, elem, buf)?;
+print_type_ref(db, elem, buf, edition)?;
 write!(buf, "]")?;
 }
 TypeRef::Fn(args_and_ret, varargs, is_unsafe, abi) => {

@@ -208,7 +217,7 @@ pub(crate) fn print_type_ref(
 if i != 0 {
 write!(buf, ", ")?;
 }
-print_type_ref(db, typeref, buf)?;
+print_type_ref(db, typeref, buf, edition)?;
 }
 if *varargs {
 if !args.is_empty() {

@@ -217,7 +226,7 @@ pub(crate) fn print_type_ref(
 write!(buf, "...")?;
 }
 write!(buf, ") -> ")?;
-print_type_ref(db, return_type, buf)?;
+print_type_ref(db, return_type, buf, edition)?;
 }
 TypeRef::Macro(_ast_id) => {
 write!(buf, "<macro>")?;

@@ -225,11 +234,11 @@ pub(crate) fn print_type_ref(
 TypeRef::Error => write!(buf, "{{unknown}}")?,
 TypeRef::ImplTrait(bounds) => {
 write!(buf, "impl ")?;
-print_type_bounds(db, bounds, buf)?;
+print_type_bounds(db, bounds, buf, edition)?;
 }
 TypeRef::DynTrait(bounds) => {
 write!(buf, "dyn ")?;
-print_type_bounds(db, bounds, buf)?;
+print_type_bounds(db, bounds, buf, edition)?;
 }
 }

@@ -240,6 +249,7 @@ pub(crate) fn print_type_bounds(
 db: &dyn DefDatabase,
 bounds: &[Interned<TypeBound>],
 buf: &mut dyn Write,
+edition: Edition,
 ) -> fmt::Result {
 for (i, bound) in bounds.iter().enumerate() {
 if i != 0 {

@@ -252,17 +262,17 @@ pub(crate) fn print_type_bounds(
 TraitBoundModifier::None => (),
 TraitBoundModifier::Maybe => write!(buf, "?")?,
 }
-print_path(db, path, buf)?;
+print_path(db, path, buf, edition)?;
 }
 TypeBound::ForLifetime(lifetimes, path) => {
 write!(
 buf,
 "for<{}> ",
-lifetimes.iter().map(|it| it.display(db.upcast())).format(", ")
+lifetimes.iter().map(|it| it.display(db.upcast(), edition)).format(", ")
 )?;
-print_path(db, path, buf)?;
+print_path(db, path, buf, edition)?;
 }
-TypeBound::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast()))?,
+TypeBound::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast(), edition))?,
 TypeBound::Error => write!(buf, "{{unknown}}")?,
 }
 }

@@ -176,9 +176,10 @@ fn eager_macro_recur(
 Some(path) => match macro_resolver(&path) {
 Some(def) => def,
 None => {
+let edition = db.crate_graph()[krate].edition;
 error = Some(ExpandError::other(
 span_map.span_at(call.syntax().text_range().start()),
-format!("unresolved macro {}", path.display(db)),
+format!("unresolved macro {}", path.display(db, edition)),
 ));
 offset += call.syntax().text_range().len();
 continue;

@@ -461,3 +461,12 @@ impl<N: AstNode> InFile<N> {
 Some(InRealFile::new(file_id, value))
 }
 }
+
+impl<T> InFile<T> {
+pub fn into_real_file(self) -> Result<InRealFile<T>, InFile<T>> {
+match self.file_id.repr() {
+HirFileIdRepr::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
+HirFileIdRepr::MacroFile(_) => Err(self),
+}
+}
+}
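
Note for readers (illustration only, not part of this commit): the `into_real_file` helper added above follows a "refine or give back" pattern — on success the value is rewrapped with the narrower file id, on failure the caller gets the original back by value. A self-contained sketch of the same pattern; `FileId`, `MacroId`, `HirFileId`, `InFile` and `InRealFile` here are toy stand-ins, not the real hir-expand types:

// Toy stand-in types; only the shape of the pattern matches the diff above.
#[derive(Debug, Clone, Copy)]
struct FileId(u32);
#[derive(Debug, Clone, Copy)]
struct MacroId(u32);

#[derive(Debug, Clone, Copy)]
enum HirFileId {
    File(FileId),
    Macro(MacroId),
}

#[derive(Debug)]
struct InFile<T> {
    file_id: HirFileId,
    value: T,
}

#[derive(Debug)]
struct InRealFile<T> {
    file_id: FileId,
    value: T,
}

impl<T> InFile<T> {
    // On success the value is rewrapped with the narrower id; on failure the
    // caller gets `self` back unchanged and can keep using it.
    fn into_real_file(self) -> Result<InRealFile<T>, InFile<T>> {
        match self.file_id {
            HirFileId::File(file_id) => Ok(InRealFile { file_id, value: self.value }),
            HirFileId::Macro(_) => Err(self),
        }
    }
}

fn main() {
    let in_file = InFile { file_id: HirFileId::File(FileId(0)), value: "node" };
    assert!(in_file.into_real_file().is_ok());

    let in_macro = InFile { file_id: HirFileId::Macro(MacroId(1)), value: "node" };
    assert!(in_macro.into_real_file().is_err());
}

Returning `Err(self)` rather than an `Option` keeps ownership with the caller, so the unrefined value is still usable on the macro-file path.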

@@ -192,7 +192,7 @@ impl ExpandErrorKind {
 ("overflow expanding the original macro".to_owned(), true)
 }
 ExpandErrorKind::Other(e) => ((**e).to_owned(), true),
-ExpandErrorKind::ProcMacroPanic(e) => ((**e).to_owned(), true),
+ExpandErrorKind::ProcMacroPanic(e) => (format!("proc-macro panicked: {e}"), true),
 }
 }
 }

@@ -279,6 +279,7 @@ pub enum MacroCallKind {
 }

 pub trait HirFileIdExt {
+fn edition(self, db: &dyn ExpandDatabase) -> Edition;
 /// Returns the original file of this macro call hierarchy.
 fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId;

@@ -293,6 +294,12 @@ pub trait HirFileIdExt {
 }

 impl HirFileIdExt for HirFileId {
+fn edition(self, db: &dyn ExpandDatabase) -> Edition {
+match self.repr() {
+HirFileIdRepr::FileId(file_id) => file_id.edition(),
+HirFileIdRepr::MacroFile(m) => m.macro_call_id.lookup(db).def.edition,
+}
+}
 fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
 let mut file_id = self;
 loop {

@@ -14,7 +14,7 @@ use crate::{
 use base_db::CrateId;
 use intern::sym;
 use smallvec::SmallVec;
-use span::SyntaxContextId;
+use span::{Edition, SyntaxContextId};
 use syntax::{ast, AstNode};

 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]

@@ -140,8 +140,12 @@ impl ModPath {
 UnescapedModPath(self)
 }

-pub fn display<'a>(&'a self, db: &'a dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
-Display { db, path: self }
+pub fn display<'a>(
+&'a self,
+db: &'a dyn crate::db::ExpandDatabase,
+edition: Edition,
+) -> impl fmt::Display + 'a {
+Display { db, path: self, edition }
 }
 }

@@ -154,11 +158,12 @@ impl Extend<Name> for ModPath {
 struct Display<'a> {
 db: &'a dyn ExpandDatabase,
 path: &'a ModPath,
+edition: Edition,
 }

 impl fmt::Display for Display<'_> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-display_fmt_path(self.db, self.path, f, true)
+display_fmt_path(self.db, self.path, f, Escape::IfNeeded(self.edition))
 }
 }

@@ -169,7 +174,7 @@ struct UnescapedDisplay<'a> {

 impl fmt::Display for UnescapedDisplay<'_> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-display_fmt_path(self.db, self.path.0, f, false)
+display_fmt_path(self.db, self.path.0, f, Escape::No)
 }
 }

@@ -178,11 +183,17 @@ impl From<Name> for ModPath {
 ModPath::from_segments(PathKind::Plain, iter::once(name))
 }
 }

+enum Escape {
+No,
+IfNeeded(Edition),
+}
+
 fn display_fmt_path(
 db: &dyn ExpandDatabase,
 path: &ModPath,
 f: &mut fmt::Formatter<'_>,
-escaped: bool,
+escaped: Escape,
 ) -> fmt::Result {
 let mut first_segment = true;
 let mut add_segment = |s| -> fmt::Result {

@@ -210,10 +221,9 @@ fn display_fmt_path(
 f.write_str("::")?;
 }
 first_segment = false;
-if escaped {
-segment.display(db).fmt(f)?;
-} else {
-segment.unescaped().display(db).fmt(f)?;
+match escaped {
+Escape::IfNeeded(edition) => segment.display(db, edition).fmt(f)?,
+Escape::No => segment.unescaped().display(db).fmt(f)?,
 }
 }
 Ok(())

@@ -322,9 +332,11 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModP
 tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::self_ => PathKind::SELF,
 tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::super_ => {
 let mut deg = 1;
-while let Some(tt::Leaf::Ident(tt::Ident { sym: text, span, is_raw })) = leaves.next() {
+while let Some(tt::Leaf::Ident(tt::Ident { sym: text, span, is_raw: _ })) =
+leaves.next()
+{
 if *text != sym::super_ {
-segments.push(Name::new_symbol_maybe_raw(text.clone(), *is_raw, span.ctx));
+segments.push(Name::new_symbol(text.clone(), span.ctx));
 break;
 }
 deg += 1;

@@ -333,19 +345,13 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModP
 }
 tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::crate_ => PathKind::Crate,
 tt::Leaf::Ident(ident) => {
-segments.push(Name::new_symbol_maybe_raw(
-ident.sym.clone(),
-ident.is_raw,
-ident.span.ctx,
-));
+segments.push(Name::new_symbol(ident.sym.clone(), ident.span.ctx));
 PathKind::Plain
 }
 _ => return None,
 };
 segments.extend(leaves.filter_map(|leaf| match leaf {
-::tt::Leaf::Ident(ident) => {
-Some(Name::new_symbol_maybe_raw(ident.sym.clone(), ident.is_raw, ident.span.ctx))
-}
+::tt::Leaf::Ident(ident) => Some(Name::new_symbol(ident.sym.clone(), ident.span.ctx)),
 _ => None,
 }));
 Some(ModPath { kind, segments })

@@ -3,22 +3,22 @@
 use std::fmt;

 use intern::{sym, Symbol};
-use span::SyntaxContextId;
-use syntax::{ast, utils::is_raw_identifier};
+use span::{Edition, SyntaxContextId};
+use syntax::ast;
+use syntax::utils::is_raw_identifier;

 /// `Name` is a wrapper around string, which is used in hir for both references
 /// and declarations. In theory, names should also carry hygiene info, but we are
 /// not there yet!
 ///
-/// Note that `Name` holds and prints escaped name i.e. prefixed with "r#" when it
-/// is a raw identifier. Use [`unescaped()`][Name::unescaped] when you need the
-/// name without "r#".
+/// Note that the rawness (`r#`) of names does not depend on whether they are written raw.
+/// This is because we want to show (in completions etc.) names as raw depending on the needs
+/// of the current crate, for example if it is edition 2021 complete `gen` even if the defining
+/// crate is in edition 2024 and wrote `r#gen`, and the opposite holds as well.
 #[derive(Clone, PartialEq, Eq, Hash)]
 pub struct Name {
 symbol: Symbol,
 ctx: (),
-// FIXME: We should probably encode rawness as a property here instead, once we have hygiene
-// in here we've got 4 bytes of padding to fill anyways
 }

 impl fmt::Debug for Name {

@@ -42,6 +42,7 @@ impl PartialOrd for Name {
 }
 }

+// No need to strip `r#`, all comparisons are done against well-known symbols.
 impl PartialEq<Symbol> for Name {
 fn eq(&self, sym: &Symbol) -> bool {
 self.symbol == *sym

@@ -55,16 +56,16 @@ impl PartialEq<Name> for Symbol {
 }

 /// Wrapper of `Name` to print the name without "r#" even when it is a raw identifier.
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct UnescapedName<'a>(&'a Name);

-impl UnescapedName<'_> {
-pub fn display(&self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + '_ {
+impl<'a> UnescapedName<'a> {
+pub fn display(self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
 _ = db;
 UnescapedDisplay { name: self }
 }
 #[doc(hidden)]
-pub fn display_no_db(&self) -> impl fmt::Display + '_ {
+pub fn display_no_db(self) -> impl fmt::Display + 'a {
 UnescapedDisplay { name: self }
 }
 }

@@ -77,16 +78,9 @@ impl Name {
 Name { symbol: Symbol::intern(text), ctx: () }
 }

-pub fn new(text: &str, raw: tt::IdentIsRaw, ctx: SyntaxContextId) -> Name {
+pub fn new(text: &str, ctx: SyntaxContextId) -> Name {
 _ = ctx;
-Name {
-symbol: if raw.yes() {
-Symbol::intern(&format!("{}{text}", raw.as_str()))
-} else {
-Symbol::intern(text)
-},
-ctx: (),
-}
+Self::new_text(text)
 }

 pub fn new_tuple_field(idx: usize) -> Name {

@@ -97,23 +91,9 @@ impl Name {
 Name { symbol: Symbol::intern(lt.text().as_str()), ctx: () }
 }

-/// Shortcut to create a name from a string literal.
-fn new_ref(text: &str) -> Name {
-Name { symbol: Symbol::intern(text), ctx: () }
-}
-
 /// Resolve a name from the text of token.
 fn resolve(raw_text: &str) -> Name {
-match raw_text.strip_prefix("r#") {
-// When `raw_text` starts with "r#" but the name does not coincide with any
-// keyword, we never need the prefix so we strip it.
-Some(text) if !is_raw_identifier(text) => Name::new_ref(text),
-// Keywords (in the current edition) *can* be used as a name in earlier editions of
-// Rust, e.g. "try" in Rust 2015. Even in such cases, we keep track of them in their
-// escaped form.
-None if is_raw_identifier(raw_text) => Name::new_text(&format!("r#{}", raw_text)),
-_ => Name::new_text(raw_text),
-}
+Name::new_text(raw_text.trim_start_matches("r#"))
 }

 /// A fake name for things missing in the source code.

@@ -159,19 +139,23 @@ impl Name {
 UnescapedName(self)
 }

-pub fn is_escaped(&self) -> bool {
-self.symbol.as_str().starts_with("r#")
+pub fn is_escaped(&self, edition: Edition) -> bool {
+is_raw_identifier(self.symbol.as_str(), edition)
 }

-pub fn display<'a>(&'a self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
+pub fn display<'a>(
+&'a self,
+db: &dyn crate::db::ExpandDatabase,
+edition: Edition,
+) -> impl fmt::Display + 'a {
 _ = db;
-Display { name: self }
+self.display_no_db(edition)
 }

 // FIXME: Remove this
 #[doc(hidden)]
-pub fn display_no_db(&self) -> impl fmt::Display + '_ {
-Display { name: self }
+pub fn display_no_db(&self, edition: Edition) -> impl fmt::Display + '_ {
+Display { name: self, needs_escaping: is_raw_identifier(self.symbol.as_str(), edition) }
 }

 pub fn symbol(&self) -> &Symbol {

@@ -183,39 +167,39 @@ impl Name {
 Self { symbol, ctx: () }
 }

-pub fn new_symbol_maybe_raw(sym: Symbol, raw: tt::IdentIsRaw, ctx: SyntaxContextId) -> Self {
-if raw.no() {
-Self { symbol: sym, ctx: () }
-} else {
-Name::new(sym.as_str(), raw, ctx)
-}
-}
-
 // FIXME: This needs to go once we have hygiene
 pub const fn new_symbol_root(sym: Symbol) -> Self {
 Self { symbol: sym, ctx: () }
 }

+#[inline]
+pub fn eq_ident(&self, ident: &str) -> bool {
+self.as_str() == ident.trim_start_matches("r#")
+}
 }

 struct Display<'a> {
 name: &'a Name,
+needs_escaping: bool,
 }

 impl fmt::Display for Display<'_> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+if self.needs_escaping {
+write!(f, "r#")?;
+}
 fmt::Display::fmt(self.name.symbol.as_str(), f)
 }
 }

 struct UnescapedDisplay<'a> {
-name: &'a UnescapedName<'a>,
+name: UnescapedName<'a>,
 }

 impl fmt::Display for UnescapedDisplay<'_> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-let symbol = &self.name.0.symbol.as_str();
-let text = symbol.strip_prefix("r#").unwrap_or(symbol);
-fmt::Display::fmt(&text, f)
+let symbol = self.name.0.symbol.as_str();
+fmt::Display::fmt(symbol, f)
 }
 }
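
Aside (illustration only, not part of this commit): the `Name` changes above store the symbol without `r#` and decide at display time, per edition, whether the `r#` prefix is needed. A standalone sketch of that idea; the `Edition` enum and `is_raw_identifier` below are simplified stand-ins rather than the real `span`/`syntax` items:

// Toy edition model and keyword table, just to demonstrate the display-time escaping.
#[derive(Clone, Copy, PartialEq)]
enum Edition {
    E2015,
    E2021,
    E2024,
}

// Stand-in for `syntax::utils::is_raw_identifier(text, edition)`.
fn is_raw_identifier(text: &str, edition: Edition) -> bool {
    match text {
        // `gen` is only reserved starting with the 2024 edition.
        "gen" => edition == Edition::E2024,
        // `try`, `async`, `await`, `dyn` became keywords in the 2018 edition.
        "try" | "async" | "await" | "dyn" => edition != Edition::E2015,
        // Always-keywords need escaping in every edition.
        "fn" | "struct" | "impl" | "match" => true,
        _ => false,
    }
}

// The stored symbol never carries `r#`; the prefix is added only when rendering
// for an edition in which the text is a keyword.
fn display_name(symbol: &str, edition: Edition) -> String {
    if is_raw_identifier(symbol, edition) {
        format!("r#{symbol}")
    } else {
        symbol.to_string()
    }
}

fn main() {
    // The same stored symbol renders differently depending on the target edition.
    assert_eq!(display_name("gen", Edition::E2021), "gen");
    assert_eq!(display_name("gen", Edition::E2024), "r#gen");
    assert_eq!(display_name("foo", Edition::E2024), "foo");
}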

@@ -29,7 +29,6 @@ chalk-ir.workspace = true
 chalk-recursive.workspace = true
 chalk-derive.workspace = true
 la-arena.workspace = true
-once_cell = "1.17.0"
 triomphe.workspace = true
 nohash-hasher.workspace = true
 typed-arena = "2.0.1"

@@ -3,6 +3,8 @@
 //! reference to a type with the field `bar`. This is an approximation of the
 //! logic in rustc (which lives in rustc_hir_analysis/check/autoderef.rs).

+use std::mem;
+
 use chalk_ir::cast::Cast;
 use hir_def::lang_item::LangItem;
 use hir_expand::name::Name;

@@ -37,7 +39,7 @@ pub fn autoderef(
 ) -> impl Iterator<Item = Ty> {
 let mut table = InferenceTable::new(db, env);
 let ty = table.instantiate_canonical(ty);
-let mut autoderef = Autoderef::new(&mut table, ty, false);
+let mut autoderef = Autoderef::new_no_tracking(&mut table, ty, false);
 let mut v = Vec::new();
 while let Some((ty, _steps)) = autoderef.next() {
 // `ty` may contain unresolved inference variables. Since there's no chance they would be

@@ -58,41 +60,76 @@ pub fn autoderef(
 v.into_iter()
 }

+trait TrackAutoderefSteps {
+fn len(&self) -> usize;
+fn push(&mut self, kind: AutoderefKind, ty: &Ty);
+}
+
+impl TrackAutoderefSteps for usize {
+fn len(&self) -> usize {
+*self
+}
+fn push(&mut self, _: AutoderefKind, _: &Ty) {
+*self += 1;
+}
+}
+impl TrackAutoderefSteps for Vec<(AutoderefKind, Ty)> {
+fn len(&self) -> usize {
+self.len()
+}
+fn push(&mut self, kind: AutoderefKind, ty: &Ty) {
+self.push((kind, ty.clone()));
+}
+}
+
 #[derive(Debug)]
-pub(crate) struct Autoderef<'a, 'db> {
-pub(crate) table: &'a mut InferenceTable<'db>,
+pub(crate) struct Autoderef<'table, 'db, T = Vec<(AutoderefKind, Ty)>> {
+pub(crate) table: &'table mut InferenceTable<'db>,
 ty: Ty,
 at_start: bool,
-steps: Vec<(AutoderefKind, Ty)>,
+steps: T,
 explicit: bool,
 }

-impl<'a, 'db> Autoderef<'a, 'db> {
-pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty, explicit: bool) -> Self {
+impl<'table, 'db> Autoderef<'table, 'db> {
+pub(crate) fn new(table: &'table mut InferenceTable<'db>, ty: Ty, explicit: bool) -> Self {
 let ty = table.resolve_ty_shallow(&ty);
 Autoderef { table, ty, at_start: true, steps: Vec::new(), explicit }
 }

-pub(crate) fn step_count(&self) -> usize {
-self.steps.len()
-}
-
 pub(crate) fn steps(&self) -> &[(AutoderefKind, Ty)] {
 &self.steps
 }
+}
+
+impl<'table, 'db> Autoderef<'table, 'db, usize> {
+pub(crate) fn new_no_tracking(
+table: &'table mut InferenceTable<'db>,
+ty: Ty,
+explicit: bool,
+) -> Self {
+let ty = table.resolve_ty_shallow(&ty);
+Autoderef { table, ty, at_start: true, steps: 0, explicit }
+}
+}
+
+#[allow(private_bounds)]
+impl<'table, 'db, T: TrackAutoderefSteps> Autoderef<'table, 'db, T> {
+pub(crate) fn step_count(&self) -> usize {
+self.steps.len()
+}

 pub(crate) fn final_ty(&self) -> Ty {
 self.ty.clone()
 }
 }

-impl Iterator for Autoderef<'_, '_> {
+impl<T: TrackAutoderefSteps> Iterator for Autoderef<'_, '_, T> {
 type Item = (Ty, usize);

 #[tracing::instrument(skip_all)]
 fn next(&mut self) -> Option<Self::Item> {
-if self.at_start {
-self.at_start = false;
+if mem::take(&mut self.at_start) {
 return Some((self.ty.clone(), 0));
 }

@@ -102,7 +139,7 @@ impl Iterator for Autoderef<'_, '_> {

 let (kind, new_ty) = autoderef_step(self.table, self.ty.clone(), self.explicit)?;

-self.steps.push((kind, self.ty.clone()));
+self.steps.push(kind, &self.ty);
 self.ty = new_ty;

 Some((self.ty.clone(), self.step_count()))

@@ -129,12 +166,8 @@ pub(crate) fn builtin_deref<'ty>(
 match ty.kind(Interner) {
 TyKind::Ref(.., ty) => Some(ty),
 TyKind::Raw(.., ty) if explicit => Some(ty),
-&TyKind::Adt(chalk_ir::AdtId(adt), ref substs) => {
-if crate::lang_items::is_box(db, adt) {
-substs.at(Interner, 0).ty(Interner)
-} else {
-None
-}
+&TyKind::Adt(chalk_ir::AdtId(adt), ref substs) if crate::lang_items::is_box(db, adt) => {
+substs.at(Interner, 0).ty(Interner)
 }
 _ => None,
 }
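
Aside (illustration only, not part of this commit): the `TrackAutoderefSteps` trait above lets `Autoderef` be generic over how steps are recorded — a `Vec` when callers need the actual steps, a plain `usize` counter when they only need the count. A minimal standalone sketch of the same technique with toy types:

// `TrackSteps`, `Walker` and the `u32` step payload are toy stand-ins for the
// trait/struct in the diff above; only the generic-storage idea is the same.
trait TrackSteps {
    fn len(&self) -> usize;
    fn push(&mut self, step: u32);
}

impl TrackSteps for usize {
    fn len(&self) -> usize { *self }
    // Counting only: ignore the payload, just bump the counter.
    fn push(&mut self, _: u32) { *self += 1; }
}

impl TrackSteps for Vec<u32> {
    // Inherent `Vec::len`/`Vec::push` are preferred over the trait methods here,
    // so these calls do not recurse.
    fn len(&self) -> usize { self.len() }
    fn push(&mut self, step: u32) { self.push(step); }
}

// Defaulting to full tracking keeps existing call sites unchanged.
struct Walker<T = Vec<u32>> {
    steps: T,
}

impl<T: TrackSteps> Walker<T> {
    fn step(&mut self, value: u32) {
        self.steps.push(value);
    }
    fn step_count(&self) -> usize {
        self.steps.len()
    }
}

fn main() {
    // Full tracking: remembers every step.
    let mut tracking = Walker { steps: Vec::new() };
    tracking.step(1);
    tracking.step(2);
    assert_eq!(tracking.step_count(), 2);
    assert_eq!(tracking.steps, vec![1, 2]);

    // Counting only: same interface, no allocation.
    let mut counting = Walker::<usize> { steps: 0 };
    counting.step(1);
    assert_eq!(counting.step_count(), 1);
}

The default type parameter means only callers that opt into the counter (like `new_no_tracking` above) see a different type; everyone else keeps the `Vec`-backed behaviour.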

@@ -5,6 +5,7 @@ use std::{iter, ops::ControlFlow, sync::Arc};

 use hir_expand::name::Name;
 use intern::sym;
+use span::Edition;
 use tracing::debug;

 use chalk_ir::{cast::Caster, fold::shift::Shift, CanonicalVarKinds};

@@ -424,18 +425,19 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {

 fn trait_name(&self, trait_id: chalk_ir::TraitId<Interner>) -> String {
 let id = from_chalk_trait_id(trait_id);
-self.db.trait_data(id).name.display(self.db.upcast()).to_string()
+self.db.trait_data(id).name.display(self.db.upcast(), self.edition()).to_string()
 }
 fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String {
+let edition = self.edition();
 match adt_id {
 hir_def::AdtId::StructId(id) => {
-self.db.struct_data(id).name.display(self.db.upcast()).to_string()
+self.db.struct_data(id).name.display(self.db.upcast(), edition).to_string()
 }
 hir_def::AdtId::EnumId(id) => {
-self.db.enum_data(id).name.display(self.db.upcast()).to_string()
+self.db.enum_data(id).name.display(self.db.upcast(), edition).to_string()
 }
 hir_def::AdtId::UnionId(id) => {
-self.db.union_data(id).name.display(self.db.upcast()).to_string()
+self.db.union_data(id).name.display(self.db.upcast(), edition).to_string()
 }
 }
 }

@@ -445,7 +447,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
 }
 fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
 let id = self.db.associated_ty_data(assoc_ty_id).name;
-self.db.type_alias_data(id).name.display(self.db.upcast()).to_string()
+self.db.type_alias_data(id).name.display(self.db.upcast(), self.edition()).to_string()
 }
 fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
 format!("Opaque_{}", opaque_ty_id.0)

@@ -519,6 +521,10 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
 }

 impl<'a> ChalkContext<'a> {
+fn edition(&self) -> Edition {
+self.db.crate_graph()[self.krate].edition
+}
+
 fn for_trait_impls(
 &self,
 trait_id: hir_def::TraitId,

@@ -843,7 +849,7 @@ fn impl_def_datum(
 "impl {:?}: {}{} where {:?}",
 chalk_id,
 if negative { "!" } else { "" },
-trait_ref.display(db),
+trait_ref.display(db, db.crate_graph()[krate].edition),
 where_clauses
 );

@@ -1,3 +1,4 @@
+use base_db::SourceDatabase;
 use chalk_ir::Substitution;
 use hir_def::db::DefDatabase;
 use rustc_apfloat::{

@@ -94,9 +95,10 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
 fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
 let mut err = String::new();
 let span_formatter = |file, range| format!("{file:?} {range:?}");
+let edition = db.crate_graph()[db.test_crate()].edition;
 match e {
-ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter),
-ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter),
+ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter, edition),
+ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter, edition),
 }
 .unwrap();
 err

@@ -110,7 +112,9 @@ fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const, ConstEvalEr
 .declarations()
 .find_map(|x| match x {
 hir_def::ModuleDefId::ConstId(x) => {
-if db.const_data(x).name.as_ref()?.display(db).to_string() == "GOAL" {
+if db.const_data(x).name.as_ref()?.display(db, file_id.edition()).to_string()
+== "GOAL"
+{
 Some(x)
 } else {
 None

@@ -243,6 +247,17 @@ fn casts() {
 check_number(r#"const GOAL: i32 = -12i8 as i32"#, -12);
 }

+#[test]
+fn floating_point_casts() {
+check_number(r#"const GOAL: usize = 12i32 as f32 as usize"#, 12);
+check_number(r#"const GOAL: i8 = -12i32 as f64 as i8"#, -12);
+check_number(r#"const GOAL: i32 = (-1ui8 as f32 + 2u64 as f32) as i32"#, 1);
+check_number(r#"const GOAL: i8 = (0./0.) as i8"#, 0);
+check_number(r#"const GOAL: i8 = (1./0.) as i8"#, 127);
+check_number(r#"const GOAL: i8 = (-1./0.) as i8"#, -128);
+check_number(r#"const GOAL: i64 = 1e18f64 as f32 as i64"#, 999999984306749440);
+}
+
 #[test]
 fn raw_pointer_equality() {
 check_number(
|
|
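The new `floating_point_casts` test leans on Rust's saturating float-to-integer `as` casts, so the expected values can be cross-checked with plain Rust, independent of rust-analyzer's const evaluator. A minimal sketch (standard library only; the constants simply mirror the assertions above):

    fn main() {
        // NaN saturates to 0, +/-infinity saturate to the integer bounds.
        assert_eq!((0.0f64 / 0.0) as i8, 0);
        assert_eq!((1.0f64 / 0.0) as i8, 127);
        assert_eq!((-1.0f64 / 0.0) as i8, -128);
        // Going through f32 loses precision before the final cast to i64.
        assert_eq!(1e18f64 as f32 as i64, 999_999_984_306_749_440);
        // Float-to-integer casts truncate toward zero.
        assert_eq!(12i32 as f32 as usize, 12);
        assert_eq!(-12i32 as f64 as i8, -12);
    }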
@@ -17,17 +17,18 @@ use std::fmt;

 use hir_def::{
 data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, AttrDefId, ConstId,
-EnumId, EnumVariantId, FunctionId, ItemContainerId, Lookup, ModuleDefId, ModuleId, StaticId,
-StructId, TraitId, TypeAliasId,
+EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId,
+StaticId, StructId, TraitId, TypeAliasId,
 };
 use hir_expand::{
 name::{AsName, Name},
-HirFileId, MacroFileIdExt,
+HirFileId, HirFileIdExt, MacroFileIdExt,
 };
 use intern::sym;
 use stdx::{always, never};
 use syntax::{
 ast::{self, HasName},
+utils::is_raw_identifier,
 AstNode, AstPtr, ToSmolStr,
 };

@@ -318,17 +319,21 @@ impl<'a> DeclValidator<'a> {
 /// This includes function parameters except for trait implementation associated functions.
 fn validate_func_body(&mut self, func: FunctionId) {
 let body = self.db.body(func.into());
+let edition = self.edition(func);
 let mut pats_replacements = body
 .pats
 .iter()
 .filter_map(|(pat_id, pat)| match pat {
 Pat::Bind { id, .. } => {
 let bind_name = &body.bindings[*id].name;
+let mut suggested_text =
+to_lower_snake_case(&bind_name.unescaped().display_no_db().to_smolstr())?;
+if is_raw_identifier(&suggested_text, edition) {
+suggested_text.insert_str(0, "r#");
+}
 let replacement = Replacement {
 current_name: bind_name.clone(),
-suggested_text: to_lower_snake_case(
-&bind_name.display_no_db().to_smolstr(),
-)?,
+suggested_text,
 expected_case: CaseType::LowerSnakeCase,
 };
 Some((pat_id, replacement))
@@ -377,6 +382,11 @@ impl<'a> DeclValidator<'a> {
 }
 }

+fn edition(&self, id: impl HasModule) -> span::Edition {
+let krate = id.krate(self.db.upcast());
+self.db.crate_graph()[krate].edition
+}
+
 fn validate_struct(&mut self, struct_id: StructId) {
 // Check the structure name.
 let non_camel_case_allowed =
@@ -405,16 +415,17 @@ impl<'a> DeclValidator<'a> {
 let VariantData::Record(fields) = data.variant_data.as_ref() else {
 return;
 };
+let edition = self.edition(struct_id);
 let mut struct_fields_replacements = fields
 .iter()
 .filter_map(|(_, field)| {
-to_lower_snake_case(&field.name.display_no_db().to_smolstr()).map(|new_name| {
-Replacement {
+to_lower_snake_case(&field.name.display_no_db(edition).to_smolstr()).map(
+|new_name| Replacement {
 current_name: field.name.clone(),
 suggested_text: new_name,
 expected_case: CaseType::LowerSnakeCase,
-}
-})
+},
+)
 })
 .peekable();

@@ -498,14 +509,17 @@ impl<'a> DeclValidator<'a> {
 self.validate_enum_variant_fields(*variant_id);
 }

+let edition = self.edition(enum_id);
 let mut enum_variants_replacements = data
 .variants
 .iter()
 .filter_map(|(_, name)| {
-to_camel_case(&name.display_no_db().to_smolstr()).map(|new_name| Replacement {
-current_name: name.clone(),
-suggested_text: new_name,
-expected_case: CaseType::UpperCamelCase,
+to_camel_case(&name.display_no_db(edition).to_smolstr()).map(|new_name| {
+Replacement {
+current_name: name.clone(),
+suggested_text: new_name,
+expected_case: CaseType::UpperCamelCase,
+}
 })
 })
 .peekable();
@@ -566,16 +580,17 @@ impl<'a> DeclValidator<'a> {
 let VariantData::Record(fields) = variant_data.variant_data.as_ref() else {
 return;
 };
+let edition = self.edition(variant_id);
 let mut variant_field_replacements = fields
 .iter()
 .filter_map(|(_, field)| {
-to_lower_snake_case(&field.name.display_no_db().to_smolstr()).map(|new_name| {
-Replacement {
+to_lower_snake_case(&field.name.display_no_db(edition).to_smolstr()).map(
+|new_name| Replacement {
 current_name: field.name.clone(),
 suggested_text: new_name,
 expected_case: CaseType::LowerSnakeCase,
-}
-})
+},
+)
 })
 .peekable();

@@ -704,18 +719,22 @@ impl<'a> DeclValidator<'a> {
 ) where
 N: AstNode + HasName + fmt::Debug,
 S: HasSource<Value = N>,
-L: Lookup<Data = S, Database<'a> = dyn DefDatabase + 'a>,
+L: Lookup<Data = S, Database<'a> = dyn DefDatabase + 'a> + HasModule + Copy,
 {
 let to_expected_case_type = match expected_case {
 CaseType::LowerSnakeCase => to_lower_snake_case,
 CaseType::UpperSnakeCase => to_upper_snake_case,
 CaseType::UpperCamelCase => to_camel_case,
 };
-let Some(replacement) =
-to_expected_case_type(&name.display(self.db.upcast()).to_smolstr()).map(|new_name| {
-Replacement { current_name: name.clone(), suggested_text: new_name, expected_case }
-})
-else {
+let edition = self.edition(item_id);
+let Some(replacement) = to_expected_case_type(
+&name.display(self.db.upcast(), edition).to_smolstr(),
+)
+.map(|new_name| Replacement {
+current_name: name.clone(),
+suggested_text: new_name,
+expected_case,
+}) else {
 return;
 };

@@ -748,12 +767,13 @@ impl<'a> DeclValidator<'a> {
 return;
 };

+let edition = file_id.original_file(self.db.upcast()).edition();
 let diagnostic = IncorrectCase {
 file: file_id,
 ident_type,
 ident: AstPtr::new(&name_ast),
 expected_case: replacement.expected_case,
-ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
+ident_text: replacement.current_name.display(self.db.upcast(), edition).to_string(),
 suggested_text: replacement.suggested_text,
 };

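The decl-check hunks above now prefix a suggested rename with `r#` when the snake_case suggestion happens to be a keyword in the crate's edition. A self-contained sketch of that idea; `is_keyword` here is only a stand-in for the real `is_raw_identifier` helper from the `syntax` crate, and the keyword list is illustrative:

    // Minimal sketch of the rename fix-up: keyword suggestions become raw identifiers.
    fn fix_suggestion(mut suggested: String, is_keyword: impl Fn(&str) -> bool) -> String {
        if is_keyword(&suggested) {
            suggested.insert_str(0, "r#");
        }
        suggested
    }

    fn main() {
        let keywords = ["match", "async", "try"];
        let is_kw = |s: &str| keywords.contains(&s);
        assert_eq!(fix_suggestion("match".into(), &is_kw), "r#match");
        assert_eq!(fix_suggestion("value".into(), &is_kw), "value");
    }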
@@ -4,6 +4,7 @@

 use std::fmt;

+use base_db::CrateId;
 use chalk_solve::rust_ir::AdtKind;
 use either::Either;
 use hir_def::{
@@ -15,6 +16,7 @@ use intern::sym;
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
 use rustc_pattern_analysis::constructor::Constructor;
+use span::Edition;
 use syntax::{
 ast::{self, UnaryOp},
 AstNode,
@@ -258,7 +260,13 @@ impl ExprValidator {
 if !witnesses.is_empty() {
 self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms {
 match_expr,
-uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, m_arms.is_empty()),
+uncovered_patterns: missing_match_arms(
+&cx,
+scrut_ty,
+witnesses,
+m_arms.is_empty(),
+self.owner.krate(db.upcast()),
+),
 });
 }
 }
@@ -345,7 +353,13 @@ impl ExprValidator {
 if !witnesses.is_empty() {
 self.diagnostics.push(BodyValidationDiagnostic::NonExhaustiveLet {
 pat,
-uncovered_patterns: missing_match_arms(&cx, ty, witnesses, false),
+uncovered_patterns: missing_match_arms(
+&cx,
+ty,
+witnesses,
+false,
+self.owner.krate(db.upcast()),
+),
 });
 }
 }
@@ -616,24 +630,26 @@ fn missing_match_arms<'p>(
 scrut_ty: &Ty,
 witnesses: Vec<WitnessPat<'p>>,
 arms_is_empty: bool,
+krate: CrateId,
 ) -> String {
-struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>);
+struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>, Edition);
 impl fmt::Display for DisplayWitness<'_, '_> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-let DisplayWitness(witness, cx) = *self;
+let DisplayWitness(witness, cx, edition) = *self;
 let pat = cx.hoist_witness_pat(witness);
-write!(f, "{}", pat.display(cx.db))
+write!(f, "{}", pat.display(cx.db, edition))
 }
 }

+let edition = cx.db.crate_graph()[krate].edition;
 let non_empty_enum = match scrut_ty.as_adt() {
 Some((AdtId::EnumId(e), _)) => !cx.db.enum_data(e).variants.is_empty(),
 _ => false,
 };
 if arms_is_empty && !non_empty_enum {
-format!("type `{}` is non-empty", scrut_ty.display(cx.db))
+format!("type `{}` is non-empty", scrut_ty.display(cx.db, edition))
 } else {
-let pat_display = |witness| DisplayWitness(witness, cx);
+let pat_display = |witness| DisplayWitness(witness, cx, edition);
 const LIMIT: usize = 3;
 match &*witnesses {
 [witness] => format!("`{}` not covered", pat_display(witness)),
@@ -14,6 +14,7 @@ use hir_def::{
 body::Body, data::adt::VariantData, hir::PatId, AdtId, EnumVariantId, LocalFieldId, VariantId,
 };
 use hir_expand::name::Name;
+use span::Edition;
 use stdx::{always, never};

 use crate::{
@@ -151,7 +152,11 @@ impl<'a> PatCtxt<'a> {
 match (bm, ty.kind(Interner)) {
 (BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
 (BindingMode::Ref(_), _) => {
-never!("`ref {}` has wrong type {:?}", name.display(self.db.upcast()), ty);
+never!(
+"`ref {}` has wrong type {:?}",
+name.display(self.db.upcast(), Edition::LATEST),
+ty
+);
 self.errors.push(PatternError::UnexpectedType);
 return Pat { ty: ty.clone(), kind: PatKind::Wild.into() };
 }
@@ -297,7 +302,7 @@ impl HirDisplay for Pat {
 PatKind::Wild => write!(f, "_"),
 PatKind::Never => write!(f, "!"),
 PatKind::Binding { name, subpattern } => {
-write!(f, "{}", name.display(f.db.upcast()))?;
+write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
 if let Some(subpattern) = subpattern {
 write!(f, " @ ")?;
 subpattern.hir_fmt(f)?;
@@ -317,14 +322,22 @@ impl HirDisplay for Pat {
 if let Some(variant) = variant {
 match variant {
 VariantId::EnumVariantId(v) => {
-write!(f, "{}", f.db.enum_variant_data(v).name.display(f.db.upcast()))?;
-}
-VariantId::StructId(s) => {
-write!(f, "{}", f.db.struct_data(s).name.display(f.db.upcast()))?
-}
-VariantId::UnionId(u) => {
-write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast()))?
+write!(
+f,
+"{}",
+f.db.enum_variant_data(v).name.display(f.db.upcast(), f.edition())
+)?;
 }
+VariantId::StructId(s) => write!(
+f,
+"{}",
+f.db.struct_data(s).name.display(f.db.upcast(), f.edition())
+)?,
+VariantId::UnionId(u) => write!(
+f,
+"{}",
+f.db.union_data(u).name.display(f.db.upcast(), f.edition())
+)?,
 };

 let variant_data = variant.variant_data(f.db.upcast());
@@ -341,7 +354,9 @@ impl HirDisplay for Pat {
 write!(
 f,
 "{}: ",
-rec_fields[p.field].name.display(f.db.upcast())
+rec_fields[p.field]
+.name
+.display(f.db.upcast(), f.edition())
 )?;
 p.pattern.hir_fmt(f)
 })
@@ -1,10 +1,10 @@
 //! Interface with `rustc_pattern_analysis`.

+use std::cell::LazyCell;
 use std::fmt;

 use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
 use intern::sym;
-use once_cell::unsync::Lazy;
 use rustc_pattern_analysis::{
 constructor::{Constructor, ConstructorSet, VariantVisibility},
 usefulness::{compute_match_usefulness, PlaceValidity, UsefulnessReport},
@@ -384,8 +384,9 @@ impl<'db> PatCx for MatchCheckCtx<'db> {
 let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();

 // Whether we must not match the fields of this variant exhaustively.
-let is_non_exhaustive = Lazy::new(|| self.is_foreign_non_exhaustive(adt));
-let visibilities = Lazy::new(|| self.db.field_visibilities(variant));
+let is_non_exhaustive =
+LazyCell::new(|| self.is_foreign_non_exhaustive(adt));
+let visibilities = LazyCell::new(|| self.db.field_visibilities(variant));

 self.list_variant_fields(ty, variant)
 .map(move |(fid, ty)| {
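The hunk above swaps `once_cell::unsync::Lazy` for the standard library's `std::cell::LazyCell` (stable since Rust 1.80, as far as I know). A minimal sketch of the type's behaviour under that assumption; the values are illustrative only:

    use std::cell::LazyCell;

    fn main() {
        // The closure runs at most once, on first dereference, which is the
        // behaviour the `is_non_exhaustive`/`visibilities` bindings above rely on.
        let lazy = LazyCell::new(|| {
            println!("computed");
            6 * 7
        });
        assert_eq!(*lazy, 42);
        assert_eq!(*lazy, 42); // "computed" is not printed a second time
    }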
@@ -33,7 +33,8 @@ use rustc_apfloat::{
 Float,
 };
 use smallvec::SmallVec;
-use stdx::{never, IsNoneOr};
+use span::Edition;
+use stdx::never;
 use triomphe::Arc;

 use crate::{
@@ -131,7 +132,11 @@ pub trait HirDisplay {

 /// Returns a `Display`able type that is human-readable.
 /// Use this for showing types to the user (e.g. diagnostics)
-fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
+fn display<'a>(
+&'a self,
+db: &'a dyn HirDatabase,
+edition: Edition,
+) -> HirDisplayWrapper<'a, Self>
 where
 Self: Sized,
 {
@@ -142,7 +147,7 @@ pub trait HirDisplay {
 limited_size: None,
 omit_verbose_types: false,
 closure_style: ClosureStyle::ImplFn,
-display_target: DisplayTarget::Diagnostics,
+display_target: DisplayTarget::Diagnostics { edition },
 show_container_bounds: false,
 }
 }
@@ -153,6 +158,7 @@ pub trait HirDisplay {
 &'a self,
 db: &'a dyn HirDatabase,
 max_size: Option<usize>,
+edition: Edition,
 ) -> HirDisplayWrapper<'a, Self>
 where
 Self: Sized,
@@ -164,7 +170,7 @@ pub trait HirDisplay {
 limited_size: None,
 omit_verbose_types: true,
 closure_style: ClosureStyle::ImplFn,
-display_target: DisplayTarget::Diagnostics,
+display_target: DisplayTarget::Diagnostics { edition },
 show_container_bounds: false,
 }
 }
@@ -175,6 +181,7 @@ pub trait HirDisplay {
 &'a self,
 db: &'a dyn HirDatabase,
 limited_size: Option<usize>,
+edition: Edition,
 ) -> HirDisplayWrapper<'a, Self>
 where
 Self: Sized,
@@ -186,7 +193,7 @@ pub trait HirDisplay {
 limited_size,
 omit_verbose_types: true,
 closure_style: ClosureStyle::ImplFn,
-display_target: DisplayTarget::Diagnostics,
+display_target: DisplayTarget::Diagnostics { edition },
 show_container_bounds: false,
 }
 }
@@ -242,6 +249,7 @@ pub trait HirDisplay {
 &'a self,
 db: &'a dyn HirDatabase,
 show_container_bounds: bool,
+edition: Edition,
 ) -> HirDisplayWrapper<'a, Self>
 where
 Self: Sized,
@@ -253,13 +261,23 @@ pub trait HirDisplay {
 limited_size: None,
 omit_verbose_types: false,
 closure_style: ClosureStyle::ImplFn,
-display_target: DisplayTarget::Diagnostics,
+display_target: DisplayTarget::Diagnostics { edition },
 show_container_bounds,
 }
 }
 }

 impl HirFormatter<'_> {
+pub fn edition(&self) -> Edition {
+match self.display_target {
+DisplayTarget::Diagnostics { edition } => edition,
+DisplayTarget::SourceCode { module_id, .. } => {
+self.db.crate_graph()[module_id.krate()].edition
+}
+DisplayTarget::Test => Edition::CURRENT,
+}
+}
+
 pub fn write_joined<T: HirDisplay>(
 &mut self,
 iter: impl IntoIterator<Item = T>,
@@ -324,7 +342,7 @@ pub enum DisplayTarget {
 /// Display types for inlays, doc popups, autocompletion, etc...
 /// Showing `{unknown}` or not qualifying paths is fine here.
 /// There's no reason for this to fail.
-Diagnostics,
+Diagnostics { edition: Edition },
 /// Display types for inserting them in source files.
 /// The generated code should compile, so paths need to be qualified.
 SourceCode { module_id: ModuleId, allow_opaque: bool },
@@ -460,7 +478,7 @@ impl HirDisplay for ProjectionTy {
 ">::{}",
 f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id))
 .name
-.display(f.db.upcast())
+.display(f.db.upcast(), f.edition())
 )?;
 let proj_params_count =
 self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
@@ -499,7 +517,7 @@ impl HirDisplay for Const {
 let id = from_placeholder_idx(f.db, *idx);
 let generics = generics(f.db.upcast(), id.parent);
 let param_data = &generics[id.local_id];
-write!(f, "{}", param_data.name().unwrap().display(f.db.upcast()))?;
+write!(f, "{}", param_data.name().unwrap().display(f.db.upcast(), f.edition()))?;
 Ok(())
 }
 ConstValue::Concrete(c) => match &c.interned {
@@ -633,7 +651,7 @@ fn render_const_scalar(
 TyKind::Adt(adt, _) if b.len() == 2 * size_of::<usize>() => match adt.0 {
 hir_def::AdtId::StructId(s) => {
 let data = f.db.struct_data(s);
-write!(f, "&{}", data.name.display(f.db.upcast()))?;
+write!(f, "&{}", data.name.display(f.db.upcast(), f.edition()))?;
 Ok(())
 }
 _ => f.write_str("<unsized-enum-or-union>"),
@@ -691,7 +709,7 @@ fn render_const_scalar(
 match adt.0 {
 hir_def::AdtId::StructId(s) => {
 let data = f.db.struct_data(s);
-write!(f, "{}", data.name.display(f.db.upcast()))?;
+write!(f, "{}", data.name.display(f.db.upcast(), f.edition()))?;
 let field_types = f.db.field_types(s.into());
 render_variant_after_name(
 &data.variant_data,
@@ -705,7 +723,7 @@ fn render_const_scalar(
 )
 }
 hir_def::AdtId::UnionId(u) => {
-write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast()))
+write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast(), f.edition()))
 }
 hir_def::AdtId::EnumId(e) => {
 let Ok(target_data_layout) = f.db.target_data_layout(trait_env.krate) else {
@@ -717,7 +735,7 @@ fn render_const_scalar(
 return f.write_str("<failed-to-detect-variant>");
 };
 let data = f.db.enum_variant_data(var_id);
-write!(f, "{}", data.name.display(f.db.upcast()))?;
+write!(f, "{}", data.name.display(f.db.upcast(), f.edition()))?;
 let field_types = f.db.field_types(var_id.into());
 render_variant_after_name(
 &data.variant_data,
@@ -802,11 +820,11 @@ fn render_variant_after_name(
 if matches!(data, VariantData::Record(_)) {
 write!(f, " {{")?;
 if let Some((id, data)) = it.next() {
-write!(f, " {}: ", data.name.display(f.db.upcast()))?;
+write!(f, " {}: ", data.name.display(f.db.upcast(), f.edition()))?;
 render_field(f, id)?;
 }
 for (id, data) in it {
-write!(f, ", {}: ", data.name.display(f.db.upcast()))?;
+write!(f, ", {}: ", data.name.display(f.db.upcast(), f.edition()))?;
 render_field(f, id)?;
 }
 write!(f, " }}")?;
@@ -1000,15 +1018,23 @@ impl HirDisplay for Ty {
 CallableDefId::FunctionId(ff) => {
 write!(f, "fn ")?;
 f.start_location_link(def.into());
-write!(f, "{}", db.function_data(ff).name.display(f.db.upcast()))?
+write!(
+f,
+"{}",
+db.function_data(ff).name.display(f.db.upcast(), f.edition())
+)?
 }
 CallableDefId::StructId(s) => {
 f.start_location_link(def.into());
-write!(f, "{}", db.struct_data(s).name.display(f.db.upcast()))?
+write!(f, "{}", db.struct_data(s).name.display(f.db.upcast(), f.edition()))?
 }
 CallableDefId::EnumVariantId(e) => {
 f.start_location_link(def.into());
-write!(f, "{}", db.enum_variant_data(e).name.display(f.db.upcast()))?
+write!(
+f,
+"{}",
+db.enum_variant_data(e).name.display(f.db.upcast(), f.edition())
+)?
 }
 };
 f.end_location_link();
@@ -1019,26 +1045,25 @@ impl HirDisplay for Ty {
 let (parent_len, self_param, type_, const_, impl_, lifetime) =
 generics.provenance_split();
 let parameters = parameters.as_slice(Interner);
+debug_assert_eq!(
+parameters.len(),
+parent_len + self_param as usize + type_ + const_ + impl_ + lifetime
+);
 // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
 if parameters.len() - impl_ > 0 {
 // `parameters` are in the order of fn's params (including impl traits), fn's lifetimes
+let parameters =
+generic_args_sans_defaults(f, Some(generic_def_id), parameters);
 let without_impl = self_param as usize + type_ + const_ + lifetime;
 // parent's params (those from enclosing impl or trait, if any).
 let (fn_params, parent_params) = parameters.split_at(without_impl + impl_);
-debug_assert_eq!(parent_params.len(), parent_len);
-
-let parent_params =
-generic_args_sans_defaults(f, Some(generic_def_id), parent_params);
-let fn_params =
-&generic_args_sans_defaults(f, Some(generic_def_id), fn_params)
-[0..without_impl];

 write!(f, "<")?;
 hir_fmt_generic_arguments(f, parent_params, None)?;
 if !parent_params.is_empty() && !fn_params.is_empty() {
 write!(f, ", ")?;
 }
-hir_fmt_generic_arguments(f, fn_params, None)?;
+hir_fmt_generic_arguments(f, &fn_params[0..without_impl], None)?;
 write!(f, ">")?;
 }
 }
@@ -1054,13 +1079,13 @@ impl HirDisplay for Ty {
 TyKind::Adt(AdtId(def_id), parameters) => {
 f.start_location_link((*def_id).into());
 match f.display_target {
-DisplayTarget::Diagnostics | DisplayTarget::Test => {
+DisplayTarget::Diagnostics { .. } | DisplayTarget::Test => {
 let name = match *def_id {
 hir_def::AdtId::StructId(it) => db.struct_data(it).name.clone(),
 hir_def::AdtId::UnionId(it) => db.union_data(it).name.clone(),
 hir_def::AdtId::EnumId(it) => db.enum_data(it).name.clone(),
 };
-write!(f, "{}", name.display(f.db.upcast()))?;
+write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
 }
 DisplayTarget::SourceCode { module_id, allow_opaque: _ } => {
 if let Some(path) = find_path::find_path(
@@ -1076,7 +1101,7 @@ impl HirDisplay for Ty {
 prefer_absolute: false,
 },
 ) {
-write!(f, "{}", path.display(f.db.upcast()))?;
+write!(f, "{}", path.display(f.db.upcast(), f.edition()))?;
 } else {
 return Err(HirDisplayError::DisplaySourceCodeError(
 DisplaySourceCodeError::PathNotFound,
@@ -1102,12 +1127,12 @@ impl HirDisplay for Ty {
 // Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types)
 if f.display_target.is_test() {
 f.start_location_link(trait_.into());
-write!(f, "{}", trait_data.name.display(f.db.upcast()))?;
+write!(f, "{}", trait_data.name.display(f.db.upcast(), f.edition()))?;
 f.end_location_link();
 write!(f, "::")?;

 f.start_location_link(type_alias.into());
-write!(f, "{}", type_alias_data.name.display(f.db.upcast()))?;
+write!(f, "{}", type_alias_data.name.display(f.db.upcast(), f.edition()))?;
 f.end_location_link();
 // Note that the generic args for the associated type come before those for the
 // trait (including the self type).
@@ -1125,7 +1150,7 @@ impl HirDisplay for Ty {
 let alias = from_foreign_def_id(*type_alias);
 let type_alias = db.type_alias_data(alias);
 f.start_location_link(alias.into());
-write!(f, "{}", type_alias.name.display(f.db.upcast()))?;
+write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?;
 f.end_location_link();
 }
 TyKind::OpaqueType(opaque_ty_id, parameters) => {
@@ -1257,7 +1282,10 @@ impl HirDisplay for Ty {
 write!(
 f,
 "{}",
-p.name.clone().unwrap_or_else(Name::missing).display(f.db.upcast())
+p.name
+.clone()
+.unwrap_or_else(Name::missing)
+.display(f.db.upcast(), f.edition())
 )?
 }
 TypeParamProvenance::ArgumentImplTrait => {
@@ -1290,7 +1318,7 @@ impl HirDisplay for Ty {
 }
 },
 TypeOrConstParamData::ConstParamData(p) => {
-write!(f, "{}", p.name.display(f.db.upcast()))?;
+write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?;
 }
 }
 }
@@ -1410,17 +1438,7 @@ fn hir_fmt_generics(

 let parameters_to_write = generic_args_sans_defaults(f, generic_def, parameters);

-// FIXME: Remote this
-// most of our lifetimes will be errors as we lack elision and inference
-// so don't render them for now
-let only_err_lifetimes = !cfg!(test)
-&& parameters_to_write.iter().all(|arg| {
-matches!(
-arg.data(Interner),
-chalk_ir::GenericArgData::Lifetime(it) if *it.data(Interner) == LifetimeData::Error
-)
-});
-if !parameters_to_write.is_empty() && !only_err_lifetimes {
+if !parameters_to_write.is_empty() {
 write!(f, "<")?;
 hir_fmt_generic_arguments(f, parameters_to_write, self_)?;
 write!(f, ">")?;
@@ -1461,12 +1479,14 @@ fn generic_args_sans_defaults<'ga>(
 }
 // otherwise, if the arg is equal to the param default, hide it (unless the
 // default is an error which can happen for the trait Self type)
-#[allow(unstable_name_collisions)]
-IsNoneOr::is_none_or(default_parameters.get(i), |default_parameter| {
-// !is_err(default_parameter.skip_binders())
-// &&
-arg != &default_parameter.clone().substitute(Interner, &parameters)
-})
+match default_parameters.get(i) {
+None => true,
+Some(default_parameter) => {
+// !is_err(default_parameter.skip_binders())
+// &&
+arg != &default_parameter.clone().substitute(Interner, &parameters)
+}
+}
 };
 let mut default_from = 0;
 for (i, parameter) in parameters.iter().enumerate() {
@@ -1495,18 +1515,6 @@ fn hir_fmt_generic_arguments(
 None => (parameters, &[][..]),
 };
 for generic_arg in lifetimes.iter().chain(ty_or_const) {
-// FIXME: Remove this
-// most of our lifetimes will be errors as we lack elision and inference
-// so don't render them for now
-if !cfg!(test)
-&& matches!(
-generic_arg.lifetime(Interner),
-Some(l) if ***l.interned() == LifetimeData::Error
-)
-{
-continue;
-}
-
 if !mem::take(&mut first) {
 write!(f, ", ")?;
 }
@@ -1633,7 +1641,7 @@ fn write_bounds_like_dyn_trait(
 // existential) here, which is the only thing that's
 // possible in actual Rust, and hence don't print it
 f.start_location_link(trait_.into());
-write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
+write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast(), f.edition()))?;
 f.end_location_link();
 if is_fn_trait {
 if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner) {
@@ -1707,7 +1715,7 @@ fn write_bounds_like_dyn_trait(
 let assoc_ty_id = from_assoc_type_id(proj.associated_ty_id);
 let type_alias = f.db.type_alias_data(assoc_ty_id);
 f.start_location_link(assoc_ty_id.into());
-write!(f, "{}", type_alias.name.display(f.db.upcast()))?;
+write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?;
 f.end_location_link();

 let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self();
@@ -1771,7 +1779,7 @@ fn fmt_trait_ref(
 }
 let trait_ = tr.hir_trait_id();
 f.start_location_link(trait_.into());
-write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
+write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast(), f.edition()))?;
 f.end_location_link();
 let substs = tr.substitution.as_slice(Interner);
 hir_fmt_generics(f, &substs[1..], None, substs[0].ty(Interner))
@@ -1797,7 +1805,11 @@ impl HirDisplay for WhereClause {
 write!(f, ">::",)?;
 let type_alias = from_assoc_type_id(projection_ty.associated_ty_id);
 f.start_location_link(type_alias.into());
-write!(f, "{}", f.db.type_alias_data(type_alias).name.display(f.db.upcast()),)?;
+write!(
+f,
+"{}",
+f.db.type_alias_data(type_alias).name.display(f.db.upcast(), f.edition()),
+)?;
 f.end_location_link();
 write!(f, " = ")?;
 ty.hir_fmt(f)?;
@@ -1833,14 +1845,20 @@ impl HirDisplay for LifetimeData {
 let id = lt_from_placeholder_idx(f.db, *idx);
 let generics = generics(f.db.upcast(), id.parent);
 let param_data = &generics[id.local_id];
-write!(f, "{}", param_data.name.display(f.db.upcast()))?;
+write!(f, "{}", param_data.name.display(f.db.upcast(), f.edition()))?;
 Ok(())
 }
 _ if f.display_target.is_source_code() => write!(f, "'_"),
 LifetimeData::BoundVar(idx) => idx.hir_fmt(f),
 LifetimeData::InferenceVar(_) => write!(f, "_"),
 LifetimeData::Static => write!(f, "'static"),
-LifetimeData::Error => write!(f, "'?"),
+LifetimeData::Error => {
+if cfg!(test) {
+write!(f, "'?")
+} else {
+write!(f, "'_")
+}
+}
 LifetimeData::Erased => write!(f, "'<erased>"),
 LifetimeData::Phantom(void, _) => match *void {},
 }
@@ -1855,7 +1873,7 @@ impl HirDisplay for DomainGoal {
 wc.hir_fmt(f)?;
 write!(f, ")")?;
 }
-_ => write!(f, "?")?,
+_ => write!(f, "_")?,
 }
 Ok(())
 }
@@ -1914,7 +1932,7 @@ impl HirDisplay for TypeRef {
 };
 write!(f, "&")?;
 if let Some(lifetime) = lifetime {
-write!(f, "{} ", lifetime.name.display(f.db.upcast()))?;
+write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?;
 }
 write!(f, "{mutability}")?;
 inner.hir_fmt(f)?;
@@ -1922,7 +1940,7 @@ impl HirDisplay for TypeRef {
 TypeRef::Array(inner, len) => {
 write!(f, "[")?;
 inner.hir_fmt(f)?;
-write!(f, "; {}]", len.display(f.db.upcast()))?;
+write!(f, "; {}]", len.display(f.db.upcast(), f.edition()))?;
 }
 TypeRef::Slice(inner) => {
 write!(f, "[")?;
@@ -1943,7 +1961,7 @@ impl HirDisplay for TypeRef {
 for index in 0..function_parameters.len() {
 let (param_name, param_type) = &function_parameters[index];
 if let Some(name) = param_name {
-write!(f, "{}: ", name.display(f.db.upcast()))?;
+write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?;
 }

 param_type.hir_fmt(f)?;
@@ -2001,12 +2019,15 @@ impl HirDisplay for TypeBound {
 }
 path.hir_fmt(f)
 }
-TypeBound::Lifetime(lifetime) => write!(f, "{}", lifetime.name.display(f.db.upcast())),
+TypeBound::Lifetime(lifetime) => {
+write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))
+}
 TypeBound::ForLifetime(lifetimes, path) => {
+let edition = f.edition();
 write!(
 f,
 "for<{}> ",
-lifetimes.iter().map(|it| it.display(f.db.upcast())).format(", ")
+lifetimes.iter().map(|it| it.display(f.db.upcast(), edition)).format(", ")
 )?;
 path.hir_fmt(f)
 }
@@ -2072,7 +2093,7 @@ impl HirDisplay for Path {
 if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 {
 write!(f, "::")?;
 }
-write!(f, "{}", segment.name.display(f.db.upcast()))?;
+write!(f, "{}", segment.name.display(f.db.upcast(), f.edition()))?;
 if let Some(generic_args) = segment.args_and_bindings {
 // We should be in type context, so format as `Foo<Bar>` instead of `Foo::<Bar>`.
 // Do we actually format expressions?
@@ -2117,7 +2138,7 @@ impl HirDisplay for Path {
 } else {
 write!(f, ", ")?;
 }
-write!(f, "{}", binding.name.display(f.db.upcast()))?;
+write!(f, "{}", binding.name.display(f.db.upcast(), f.edition()))?;
 match &binding.type_ref {
 Some(ty) => {
 write!(f, " = ")?;
@@ -2151,9 +2172,11 @@ impl HirDisplay for hir_def::path::GenericArg {
 fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
 match self {
 hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f),
-hir_def::path::GenericArg::Const(c) => write!(f, "{}", c.display(f.db.upcast())),
+hir_def::path::GenericArg::Const(c) => {
+write!(f, "{}", c.display(f.db.upcast(), f.edition()))
+}
 hir_def::path::GenericArg::Lifetime(lifetime) => {
-write!(f, "{}", lifetime.name.display(f.db.upcast()))
+write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))
 }
 }
 }
 }
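The display changes above turn `DisplayTarget::Diagnostics` into a struct variant carrying an `Edition`, and `HirFormatter::edition()` reads it back out. A self-contained sketch of that pattern; `Edition`, `DisplayTarget`, and `Formatter` here are simplified stand-ins for the hir-ty types (the real `SourceCode` variant, which needs a database lookup, is omitted):

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Edition { Edition2021, Edition2024 }

    #[derive(Clone, Copy)]
    enum DisplayTarget {
        Diagnostics { edition: Edition },
        Test,
    }

    struct Formatter { display_target: DisplayTarget }

    impl Formatter {
        // Diagnostics carry their own edition; test output pins a fixed one.
        fn edition(&self) -> Edition {
            match self.display_target {
                DisplayTarget::Diagnostics { edition } => edition,
                DisplayTarget::Test => Edition::Edition2024,
            }
        }
    }

    fn main() {
        let diag = Formatter { display_target: DisplayTarget::Diagnostics { edition: Edition::Edition2021 } };
        assert_eq!(diag.edition(), Edition::Edition2021);
        let test = Formatter { display_target: DisplayTarget::Test };
        assert_eq!(test.edition(), Edition::Edition2024);
    }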
@ -22,7 +22,7 @@ mod pat;
|
||||||
mod path;
|
mod path;
|
||||||
pub(crate) mod unify;
|
pub(crate) mod unify;
|
||||||
|
|
||||||
use std::{convert::identity, iter, ops::Index};
|
use std::{cell::OnceCell, convert::identity, iter, ops::Index};
|
||||||
|
|
||||||
use chalk_ir::{
|
use chalk_ir::{
|
||||||
cast::Cast,
|
cast::Cast,
|
||||||
|
@ -49,17 +49,17 @@ use hir_expand::name::Name;
|
||||||
use indexmap::IndexSet;
|
use indexmap::IndexSet;
|
||||||
use intern::sym;
|
use intern::sym;
|
||||||
use la_arena::{ArenaMap, Entry};
|
use la_arena::{ArenaMap, Entry};
|
||||||
use once_cell::unsync::OnceCell;
|
|
||||||
use rustc_hash::{FxHashMap, FxHashSet};
|
use rustc_hash::{FxHashMap, FxHashSet};
|
||||||
use stdx::{always, never};
|
use stdx::{always, never};
|
||||||
use triomphe::Arc;
|
use triomphe::Arc;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
db::HirDatabase,
|
db::HirDatabase,
|
||||||
error_lifetime, fold_tys,
|
fold_tys,
|
||||||
generics::Generics,
|
generics::Generics,
|
||||||
infer::{coerce::CoerceMany, unify::InferenceTable},
|
infer::{coerce::CoerceMany, unify::InferenceTable},
|
||||||
lower::ImplTraitLoweringMode,
|
lower::ImplTraitLoweringMode,
|
||||||
|
mir::MirSpan,
|
||||||
to_assoc_type_id,
|
to_assoc_type_id,
|
||||||
traits::FnTrait,
|
traits::FnTrait,
|
||||||
utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
|
utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
|
||||||
|
@ -328,13 +328,13 @@ pub struct Adjustment {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Adjustment {
|
impl Adjustment {
|
||||||
pub fn borrow(m: Mutability, ty: Ty) -> Self {
|
pub fn borrow(m: Mutability, ty: Ty, lt: Lifetime) -> Self {
|
||||||
let ty = TyKind::Ref(m, error_lifetime(), ty).intern(Interner);
|
let ty = TyKind::Ref(m, lt.clone(), ty).intern(Interner);
|
||||||
Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty }
|
Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(lt, m)), target: ty }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||||
pub enum Adjust {
|
pub enum Adjust {
|
||||||
/// Go from ! to any type.
|
/// Go from ! to any type.
|
||||||
NeverToAny,
|
NeverToAny,
|
||||||
|
@ -354,18 +354,18 @@ pub enum Adjust {
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||||
pub struct OverloadedDeref(pub Option<Mutability>);
|
pub struct OverloadedDeref(pub Option<Mutability>);
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||||
pub enum AutoBorrow {
|
pub enum AutoBorrow {
|
||||||
/// Converts from T to &T.
|
/// Converts from T to &T.
|
||||||
Ref(Mutability),
|
Ref(Lifetime, Mutability),
|
||||||
/// Converts from T to *T.
|
/// Converts from T to *T.
|
||||||
RawPtr(Mutability),
|
RawPtr(Mutability),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AutoBorrow {
|
impl AutoBorrow {
|
||||||
fn mutability(self) -> Mutability {
|
fn mutability(&self) -> Mutability {
|
||||||
let (AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) = self;
|
let (AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) = self;
|
||||||
m
|
*m
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -554,6 +554,12 @@ pub(crate) struct InferenceContext<'a> {
|
||||||
|
|
||||||
// fields related to closure capture
|
// fields related to closure capture
|
||||||
current_captures: Vec<CapturedItemWithoutTy>,
|
current_captures: Vec<CapturedItemWithoutTy>,
|
||||||
|
/// A stack that has an entry for each projection in the current capture.
|
||||||
|
///
|
||||||
|
/// For example, in `a.b.c`, we capture the spans of `a`, `a.b`, and `a.b.c`.
|
||||||
|
/// We do that because sometimes we truncate projections (when a closure captures
|
||||||
|
/// both `a.b` and `a.b.c`), and we want to provide accurate spans in this case.
|
||||||
|
current_capture_span_stack: Vec<MirSpan>,
|
||||||
current_closure: Option<ClosureId>,
|
current_closure: Option<ClosureId>,
|
||||||
/// Stores the list of closure ids that need to be analyzed before this closure. See the
|
/// Stores the list of closure ids that need to be analyzed before this closure. See the
|
||||||
/// comment on `InferenceContext::sort_closures`
|
/// comment on `InferenceContext::sort_closures`
|
||||||
|
@ -605,6 +611,11 @@ fn find_continuable(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
enum ImplTraitReplacingMode {
|
||||||
|
ReturnPosition(FxHashSet<Ty>),
|
||||||
|
TypeAlias,
|
||||||
|
}
|
||||||
|
|
||||||
impl<'a> InferenceContext<'a> {
|
impl<'a> InferenceContext<'a> {
|
||||||
fn new(
|
fn new(
|
||||||
db: &'a dyn HirDatabase,
|
db: &'a dyn HirDatabase,
|
||||||
|
@ -630,6 +641,7 @@ impl<'a> InferenceContext<'a> {
|
||||||
breakables: Vec::new(),
|
breakables: Vec::new(),
|
||||||
deferred_cast_checks: Vec::new(),
|
deferred_cast_checks: Vec::new(),
|
||||||
current_captures: Vec::new(),
|
current_captures: Vec::new(),
|
||||||
|
current_capture_span_stack: Vec::new(),
|
||||||
current_closure: None,
|
current_closure: None,
|
||||||
deferred_closures: FxHashMap::default(),
|
deferred_closures: FxHashMap::default(),
|
||||||
closure_dependencies: FxHashMap::default(),
|
closure_dependencies: FxHashMap::default(),
|
||||||
|
@ -826,13 +838,19 @@ impl<'a> InferenceContext<'a> {
|
||||||
self.write_binding_ty(self_param, ty);
|
self.write_binding_ty(self_param, ty);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let mut params_and_ret_tys = Vec::new();
|
let mut tait_candidates = FxHashSet::default();
|
||||||
for (ty, pat) in param_tys.zip(&*self.body.params) {
|
for (ty, pat) in param_tys.zip(&*self.body.params) {
|
||||||
let ty = self.insert_type_vars(ty);
|
let ty = self.insert_type_vars(ty);
|
||||||
let ty = self.normalize_associated_types_in(ty);
|
let ty = self.normalize_associated_types_in(ty);
|
||||||
|
|
||||||
self.infer_top_pat(*pat, &ty);
|
self.infer_top_pat(*pat, &ty);
|
||||||
params_and_ret_tys.push(ty);
|
if ty
|
||||||
|
.data(Interner)
|
||||||
|
.flags
|
||||||
|
.intersects(TypeFlags::HAS_TY_OPAQUE.union(TypeFlags::HAS_TY_INFER))
|
||||||
|
{
|
||||||
|
tait_candidates.insert(ty);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
let return_ty = &*data.ret_type;
|
let return_ty = &*data.ret_type;
|
||||||
|
|
||||||
|
@ -845,7 +863,12 @@ impl<'a> InferenceContext<'a> {
|
||||||
let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) {
|
let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) {
|
||||||
// RPIT opaque types use substitution of their parent function.
|
// RPIT opaque types use substitution of their parent function.
|
||||||
let fn_placeholders = TyBuilder::placeholder_subst(self.db, func);
|
let fn_placeholders = TyBuilder::placeholder_subst(self.db, func);
|
||||||
let result = self.insert_inference_vars_for_impl_trait(return_ty, fn_placeholders);
|
let mut mode = ImplTraitReplacingMode::ReturnPosition(FxHashSet::default());
|
||||||
|
let result =
|
||||||
|
self.insert_inference_vars_for_impl_trait(return_ty, fn_placeholders, &mut mode);
|
||||||
|
if let ImplTraitReplacingMode::ReturnPosition(taits) = mode {
|
||||||
|
tait_candidates.extend(taits);
|
||||||
|
}
|
||||||
let rpits = rpits.skip_binders();
|
let rpits = rpits.skip_binders();
|
||||||
for (id, _) in rpits.impl_traits.iter() {
|
for (id, _) in rpits.impl_traits.iter() {
|
||||||
if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) {
|
if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) {
|
||||||
|
@ -864,11 +887,23 @@ impl<'a> InferenceContext<'a> {
|
||||||
// Functions might be defining usage sites of TAITs.
|
// Functions might be defining usage sites of TAITs.
|
||||||
// To define an TAITs, that TAIT must appear in the function's signatures.
|
// To define an TAITs, that TAIT must appear in the function's signatures.
|
||||||
// So, it suffices to check for params and return types.
|
// So, it suffices to check for params and return types.
|
||||||
params_and_ret_tys.push(self.return_ty.clone());
|
if self
|
||||||
self.make_tait_coercion_table(params_and_ret_tys.iter());
|
.return_ty
|
||||||
|
.data(Interner)
|
||||||
|
.flags
|
||||||
|
.intersects(TypeFlags::HAS_TY_OPAQUE.union(TypeFlags::HAS_TY_INFER))
|
||||||
|
{
|
||||||
|
tait_candidates.insert(self.return_ty.clone());
|
||||||
|
}
|
||||||
|
self.make_tait_coercion_table(tait_candidates.iter());
|
||||||
}
|
}
|
||||||
|
|
||||||
-    fn insert_inference_vars_for_impl_trait<T>(&mut self, t: T, placeholders: Substitution) -> T
+    fn insert_inference_vars_for_impl_trait<T>(
+        &mut self,
+        t: T,
+        placeholders: Substitution,
+        mode: &mut ImplTraitReplacingMode,
+    ) -> T
     where
         T: crate::HasInterner<Interner = Interner> + crate::TypeFoldable<Interner>,
     {

@@ -881,10 +916,31 @@ impl<'a> InferenceContext<'a> {
                 };
                 let (impl_traits, idx) =
                     match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) {
+                        // We don't replace opaque types from other kind with inference vars
+                        // because `insert_inference_vars_for_impl_traits` for each kinds
+                        // and unreplaced opaque types of other kind are resolved while
+                        // inferencing because of `tait_coercion_table`.
+                        // Moreover, calling `insert_inference_vars_for_impl_traits` with same
+                        // `placeholders` for other kind may cause trouble because
+                        // the substs for the bounds of each impl traits do not match
                         ImplTraitId::ReturnTypeImplTrait(def, idx) => {
+                            if matches!(mode, ImplTraitReplacingMode::TypeAlias) {
+                                // RPITs don't have `tait_coercion_table`, so use inserted inference
+                                // vars for them.
+                                if let Some(ty) = self.result.type_of_rpit.get(idx) {
+                                    return ty.clone();
+                                }
+                                return ty;
+                            }
                             (self.db.return_type_impl_traits(def), idx)
                         }
                         ImplTraitId::TypeAliasImplTrait(def, idx) => {
+                            if let ImplTraitReplacingMode::ReturnPosition(taits) = mode {
+                                // Gather TAITs while replacing RPITs because TAITs inside RPITs
+                                // may not visited while replacing TAITs
+                                taits.insert(ty.clone());
+                                return ty;
+                            }
                             (self.db.type_alias_impl_traits(def), idx)
                         }
                         _ => unreachable!(),
@@ -893,16 +949,20 @@ impl<'a> InferenceContext<'a> {
                             return ty;
                         }
                     };
         let bounds = (*impl_traits)
-            .map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.iter()));
+            .map_ref(|its| its.impl_traits[idx].bounds.map_ref(|it| it.iter()));
         let var = self.table.new_type_var();
         let var_subst = Substitution::from1(Interner, var.clone());
         for bound in bounds {
-            let predicate = bound.map(|it| it.cloned()).substitute(Interner, &placeholders);
+            let predicate = bound.map(|it| it.cloned());
+            let predicate = predicate.substitute(Interner, &placeholders);
             let (var_predicate, binders) =
                 predicate.substitute(Interner, &var_subst).into_value_and_skipped_binders();
             always!(binders.is_empty(Interner)); // quantified where clauses not yet handled
-            let var_predicate = self
-                .insert_inference_vars_for_impl_trait(var_predicate, placeholders.clone());
+            let var_predicate = self.insert_inference_vars_for_impl_trait(
+                var_predicate,
+                placeholders.clone(),
+                mode,
+            );
             self.push_obligation(var_predicate.cast(Interner));
         }
         self.result.type_of_rpit.insert(idx, var.clone());
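The loop above substitutes each declared bound of the opaque type and registers it as an obligation on the fresh inference variable. In surface Rust terms, the bounds written on an impl-Trait return type are the only facts callers (and that inference variable) may rely on; a small sketch with made-up names:

    use std::fmt::Debug;

    // The hidden type is `std::vec::IntoIter<i32>`, but callers only get to
    // use the bounds spelled out here: `Iterator<Item = i32>` and `Debug`.
    fn evens(up_to: i32) -> impl Iterator<Item = i32> + Debug {
        (0..up_to).filter(|n| n % 2 == 0).collect::<Vec<_>>().into_iter()
    }

    fn main() {
        let it = evens(10);
        println!("{it:?}");      // allowed via the `Debug` bound
        let sum: i32 = it.sum(); // allowed via the `Iterator` bound
        assert_eq!(sum, 20);
    }
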
@@ -1039,7 +1099,11 @@ impl<'a> InferenceContext<'a> {
                     self.db.lookup_intern_impl_trait_id(id.into())
                 {
                     let subst = TyBuilder::placeholder_subst(self.db, alias_id);
-                    let ty = self.insert_inference_vars_for_impl_trait(ty, subst);
+                    let ty = self.insert_inference_vars_for_impl_trait(
+                        ty,
+                        subst,
+                        &mut ImplTraitReplacingMode::TypeAlias,
+                    );
                     Some((id, ty))
                 } else {
                     None
@@ -1436,7 +1500,8 @@ impl<'a> InferenceContext<'a> {
         let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0);
         let ty = match ty.kind(Interner) {
             TyKind::Alias(AliasTy::Projection(proj_ty)) => {
-                self.db.normalize_projection(proj_ty.clone(), self.table.trait_env.clone())
+                let ty = self.table.normalize_projection_ty(proj_ty.clone());
+                self.table.resolve_ty_shallow(&ty)
             }
             _ => ty,
         };

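The replaced call resolves a path whose qualifier turned out to be an associated-type projection; the projection is now normalized through the local inference table and shallowly resolved rather than via the database query. A hypothetical example of the kind of path involved (all names invented):

    trait Storage {
        type Handle;
    }

    struct Disk;

    #[derive(Debug, PartialEq)]
    struct DiskHandle(u32);

    impl Storage for Disk {
        type Handle = DiskHandle;
    }

    impl DiskHandle {
        fn open(id: u32) -> Self {
            DiskHandle(id)
        }
    }

    fn main() {
        // `<Disk as Storage>::Handle` is a projection; it must be normalized
        // to `DiskHandle` before the remaining `::open` segment can resolve.
        let h = <Disk as Storage>::Handle::open(7);
        assert_eq!(h, DiskHandle(7));
    }
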
@@ -18,8 +18,9 @@ use hir_def::{
 use hir_expand::name::Name;
 use intern::sym;
 use rustc_hash::FxHashMap;
-use smallvec::SmallVec;
-use stdx::never;
+use smallvec::{smallvec, SmallVec};
+use stdx::{format_to, never};
+use syntax::utils::is_raw_identifier;

 use crate::{
     db::{HirDatabase, InternedClosure},
@@ -236,7 +237,13 @@ pub enum CaptureKind {
 pub struct CapturedItem {
     pub(crate) place: HirPlace,
     pub(crate) kind: CaptureKind,
-    pub(crate) span: MirSpan,
+    /// The inner vec is the stacks; the outer vec is for each capture reference.
+    ///
+    /// Even though we always report only the last span (i.e. the most inclusive span),
+    /// we need to keep them all, since when a closure occurs inside a closure, we
+    /// copy all captures of the inner closure to the outer closure, and then we may
+    /// truncate them, and we want the correct span to be reported.
+    span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
     pub(crate) ty: Binders<Ty>,
 }

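A hedged illustration of why a capture keeps a stack of spans per reference: when a closure sits inside another closure, the inner closure's captures are copied into the outer one and may later be truncated, yet the reported span should still point at a real use site.

    struct Point {
        x: i32,
        y: i32,
    }

    fn main() {
        let p = Point { x: 1, y: 2 };
        let outer = move || {
            // The inner closure captures `p.x` and `p.y` at two different spans.
            let inner = || p.x + p.y;
            // Because `outer` is a `move` closure, both captures are copied to it
            // and may be truncated to the whole `p`; keeping every original span
            // lets a tool report `p.x`, `p.y`, or the identifier itself.
            inner()
        };
        assert_eq!(outer(), 3);
    }
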
@@ -245,6 +252,11 @@ impl CapturedItem {
         self.place.local
     }

+    /// Returns whether this place has any field (aka. non-deref) projections.
+    pub fn has_field_projections(&self) -> bool {
+        self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref))
+    }
+
     pub fn ty(&self, subst: &Substitution) -> Ty {
         self.ty.clone().substitute(Interner, utils::ClosureSubst(subst).parent_subst())
     }
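A small sketch of the distinction `has_field_projections` draws (names invented): a captured place such as `s.0` contains a field projection, while a place reached purely through derefs does not.

    fn main() {
        let s = (String::from("a"), 1u8);
        let boxed = Box::new(5i32);

        // Captured place `s.0`: one field projection, so the predicate above
        // would hold for this capture.
        let by_field = move || s.0.len();

        // The `move` closure takes the whole `boxed`; its place involves no
        // field projection, so the predicate would not hold.
        let by_deref = move || *boxed + 1;

        assert_eq!(by_field(), 1);
        assert_eq!(by_deref(), 6);
    }
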
@ -253,9 +265,106 @@ impl CapturedItem {
|
||||||
self.kind
|
self.kind
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn spans(&self) -> SmallVec<[MirSpan; 3]> {
|
||||||
|
self.span_stacks.iter().map(|stack| *stack.last().expect("empty span stack")).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Converts the place to a name that can be inserted into source code.
|
||||||
|
pub fn place_to_name(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
|
||||||
|
let body = db.body(owner);
|
||||||
|
let mut result = body[self.place.local].name.unescaped().display(db.upcast()).to_string();
|
||||||
|
for proj in &self.place.projections {
|
||||||
|
match proj {
|
||||||
|
ProjectionElem::Deref => {}
|
||||||
|
ProjectionElem::Field(Either::Left(f)) => {
|
||||||
|
match &*f.parent.variant_data(db.upcast()) {
|
||||||
|
VariantData::Record(fields) => {
|
||||||
|
result.push('_');
|
||||||
|
result.push_str(fields[f.local_id].name.as_str())
|
||||||
|
}
|
||||||
|
VariantData::Tuple(fields) => {
|
||||||
|
let index = fields.iter().position(|it| it.0 == f.local_id);
|
||||||
|
if let Some(index) = index {
|
||||||
|
format_to!(result, "_{index}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
VariantData::Unit => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ProjectionElem::Field(Either::Right(f)) => format_to!(result, "_{}", f.index),
|
||||||
|
&ProjectionElem::ClosureField(field) => format_to!(result, "_{field}"),
|
||||||
|
ProjectionElem::Index(_)
|
||||||
|
| ProjectionElem::ConstantIndex { .. }
|
||||||
|
| ProjectionElem::Subslice { .. }
|
||||||
|
| ProjectionElem::OpaqueCast(_) => {
|
||||||
|
never!("Not happen in closure capture");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if is_raw_identifier(&result, db.crate_graph()[owner.module(db.upcast()).krate()].edition) {
|
||||||
|
result.insert_str(0, "r#");
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn display_place_source_code(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
|
||||||
|
let body = db.body(owner);
|
||||||
|
let krate = owner.krate(db.upcast());
|
||||||
|
let edition = db.crate_graph()[krate].edition;
|
||||||
|
let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string();
|
||||||
|
for proj in &self.place.projections {
|
||||||
|
match proj {
|
||||||
|
// In source code autoderef kicks in.
|
||||||
|
ProjectionElem::Deref => {}
|
||||||
|
ProjectionElem::Field(Either::Left(f)) => {
|
||||||
|
let variant_data = f.parent.variant_data(db.upcast());
|
||||||
|
match &*variant_data {
|
||||||
|
VariantData::Record(fields) => format_to!(
|
||||||
|
result,
|
||||||
|
".{}",
|
||||||
|
fields[f.local_id].name.display(db.upcast(), edition)
|
||||||
|
),
|
||||||
|
VariantData::Tuple(fields) => format_to!(
|
||||||
|
result,
|
||||||
|
".{}",
|
||||||
|
fields.iter().position(|it| it.0 == f.local_id).unwrap_or_default()
|
||||||
|
),
|
||||||
|
VariantData::Unit => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ProjectionElem::Field(Either::Right(f)) => {
|
||||||
|
let field = f.index;
|
||||||
|
format_to!(result, ".{field}");
|
||||||
|
}
|
||||||
|
&ProjectionElem::ClosureField(field) => {
|
||||||
|
format_to!(result, ".{field}");
|
||||||
|
}
|
||||||
|
ProjectionElem::Index(_)
|
||||||
|
| ProjectionElem::ConstantIndex { .. }
|
||||||
|
| ProjectionElem::Subslice { .. }
|
||||||
|
| ProjectionElem::OpaqueCast(_) => {
|
||||||
|
never!("Not happen in closure capture");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let final_derefs_count = self
|
||||||
|
.place
|
||||||
|
.projections
|
||||||
|
.iter()
|
||||||
|
.rev()
|
||||||
|
.take_while(|proj| matches!(proj, ProjectionElem::Deref))
|
||||||
|
.count();
|
||||||
|
result.insert_str(0, &"*".repeat(final_derefs_count));
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
     pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
         let body = db.body(owner);
-        let mut result = body[self.place.local].name.display(db.upcast()).to_string();
+        let krate = owner.krate(db.upcast());
+        let edition = db.crate_graph()[krate].edition;
+        let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string();
         let mut field_need_paren = false;
         for proj in &self.place.projections {
             match proj {

@@ -312,7 +421,8 @@ impl CapturedItem {
 pub(crate) struct CapturedItemWithoutTy {
     pub(crate) place: HirPlace,
     pub(crate) kind: CaptureKind,
-    pub(crate) span: MirSpan,
+    /// The inner vec is the stacks; the outer vec is for each capture reference.
+    pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
 }

 impl CapturedItemWithoutTy {

@@ -331,7 +441,7 @@ impl CapturedItemWithoutTy {
         return CapturedItem {
             place: self.place,
             kind: self.kind,
-            span: self.span,
+            span_stacks: self.span_stacks,
             ty: replace_placeholder_with_binder(ctx, ty),
         };

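These renderers now take the crate's edition because whether an identifier must be written with the `r#` prefix is edition-dependent. A plain-Rust illustration:

    // `gen` is an ordinary identifier in edition 2021 but reserved as a keyword
    // from edition 2024, so source rendered for a 2024 crate must escape it.
    fn main() {
        let r#gen = 42;          // required spelling on edition 2024
        let doubled = r#gen * 2; // an edition-2021 printer could emit plain `gen`
        assert_eq!(doubled, 84);
    }
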
@ -391,22 +501,26 @@ impl InferenceContext<'_> {
|
||||||
let r = self.place_of_expr_without_adjust(tgt_expr)?;
|
let r = self.place_of_expr_without_adjust(tgt_expr)?;
|
||||||
let default = vec![];
|
let default = vec![];
|
||||||
let adjustments = self.result.expr_adjustments.get(&tgt_expr).unwrap_or(&default);
|
let adjustments = self.result.expr_adjustments.get(&tgt_expr).unwrap_or(&default);
|
||||||
apply_adjusts_to_place(r, adjustments)
|
apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
|
||||||
fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
|
fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
|
||||||
|
self.current_capture_span_stack.clear();
|
||||||
match &self.body[tgt_expr] {
|
match &self.body[tgt_expr] {
|
||||||
Expr::Path(p) => {
|
Expr::Path(p) => {
|
||||||
let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
|
let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
|
||||||
if let Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(b), _)) =
|
if let Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(b), _)) =
|
||||||
resolver.resolve_path_in_value_ns(self.db.upcast(), p)
|
resolver.resolve_path_in_value_ns(self.db.upcast(), p)
|
||||||
{
|
{
|
||||||
|
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
|
||||||
return Some(HirPlace { local: b, projections: vec![] });
|
return Some(HirPlace { local: b, projections: vec![] });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Expr::Field { expr, name: _ } => {
|
Expr::Field { expr, name: _ } => {
|
||||||
let mut place = self.place_of_expr(*expr)?;
|
let mut place = self.place_of_expr(*expr)?;
|
||||||
let field = self.result.field_resolution(tgt_expr)?;
|
let field = self.result.field_resolution(tgt_expr)?;
|
||||||
|
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
|
||||||
place.projections.push(ProjectionElem::Field(field));
|
place.projections.push(ProjectionElem::Field(field));
|
||||||
return Some(place);
|
return Some(place);
|
||||||
}
|
}
|
||||||
|
@ -416,6 +530,7 @@ impl InferenceContext<'_> {
|
||||||
TyKind::Ref(..) | TyKind::Raw(..)
|
TyKind::Ref(..) | TyKind::Raw(..)
|
||||||
) {
|
) {
|
||||||
let mut place = self.place_of_expr(*expr)?;
|
let mut place = self.place_of_expr(*expr)?;
|
||||||
|
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
|
||||||
place.projections.push(ProjectionElem::Deref);
|
place.projections.push(ProjectionElem::Deref);
|
||||||
return Some(place);
|
return Some(place);
|
||||||
}
|
}
|
||||||
|
@@ -425,29 +540,57 @@ impl InferenceContext<'_> {
         None
     }

-    fn push_capture(&mut self, capture: CapturedItemWithoutTy) {
-        self.current_captures.push(capture);
+    fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) {
+        self.current_captures.push(CapturedItemWithoutTy {
+            place,
+            kind,
+            span_stacks: smallvec![self.current_capture_span_stack.iter().copied().collect()],
+        });
     }

-    fn ref_expr(&mut self, expr: ExprId) {
-        if let Some(place) = self.place_of_expr(expr) {
-            self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared), expr.into());
+    fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) {
+        // The first span is the identifier, and it must always remain.
+        truncate_to += 1;
+        for span_stack in &mut capture.span_stacks {
+            let mut remained = truncate_to;
+            let mut actual_truncate_to = 0;
+            for &span in &*span_stack {
+                actual_truncate_to += 1;
+                if !span.is_ref_span(self.body) {
+                    remained -= 1;
+                    if remained == 0 {
+                        break;
+                    }
+                }
+            }
+            if actual_truncate_to < span_stack.len()
+                && span_stack[actual_truncate_to].is_ref_span(self.body)
+            {
+                // Include the ref operator if there is one, we will fix it later (in `strip_captures_ref_span()`) if it's incorrect.
+                actual_truncate_to += 1;
+            }
+            span_stack.truncate(actual_truncate_to);
+        }
+    }
+
+    fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
+        if let Some(place) = place {
+            self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared));
         }
         self.walk_expr(expr);
     }

-    fn add_capture(&mut self, place: HirPlace, kind: CaptureKind, span: MirSpan) {
+    fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) {
         if self.is_upvar(&place) {
-            self.push_capture(CapturedItemWithoutTy { place, kind, span });
+            self.push_capture(place, kind);
         }
     }

-    fn mutate_expr(&mut self, expr: ExprId) {
-        if let Some(place) = self.place_of_expr(expr) {
+    fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
+        if let Some(place) = place {
             self.add_capture(
                 place,
                 CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
-                expr.into(),
             );
         }
         self.walk_expr(expr);
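A sketch of the situation `truncate_capture_spans` exists for: a capture recorded through several projections can later be truncated (for example, a `move` closure truncates at the first deref), and the per-capture span stack has to be cut to the same length, keeping a trailing ref-operator span only when it still applies.

    fn main() {
        let boxed = Box::new(10i32);

        // A borrowing closure only needs to read through the box, so its
        // capture can stay precise (identifier span plus the deref's span).
        let read = || *boxed + 1;
        assert_eq!(read(), 11);

        // A `move` closure truncates the captured place at the first deref and
        // takes ownership of the whole `boxed`; the recorded span stack is
        // truncated to match so diagnostics still point at the identifier.
        let take = move || *boxed + 1;
        assert_eq!(take(), 11);
    }
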
|
@ -455,12 +598,12 @@ impl InferenceContext<'_> {
|
||||||
|
|
||||||
fn consume_expr(&mut self, expr: ExprId) {
|
fn consume_expr(&mut self, expr: ExprId) {
|
||||||
if let Some(place) = self.place_of_expr(expr) {
|
if let Some(place) = self.place_of_expr(expr) {
|
||||||
self.consume_place(place, expr.into());
|
self.consume_place(place);
|
||||||
}
|
}
|
||||||
self.walk_expr(expr);
|
self.walk_expr(expr);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn consume_place(&mut self, place: HirPlace, span: MirSpan) {
|
fn consume_place(&mut self, place: HirPlace) {
|
||||||
if self.is_upvar(&place) {
|
if self.is_upvar(&place) {
|
||||||
let ty = place.ty(self);
|
let ty = place.ty(self);
|
||||||
let kind = if self.is_ty_copy(ty) {
|
let kind = if self.is_ty_copy(ty) {
|
||||||
|
@ -468,13 +611,13 @@ impl InferenceContext<'_> {
|
||||||
} else {
|
} else {
|
||||||
CaptureKind::ByValue
|
CaptureKind::ByValue
|
||||||
};
|
};
|
||||||
self.push_capture(CapturedItemWithoutTy { place, kind, span });
|
self.push_capture(place, kind);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) {
|
fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) {
|
||||||
if let Some((last, rest)) = adjustment.split_last() {
|
if let Some((last, rest)) = adjustment.split_last() {
|
||||||
match last.kind {
|
match &last.kind {
|
||||||
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => {
|
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => {
|
||||||
self.walk_expr_with_adjust(tgt_expr, rest)
|
self.walk_expr_with_adjust(tgt_expr, rest)
|
||||||
}
|
}
|
||||||
|
@ -499,8 +642,10 @@ impl InferenceContext<'_> {
|
||||||
Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
|
Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
|
||||||
};
|
};
|
||||||
if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) {
|
if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) {
|
||||||
if let Some(place) = apply_adjusts_to_place(place, rest) {
|
if let Some(place) =
|
||||||
self.add_capture(place, capture_kind, tgt_expr.into());
|
apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest)
|
||||||
|
{
|
||||||
|
self.add_capture(place, capture_kind);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
self.walk_expr_with_adjust(tgt_expr, rest);
|
self.walk_expr_with_adjust(tgt_expr, rest);
|
||||||
|
@ -582,11 +727,7 @@ impl InferenceContext<'_> {
|
||||||
self.walk_pat(&mut capture_mode, arm.pat);
|
self.walk_pat(&mut capture_mode, arm.pat);
|
||||||
}
|
}
|
||||||
if let Some(c) = capture_mode {
|
if let Some(c) = capture_mode {
|
||||||
self.push_capture(CapturedItemWithoutTy {
|
self.push_capture(discr_place, c);
|
||||||
place: discr_place,
|
|
||||||
kind: c,
|
|
||||||
span: (*expr).into(),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -630,10 +771,11 @@ impl InferenceContext<'_> {
|
||||||
}
|
}
|
||||||
false
|
false
|
||||||
};
|
};
|
||||||
|
let place = self.place_of_expr(*expr);
|
||||||
if mutability {
|
if mutability {
|
||||||
self.mutate_expr(*expr);
|
self.mutate_expr(*expr, place);
|
||||||
} else {
|
} else {
|
||||||
self.ref_expr(*expr);
|
self.ref_expr(*expr, place);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
self.select_from_expr(*expr);
|
self.select_from_expr(*expr);
|
||||||
|
@ -648,16 +790,22 @@ impl InferenceContext<'_> {
|
||||||
| Expr::Cast { expr, type_ref: _ } => {
|
| Expr::Cast { expr, type_ref: _ } => {
|
||||||
self.consume_expr(*expr);
|
self.consume_expr(*expr);
|
||||||
}
|
}
|
||||||
Expr::Ref { expr, rawness: _, mutability } => match mutability {
|
Expr::Ref { expr, rawness: _, mutability } => {
|
||||||
hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr),
|
// We need to do this before we push the span so the order will be correct.
|
||||||
hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr),
|
let place = self.place_of_expr(*expr);
|
||||||
},
|
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
|
||||||
|
match mutability {
|
||||||
|
hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr, place),
|
||||||
|
hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr, place),
|
||||||
|
}
|
||||||
|
}
|
||||||
Expr::BinaryOp { lhs, rhs, op } => {
|
Expr::BinaryOp { lhs, rhs, op } => {
|
||||||
let Some(op) = op else {
|
let Some(op) = op else {
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
if matches!(op, BinaryOp::Assignment { .. }) {
|
if matches!(op, BinaryOp::Assignment { .. }) {
|
||||||
self.mutate_expr(*lhs);
|
let place = self.place_of_expr(*lhs);
|
||||||
|
self.mutate_expr(*lhs, place);
|
||||||
self.consume_expr(*rhs);
|
self.consume_expr(*rhs);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
@ -688,7 +836,11 @@ impl InferenceContext<'_> {
|
||||||
);
|
);
|
||||||
let mut cc = mem::take(&mut self.current_captures);
|
let mut cc = mem::take(&mut self.current_captures);
|
||||||
cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| {
|
cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| {
|
||||||
CapturedItemWithoutTy { place: it.place.clone(), kind: it.kind, span: it.span }
|
CapturedItemWithoutTy {
|
||||||
|
place: it.place.clone(),
|
||||||
|
kind: it.kind,
|
||||||
|
span_stacks: it.span_stacks.clone(),
|
||||||
|
}
|
||||||
}));
|
}));
|
||||||
self.current_captures = cc;
|
self.current_captures = cc;
|
||||||
}
|
}
|
||||||
|
@ -810,10 +962,13 @@ impl InferenceContext<'_> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn restrict_precision_for_unsafe(&mut self) {
|
fn restrict_precision_for_unsafe(&mut self) {
|
||||||
for capture in &mut self.current_captures {
|
// FIXME: Borrow checker problems without this.
|
||||||
|
let mut current_captures = std::mem::take(&mut self.current_captures);
|
||||||
|
for capture in &mut current_captures {
|
||||||
let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone());
|
let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone());
|
||||||
if ty.as_raw_ptr().is_some() || ty.is_union() {
|
if ty.as_raw_ptr().is_some() || ty.is_union() {
|
||||||
capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
|
capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
|
||||||
|
self.truncate_capture_spans(capture, 0);
|
||||||
capture.place.projections.truncate(0);
|
capture.place.projections.truncate(0);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
@@ -828,29 +983,35 @@ impl InferenceContext<'_> {
                 );
                 if ty.as_raw_ptr().is_some() || ty.is_union() {
                     capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
+                    self.truncate_capture_spans(capture, i + 1);
                     capture.place.projections.truncate(i + 1);
                     break;
                 }
             }
         }
+        self.current_captures = current_captures;
     }

     fn adjust_for_move_closure(&mut self) {
-        for capture in &mut self.current_captures {
+        // FIXME: Borrow checker won't allow without this.
+        let mut current_captures = std::mem::take(&mut self.current_captures);
+        for capture in &mut current_captures {
             if let Some(first_deref) =
                 capture.place.projections.iter().position(|proj| *proj == ProjectionElem::Deref)
             {
+                self.truncate_capture_spans(capture, first_deref);
                 capture.place.projections.truncate(first_deref);
             }
             capture.kind = CaptureKind::ByValue;
         }
+        self.current_captures = current_captures;
     }

     fn minimize_captures(&mut self) {
-        self.current_captures.sort_by_key(|it| it.place.projections.len());
+        self.current_captures.sort_unstable_by_key(|it| it.place.projections.len());
         let mut hash_map = FxHashMap::<HirPlace, usize>::default();
         let result = mem::take(&mut self.current_captures);
-        for item in result {
+        for mut item in result {
             let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] };
             let mut it = item.place.projections.iter();
             let prev_index = loop {

@@ -858,12 +1019,17 @@ impl InferenceContext<'_> {
                     break Some(*k);
                 }
                 match it.next() {
-                    Some(it) => lookup_place.projections.push(it.clone()),
+                    Some(it) => {
+                        lookup_place.projections.push(it.clone());
+                    }
                     None => break None,
                 }
             };
             match prev_index {
                 Some(p) => {
+                    let prev_projections_len = self.current_captures[p].place.projections.len();
+                    self.truncate_capture_spans(&mut item, prev_projections_len);
+                    self.current_captures[p].span_stacks.extend(item.span_stacks);
                     let len = self.current_captures[p].place.projections.len();
                     let kind_after_truncate =
                         item.place.capture_kind_of_truncated_place(item.kind, len);
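What `minimize_captures` collapses, in surface terms: when one captured place is a prefix of another, only the shorter, more general capture is kept, and the longer capture's span stacks are truncated to the shared prefix and folded into it. Example (names invented):

    struct Config {
        name: String,
        retries: u32,
    }

    fn main() {
        let cfg = Config { name: "dev".to_owned(), retries: 3 };

        let describe = move || {
            // Uses of `cfg.name`, `cfg.retries`, and the whole `cfg`. Since `cfg`
            // subsumes the field places, a single capture of `cfg` suffices; the
            // field-use spans are kept so diagnostics can still point at them.
            let label = format!("{} ({} retries)", cfg.name, cfg.retries);
            let _whole: &Config = &cfg;
            label
        };

        assert_eq!(describe(), "dev (3 retries)");
    }
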
|
@ -878,113 +1044,128 @@ impl InferenceContext<'_> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn consume_with_pat(&mut self, mut place: HirPlace, pat: PatId) {
|
fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) {
|
||||||
let cnt = self.result.pat_adjustments.get(&pat).map(|it| it.len()).unwrap_or_default();
|
let adjustments_count =
|
||||||
place.projections = place
|
self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default();
|
||||||
.projections
|
place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref));
|
||||||
.iter()
|
self.current_capture_span_stack
|
||||||
.cloned()
|
.extend((0..adjustments_count).map(|_| MirSpan::PatId(tgt_pat)));
|
||||||
.chain((0..cnt).map(|_| ProjectionElem::Deref))
|
'reset_span_stack: {
|
||||||
.collect::<Vec<_>>();
|
match &self.body[tgt_pat] {
|
||||||
match &self.body[pat] {
|
Pat::Missing | Pat::Wild => (),
|
||||||
Pat::Missing | Pat::Wild => (),
|
Pat::Tuple { args, ellipsis } => {
|
||||||
Pat::Tuple { args, ellipsis } => {
|
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
|
||||||
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
|
let field_count = match self.result[tgt_pat].kind(Interner) {
|
||||||
let field_count = match self.result[pat].kind(Interner) {
|
TyKind::Tuple(_, s) => s.len(Interner),
|
||||||
TyKind::Tuple(_, s) => s.len(Interner),
|
_ => break 'reset_span_stack,
|
||||||
_ => return,
|
};
|
||||||
};
|
let fields = 0..field_count;
|
||||||
let fields = 0..field_count;
|
let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
|
||||||
let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
|
for (&arg, i) in it {
|
||||||
for (arg, i) in it {
|
let mut p = place.clone();
|
||||||
let mut p = place.clone();
|
self.current_capture_span_stack.push(MirSpan::PatId(arg));
|
||||||
p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId {
|
p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId {
|
||||||
tuple: TupleId(!0), // dummy this, as its unused anyways
|
tuple: TupleId(!0), // dummy this, as its unused anyways
|
||||||
index: i as u32,
|
index: i as u32,
|
||||||
})));
|
})));
|
||||||
self.consume_with_pat(p, *arg);
|
self.consume_with_pat(p, arg);
|
||||||
}
|
self.current_capture_span_stack.pop();
|
||||||
}
|
|
||||||
Pat::Or(pats) => {
|
|
||||||
for pat in pats.iter() {
|
|
||||||
self.consume_with_pat(place.clone(), *pat);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Pat::Record { args, .. } => {
|
|
||||||
let Some(variant) = self.result.variant_resolution_for_pat(pat) else {
|
|
||||||
return;
|
|
||||||
};
|
|
||||||
match variant {
|
|
||||||
VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
|
|
||||||
self.consume_place(place, pat.into())
|
|
||||||
}
|
}
|
||||||
VariantId::StructId(s) => {
|
}
|
||||||
let vd = &*self.db.struct_data(s).variant_data;
|
Pat::Or(pats) => {
|
||||||
for field_pat in args.iter() {
|
for pat in pats.iter() {
|
||||||
let arg = field_pat.pat;
|
self.consume_with_pat(place.clone(), *pat);
|
||||||
let Some(local_id) = vd.field(&field_pat.name) else {
|
}
|
||||||
continue;
|
}
|
||||||
};
|
Pat::Record { args, .. } => {
|
||||||
let mut p = place.clone();
|
let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
|
||||||
p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
|
break 'reset_span_stack;
|
||||||
parent: variant,
|
};
|
||||||
local_id,
|
match variant {
|
||||||
})));
|
VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
|
||||||
self.consume_with_pat(p, arg);
|
self.consume_place(place)
|
||||||
|
}
|
||||||
|
VariantId::StructId(s) => {
|
||||||
|
let vd = &*self.db.struct_data(s).variant_data;
|
||||||
|
for field_pat in args.iter() {
|
||||||
|
let arg = field_pat.pat;
|
||||||
|
let Some(local_id) = vd.field(&field_pat.name) else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
let mut p = place.clone();
|
||||||
|
self.current_capture_span_stack.push(MirSpan::PatId(arg));
|
||||||
|
p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
|
||||||
|
parent: variant,
|
||||||
|
local_id,
|
||||||
|
})));
|
||||||
|
self.consume_with_pat(p, arg);
|
||||||
|
self.current_capture_span_stack.pop();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
Pat::Range { .. }
|
||||||
Pat::Range { .. }
|
| Pat::Slice { .. }
|
||||||
| Pat::Slice { .. }
|
| Pat::ConstBlock(_)
|
||||||
| Pat::ConstBlock(_)
|
| Pat::Path(_)
|
||||||
| Pat::Path(_)
|
| Pat::Lit(_) => self.consume_place(place),
|
||||||
| Pat::Lit(_) => self.consume_place(place, pat.into()),
|
&Pat::Bind { id, subpat: _ } => {
|
||||||
Pat::Bind { id: _, subpat: _ } => {
|
let mode = self.result.binding_modes[tgt_pat];
|
||||||
let mode = self.result.binding_modes[pat];
|
let capture_kind = match mode {
|
||||||
let capture_kind = match mode {
|
BindingMode::Move => {
|
||||||
BindingMode::Move => {
|
self.consume_place(place);
|
||||||
self.consume_place(place, pat.into());
|
break 'reset_span_stack;
|
||||||
return;
|
}
|
||||||
}
|
BindingMode::Ref(Mutability::Not) => BorrowKind::Shared,
|
||||||
BindingMode::Ref(Mutability::Not) => BorrowKind::Shared,
|
BindingMode::Ref(Mutability::Mut) => {
|
||||||
BindingMode::Ref(Mutability::Mut) => {
|
BorrowKind::Mut { kind: MutBorrowKind::Default }
|
||||||
BorrowKind::Mut { kind: MutBorrowKind::Default }
|
}
|
||||||
}
|
};
|
||||||
};
|
self.current_capture_span_stack.push(MirSpan::BindingId(id));
|
||||||
self.add_capture(place, CaptureKind::ByRef(capture_kind), pat.into());
|
self.add_capture(place, CaptureKind::ByRef(capture_kind));
|
||||||
}
|
self.current_capture_span_stack.pop();
|
||||||
Pat::TupleStruct { path: _, args, ellipsis } => {
|
}
|
||||||
let Some(variant) = self.result.variant_resolution_for_pat(pat) else {
|
Pat::TupleStruct { path: _, args, ellipsis } => {
|
||||||
return;
|
let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
|
||||||
};
|
break 'reset_span_stack;
|
||||||
match variant {
|
};
|
||||||
VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
|
match variant {
|
||||||
self.consume_place(place, pat.into())
|
VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
|
||||||
}
|
self.consume_place(place)
|
||||||
VariantId::StructId(s) => {
|
}
|
||||||
let vd = &*self.db.struct_data(s).variant_data;
|
VariantId::StructId(s) => {
|
||||||
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
|
let vd = &*self.db.struct_data(s).variant_data;
|
||||||
let fields = vd.fields().iter();
|
let (al, ar) =
|
||||||
let it =
|
args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
|
||||||
al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
|
let fields = vd.fields().iter();
|
||||||
for (arg, (i, _)) in it {
|
let it = al
|
||||||
let mut p = place.clone();
|
.iter()
|
||||||
p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
|
.zip(fields.clone())
|
||||||
parent: variant,
|
.chain(ar.iter().rev().zip(fields.rev()));
|
||||||
local_id: i,
|
for (&arg, (i, _)) in it {
|
||||||
})));
|
let mut p = place.clone();
|
||||||
self.consume_with_pat(p, *arg);
|
self.current_capture_span_stack.push(MirSpan::PatId(arg));
|
||||||
|
p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
|
||||||
|
parent: variant,
|
||||||
|
local_id: i,
|
||||||
|
})));
|
||||||
|
self.consume_with_pat(p, arg);
|
||||||
|
self.current_capture_span_stack.pop();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Pat::Ref { pat, mutability: _ } => {
|
||||||
|
self.current_capture_span_stack.push(MirSpan::PatId(tgt_pat));
|
||||||
|
place.projections.push(ProjectionElem::Deref);
|
||||||
|
self.consume_with_pat(place, *pat);
|
||||||
|
self.current_capture_span_stack.pop();
|
||||||
|
}
|
||||||
|
Pat::Box { .. } => (), // not supported
|
||||||
}
|
}
|
||||||
Pat::Ref { pat, mutability: _ } => {
|
|
||||||
place.projections.push(ProjectionElem::Deref);
|
|
||||||
self.consume_with_pat(place, *pat)
|
|
||||||
}
|
|
||||||
Pat::Box { .. } => (), // not supported
|
|
||||||
}
|
}
|
||||||
|
self.current_capture_span_stack
|
||||||
|
.truncate(self.current_capture_span_stack.len() - adjustments_count);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn consume_exprs(&mut self, exprs: impl Iterator<Item = ExprId>) {
|
fn consume_exprs(&mut self, exprs: impl Iterator<Item = ExprId>) {
|
||||||
|
@ -1042,12 +1223,28 @@ impl InferenceContext<'_> {
|
||||||
CaptureBy::Ref => (),
|
CaptureBy::Ref => (),
|
||||||
}
|
}
|
||||||
self.minimize_captures();
|
self.minimize_captures();
|
||||||
|
self.strip_captures_ref_span();
|
||||||
let result = mem::take(&mut self.current_captures);
|
let result = mem::take(&mut self.current_captures);
|
||||||
let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>();
|
let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>();
|
||||||
self.result.closure_info.insert(closure, (captures, closure_kind));
|
self.result.closure_info.insert(closure, (captures, closure_kind));
|
||||||
closure_kind
|
closure_kind
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn strip_captures_ref_span(&mut self) {
|
||||||
|
// FIXME: Borrow checker won't allow without this.
|
||||||
|
let mut captures = std::mem::take(&mut self.current_captures);
|
||||||
|
for capture in &mut captures {
|
||||||
|
if matches!(capture.kind, CaptureKind::ByValue) {
|
||||||
|
for span_stack in &mut capture.span_stacks {
|
||||||
|
if span_stack[span_stack.len() - 1].is_ref_span(self.body) {
|
||||||
|
span_stack.truncate(span_stack.len() - 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.current_captures = captures;
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn infer_closures(&mut self) {
|
pub(crate) fn infer_closures(&mut self) {
|
||||||
let deferred_closures = self.sort_closures();
|
let deferred_closures = self.sort_closures();
|
||||||
for (closure, exprs) in deferred_closures.into_iter().rev() {
|
for (closure, exprs) in deferred_closures.into_iter().rev() {
|
||||||
|
@ -1108,10 +1305,17 @@ impl InferenceContext<'_> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn apply_adjusts_to_place(mut r: HirPlace, adjustments: &[Adjustment]) -> Option<HirPlace> {
|
/// Call this only when the last span in the stack isn't a split.
|
||||||
|
fn apply_adjusts_to_place(
|
||||||
|
current_capture_span_stack: &mut Vec<MirSpan>,
|
||||||
|
mut r: HirPlace,
|
||||||
|
adjustments: &[Adjustment],
|
||||||
|
) -> Option<HirPlace> {
|
||||||
|
let span = *current_capture_span_stack.last().expect("empty capture span stack");
|
||||||
for adj in adjustments {
|
for adj in adjustments {
|
||||||
match &adj.kind {
|
match &adj.kind {
|
||||||
Adjust::Deref(None) => {
|
Adjust::Deref(None) => {
|
||||||
|
current_capture_span_stack.push(span);
|
||||||
r.projections.push(ProjectionElem::Deref);
|
r.projections.push(ProjectionElem::Deref);
|
||||||
}
|
}
|
||||||
_ => return None,
|
_ => return None,
|
||||||
|
|
|
@ -18,14 +18,13 @@ use triomphe::Arc;
|
||||||
use crate::{
|
use crate::{
|
||||||
autoderef::{Autoderef, AutoderefKind},
|
autoderef::{Autoderef, AutoderefKind},
|
||||||
db::HirDatabase,
|
db::HirDatabase,
|
||||||
error_lifetime,
|
|
||||||
infer::{
|
infer::{
|
||||||
Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
|
Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
|
||||||
TypeError, TypeMismatch,
|
TypeError, TypeMismatch,
|
||||||
},
|
},
|
||||||
utils::ClosureSubst,
|
utils::ClosureSubst,
|
||||||
Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Solution,
|
Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Lifetime,
|
||||||
Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
|
Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
|
||||||
};
|
};
|
||||||
|
|
||||||
use super::unify::InferenceTable;
|
use super::unify::InferenceTable;
|
||||||
|
@ -301,7 +300,7 @@ impl InferenceTable<'_> {
|
||||||
// Examine the supertype and consider auto-borrowing.
|
// Examine the supertype and consider auto-borrowing.
|
||||||
match to_ty.kind(Interner) {
|
match to_ty.kind(Interner) {
|
||||||
TyKind::Raw(mt, _) => return self.coerce_ptr(from_ty, to_ty, *mt),
|
TyKind::Raw(mt, _) => return self.coerce_ptr(from_ty, to_ty, *mt),
|
||||||
TyKind::Ref(mt, _, _) => return self.coerce_ref(from_ty, to_ty, *mt),
|
TyKind::Ref(mt, lt, _) => return self.coerce_ref(from_ty, to_ty, *mt, lt),
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -377,11 +376,17 @@ impl InferenceTable<'_> {
     /// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`.
     /// To match `A` with `B`, autoderef will be performed,
     /// calling `deref`/`deref_mut` where necessary.
-    fn coerce_ref(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult {
-        let from_mt = match from_ty.kind(Interner) {
-            &TyKind::Ref(mt, _, _) => {
-                coerce_mutabilities(mt, to_mt)?;
-                mt
+    fn coerce_ref(
+        &mut self,
+        from_ty: Ty,
+        to_ty: &Ty,
+        to_mt: Mutability,
+        to_lt: &Lifetime,
+    ) -> CoerceResult {
+        let (_from_lt, from_mt) = match from_ty.kind(Interner) {
+            TyKind::Ref(mt, lt, _) => {
+                coerce_mutabilities(*mt, to_mt)?;
+                (lt.clone(), *mt) // clone is probably not good?
             }
             _ => return self.unify_and(&from_ty, to_ty, identity),
         };

@@ -427,8 +432,8 @@ impl InferenceTable<'_> {
                 // compare those. Note that this means we use the target
                 // mutability [1], since it may be that we are coercing
                 // from `&mut T` to `&U`.
-                let lt = error_lifetime(); // FIXME: handle lifetimes correctly, see rustc
-                let derefd_from_ty = TyKind::Ref(to_mt, lt, referent_ty).intern(Interner);
+                let lt = to_lt; // FIXME: Involve rustc LUB and SUB flag checks
+                let derefd_from_ty = TyKind::Ref(to_mt, lt.clone(), referent_ty).intern(Interner);
                 match autoderef.table.try_unify(&derefd_from_ty, to_ty) {
                     Ok(result) => {
                         found = Some(result.map(|()| derefd_from_ty));

@@ -472,8 +477,10 @@ impl InferenceTable<'_> {
         }

         let mut adjustments = auto_deref_adjust_steps(&autoderef);
-        adjustments
-            .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)), target: ty.clone() });
+        adjustments.push(Adjustment {
+            kind: Adjust::Borrow(AutoBorrow::Ref(to_lt.clone(), to_mt)),
+            target: ty.clone(),
+        });

         success(adjustments, ty, goals)
     }
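The coercion touched here is the autoderef-then-reborrow path, which now threads the target reference's lifetime through instead of an error lifetime. In surface Rust it is the familiar `&mut T`-to-`&U`-style coercion:

    fn total(xs: &[i32]) -> i32 {
        xs.iter().sum()
    }

    fn main() {
        let mut v = vec![1, 2, 3];
        let vr: &mut Vec<i32> = &mut v;

        // `&mut Vec<i32>` is coerced to `&[i32]`: autoderef through `Vec`'s
        // `Deref` impl, then a fresh shared reborrow with the target's
        // mutability (and, after this change, its lifetime) is inserted.
        assert_eq!(total(vr), 6);

        // The reborrow is temporary, so the mutable borrow is still usable.
        vr.push(4);
        assert_eq!(total(vr), 10);
    }
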
|
@ -621,11 +628,11 @@ impl InferenceTable<'_> {
|
||||||
(TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => {
|
(TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => {
|
||||||
coerce_mutabilities(*from_mt, to_mt)?;
|
coerce_mutabilities(*from_mt, to_mt)?;
|
||||||
|
|
||||||
let lt = error_lifetime();
|
let lt = self.new_lifetime_var();
|
||||||
Some((
|
Some((
|
||||||
Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
|
Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
|
||||||
Adjustment {
|
Adjustment {
|
||||||
kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)),
|
kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), to_mt)),
|
||||||
target: TyKind::Ref(to_mt, lt, from_inner.clone()).intern(Interner),
|
target: TyKind::Ref(to_mt, lt, from_inner.clone()).intern(Interner),
|
||||||
},
|
},
|
||||||
))
|
))
|
||||||
|
|
|
@ -635,7 +635,10 @@ impl InferenceContext<'_> {
|
||||||
let inner_ty = self.infer_expr_inner(*expr, &expectation);
|
let inner_ty = self.infer_expr_inner(*expr, &expectation);
|
||||||
match rawness {
|
match rawness {
|
||||||
Rawness::RawPtr => TyKind::Raw(mutability, inner_ty),
|
Rawness::RawPtr => TyKind::Raw(mutability, inner_ty),
|
||||||
Rawness::Ref => TyKind::Ref(mutability, error_lifetime(), inner_ty),
|
Rawness::Ref => {
|
||||||
|
let lt = self.table.new_lifetime_var();
|
||||||
|
TyKind::Ref(mutability, lt, inner_ty)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
.intern(Interner)
|
.intern(Interner)
|
||||||
}
|
}
|
||||||
|
@ -786,18 +789,23 @@ impl InferenceContext<'_> {
|
||||||
adj.apply(&mut self.table, base_ty)
|
adj.apply(&mut self.table, base_ty)
|
||||||
});
|
});
|
||||||
// mutability will be fixed up in `InferenceContext::infer_mut`;
|
// mutability will be fixed up in `InferenceContext::infer_mut`;
|
||||||
adj.push(Adjustment::borrow(Mutability::Not, self_ty.clone()));
|
adj.push(Adjustment::borrow(
|
||||||
|
Mutability::Not,
|
||||||
|
self_ty.clone(),
|
||||||
|
self.table.new_lifetime_var(),
|
||||||
|
));
|
||||||
self.write_expr_adj(*base, adj);
|
self.write_expr_adj(*base, adj);
|
||||||
if let Some(func) = self
|
if let Some(func) = self
|
||||||
.db
|
.db
|
||||||
.trait_data(index_trait)
|
.trait_data(index_trait)
|
||||||
.method_by_name(&Name::new_symbol_root(sym::index.clone()))
|
.method_by_name(&Name::new_symbol_root(sym::index.clone()))
|
||||||
{
|
{
|
||||||
let substs = TyBuilder::subst_for_def(self.db, index_trait, None)
|
let subst = TyBuilder::subst_for_def(self.db, index_trait, None);
|
||||||
.push(self_ty.clone())
|
if subst.remaining() != 2 {
|
||||||
.push(index_ty.clone())
|
return self.err_ty();
|
||||||
.build();
|
}
|
||||||
self.write_method_resolution(tgt_expr, func, substs);
|
let subst = subst.push(self_ty.clone()).push(index_ty.clone()).build();
|
||||||
|
self.write_method_resolution(tgt_expr, func, subst);
|
||||||
}
|
}
|
||||||
let assoc = self.resolve_ops_index_output();
|
let assoc = self.resolve_ops_index_output();
|
||||||
let res = self.resolve_associated_type_with_params(
|
let res = self.resolve_associated_type_with_params(
|
||||||
|
@ -990,7 +998,7 @@ impl InferenceContext<'_> {
|
||||||
match fn_x {
|
match fn_x {
|
||||||
FnTrait::FnOnce => (),
|
FnTrait::FnOnce => (),
|
||||||
FnTrait::FnMut => {
|
FnTrait::FnMut => {
|
||||||
if let TyKind::Ref(Mutability::Mut, _, inner) = derefed_callee.kind(Interner) {
|
if let TyKind::Ref(Mutability::Mut, lt, inner) = derefed_callee.kind(Interner) {
|
||||||
if adjustments
|
if adjustments
|
||||||
.last()
|
.last()
|
||||||
.map(|it| matches!(it.kind, Adjust::Borrow(_)))
|
.map(|it| matches!(it.kind, Adjust::Borrow(_)))
|
||||||
|
@ -999,15 +1007,27 @@ impl InferenceContext<'_> {
|
||||||
// prefer reborrow to move
|
// prefer reborrow to move
|
||||||
adjustments
|
adjustments
|
||||||
.push(Adjustment { kind: Adjust::Deref(None), target: inner.clone() });
|
.push(Adjustment { kind: Adjust::Deref(None), target: inner.clone() });
|
||||||
adjustments.push(Adjustment::borrow(Mutability::Mut, inner.clone()))
|
adjustments.push(Adjustment::borrow(
|
||||||
|
Mutability::Mut,
|
||||||
|
inner.clone(),
|
||||||
|
lt.clone(),
|
||||||
|
))
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
adjustments.push(Adjustment::borrow(Mutability::Mut, derefed_callee.clone()));
|
adjustments.push(Adjustment::borrow(
|
||||||
|
Mutability::Mut,
|
||||||
|
derefed_callee.clone(),
|
||||||
|
self.table.new_lifetime_var(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
FnTrait::Fn => {
|
FnTrait::Fn => {
|
||||||
if !matches!(derefed_callee.kind(Interner), TyKind::Ref(Mutability::Not, _, _)) {
|
if !matches!(derefed_callee.kind(Interner), TyKind::Ref(Mutability::Not, _, _)) {
|
||||||
adjustments.push(Adjustment::borrow(Mutability::Not, derefed_callee.clone()));
|
adjustments.push(Adjustment::borrow(
|
||||||
|
Mutability::Not,
|
||||||
|
derefed_callee.clone(),
|
||||||
|
self.table.new_lifetime_var(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1295,10 +1315,12 @@ impl InferenceContext<'_> {
|
||||||
|
|
||||||
// HACK: We can use this substitution for the function because the function itself doesn't
|
// HACK: We can use this substitution for the function because the function itself doesn't
|
||||||
// have its own generic parameters.
|
// have its own generic parameters.
|
||||||
let subst = TyBuilder::subst_for_def(self.db, trait_, None)
|
let subst = TyBuilder::subst_for_def(self.db, trait_, None);
|
||||||
.push(lhs_ty.clone())
|
if subst.remaining() != 2 {
|
||||||
.push(rhs_ty.clone())
|
return Ty::new(Interner, TyKind::Error);
|
||||||
.build();
|
}
|
||||||
|
let subst = subst.push(lhs_ty.clone()).push(rhs_ty.clone()).build();
|
||||||
|
|
||||||
self.write_method_resolution(tgt_expr, func, subst.clone());
|
self.write_method_resolution(tgt_expr, func, subst.clone());
|
||||||
|
|
||||||
let method_ty = self.db.value_ty(func.into()).unwrap().substitute(Interner, &subst);
|
let method_ty = self.db.value_ty(func.into()).unwrap().substitute(Interner, &subst);
|
||||||
|
@ -1310,11 +1332,11 @@ impl InferenceContext<'_> {
|
||||||
Some(sig) => {
|
Some(sig) => {
|
||||||
let p_left = &sig.params()[0];
|
let p_left = &sig.params()[0];
|
||||||
if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. }) {
|
if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. }) {
|
||||||
if let &TyKind::Ref(mtbl, _, _) = p_left.kind(Interner) {
|
if let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) {
|
||||||
self.write_expr_adj(
|
self.write_expr_adj(
|
||||||
lhs,
|
lhs,
|
||||||
vec![Adjustment {
|
vec![Adjustment {
|
||||||
kind: Adjust::Borrow(AutoBorrow::Ref(mtbl)),
|
kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
|
||||||
target: p_left.clone(),
|
target: p_left.clone(),
|
||||||
}],
|
}],
|
||||||
);
|
);
|
||||||
|
@ -1322,11 +1344,11 @@ impl InferenceContext<'_> {
|
||||||
}
|
}
|
||||||
let p_right = &sig.params()[1];
|
let p_right = &sig.params()[1];
|
||||||
if matches!(op, BinaryOp::CmpOp(..)) {
|
if matches!(op, BinaryOp::CmpOp(..)) {
|
||||||
if let &TyKind::Ref(mtbl, _, _) = p_right.kind(Interner) {
|
if let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) {
|
||||||
self.write_expr_adj(
|
self.write_expr_adj(
|
||||||
rhs,
|
rhs,
|
||||||
vec![Adjustment {
|
vec![Adjustment {
|
||||||
kind: Adjust::Borrow(AutoBorrow::Ref(mtbl)),
|
kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
|
||||||
target: p_right.clone(),
|
target: p_right.clone(),
|
||||||
}],
|
}],
|
||||||
);
|
);
|
||||||
|
|
|
@ -28,7 +28,7 @@ impl InferenceContext<'_> {
|
||||||
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => (),
|
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => (),
|
||||||
Adjust::Deref(Some(d)) => *d = OverloadedDeref(Some(mutability)),
|
Adjust::Deref(Some(d)) => *d = OverloadedDeref(Some(mutability)),
|
||||||
Adjust::Borrow(b) => match b {
|
Adjust::Borrow(b) => match b {
|
||||||
AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m) => mutability = *m,
|
AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m) => mutability = *m,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -125,7 +125,7 @@ impl InferenceContext<'_> {
|
||||||
.get_mut(&base)
|
.get_mut(&base)
|
||||||
.and_then(|it| it.last_mut());
|
.and_then(|it| it.last_mut());
|
||||||
if let Some(Adjustment {
|
if let Some(Adjustment {
|
||||||
kind: Adjust::Borrow(AutoBorrow::Ref(mutability)),
|
kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)),
|
||||||
target,
|
target,
|
||||||
}) = base_adjustments
|
}) = base_adjustments
|
||||||
{
|
{
|
||||||
|
|
|
@ -12,7 +12,6 @@ use stdx::TupleExt;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
consteval::{try_const_usize, usize_const},
|
consteval::{try_const_usize, usize_const},
|
||||||
error_lifetime,
|
|
||||||
infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
|
infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
|
||||||
lower::lower_to_chalk_mutability,
|
lower::lower_to_chalk_mutability,
|
||||||
primitive::UintTy,
|
primitive::UintTy,
|
||||||
|
@ -394,19 +393,20 @@ impl InferenceContext<'_> {
|
||||||
expected: &Ty,
|
expected: &Ty,
|
||||||
default_bm: BindingMode,
|
default_bm: BindingMode,
|
||||||
) -> Ty {
|
) -> Ty {
-        let expectation = match expected.as_reference() {
-            Some((inner_ty, _lifetime, _exp_mut)) => inner_ty.clone(),
+        let (expectation_type, expectation_lt) = match expected.as_reference() {
+            Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime.clone()),
             None => {
                 let inner_ty = self.table.new_type_var();
+                let inner_lt = self.table.new_lifetime_var();
                 let ref_ty =
-                    TyKind::Ref(mutability, error_lifetime(), inner_ty.clone()).intern(Interner);
+                    TyKind::Ref(mutability, inner_lt.clone(), inner_ty.clone()).intern(Interner);
                 // Unification failure will be reported by the caller.
                 self.unify(&ref_ty, expected);
-                inner_ty
+                (inner_ty, inner_lt)
             }
         };
-        let subty = self.infer_pat(inner_pat, &expectation, default_bm);
-        TyKind::Ref(mutability, error_lifetime(), subty).intern(Interner)
+        let subty = self.infer_pat(inner_pat, &expectation_type, default_bm);
+        TyKind::Ref(mutability, expectation_lt, subty).intern(Interner)
     }

     fn infer_bind_pat(
@@ -433,7 +433,8 @@ impl InferenceContext<'_> {

         let bound_ty = match mode {
             BindingMode::Ref(mutability) => {
-                TyKind::Ref(mutability, error_lifetime(), inner_ty.clone()).intern(Interner)
+                let inner_lt = self.table.new_lifetime_var();
+                TyKind::Ref(mutability, inner_lt, inner_ty.clone()).intern(Interner)
             }
             BindingMode::Move => inner_ty.clone(),
         };

@@ -17,12 +17,12 @@ use triomphe::Arc;

 use super::{InferOk, InferResult, InferenceContext, TypeError};
 use crate::{
-    consteval::unknown_const, db::HirDatabase, error_lifetime, fold_generic_args,
-    fold_tys_and_consts, to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical,
-    Const, ConstValue, DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData,
-    Guidance, InEnvironment, InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy,
-    ProjectionTyExt, Scalar, Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
-    TyKind, VariableKind, WhereClause,
+    consteval::unknown_const, db::HirDatabase, fold_generic_args, fold_tys_and_consts,
+    to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue,
+    DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData, Guidance, InEnvironment,
+    InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy, ProjectionTyExt, Scalar,
+    Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
+    WhereClause,
 };

 impl InferenceContext<'_> {
@@ -105,7 +105,7 @@ impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
                 VariableKind::Ty(TyVariableKind::Float) => ctx.new_float_var().cast(Interner),
                 // Chalk can sometimes return new lifetime variables. We just replace them by errors
                 // for now.
-                VariableKind::Lifetime => error_lifetime().cast(Interner),
+                VariableKind::Lifetime => ctx.new_lifetime_var().cast(Interner),
                 VariableKind::Const(ty) => ctx.new_const_var(ty.clone()).cast(Interner),
             }),
         );
@@ -42,19 +42,20 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
             hir_def::ModuleDefId::AdtId(x) => {
                 let name = match x {
                     hir_def::AdtId::StructId(x) => {
-                        db.struct_data(x).name.display_no_db().to_smolstr()
+                        db.struct_data(x).name.display_no_db(file_id.edition()).to_smolstr()
                     }
                     hir_def::AdtId::UnionId(x) => {
-                        db.union_data(x).name.display_no_db().to_smolstr()
+                        db.union_data(x).name.display_no_db(file_id.edition()).to_smolstr()
                     }
                     hir_def::AdtId::EnumId(x) => {
-                        db.enum_data(x).name.display_no_db().to_smolstr()
+                        db.enum_data(x).name.display_no_db(file_id.edition()).to_smolstr()
                     }
                 };
                 (name == "Goal").then_some(Either::Left(x))
             }
             hir_def::ModuleDefId::TypeAliasId(x) => {
-                let name = db.type_alias_data(x).name.display_no_db().to_smolstr();
+                let name =
+                    db.type_alias_data(x).name.display_no_db(file_id.edition()).to_smolstr();
                 (name == "Goal").then_some(Either::Right(x))
             }
             _ => None,
@@ -94,7 +95,7 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
         .declarations()
         .find_map(|x| match x {
             hir_def::ModuleDefId::FunctionId(x) => {
-                let name = db.function_data(x).name.display_no_db().to_smolstr();
+                let name = db.function_data(x).name.display_no_db(file_id.edition()).to_smolstr();
                 (name == "main").then_some(x)
             }
             _ => None,
@@ -104,7 +105,7 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
     let b = hir_body
         .bindings
         .iter()
-        .find(|x| x.1.name.display_no_db().to_smolstr() == "goal")
+        .find(|x| x.1.name.display_no_db(file_id.edition()).to_smolstr() == "goal")
         .unwrap()
         .0;
     let infer = db.infer(function_id.into());

@@ -68,6 +68,7 @@ use intern::{sym, Symbol};
 use la_arena::{Arena, Idx};
 use mir::{MirEvalError, VTableMap};
 use rustc_hash::{FxHashMap, FxHashSet};
+use span::Edition;
 use syntax::ast::{make, ConstArg};
 use traits::FnTrait;
 use triomphe::Arc;
@@ -1027,7 +1028,11 @@ where
     collector.placeholders.into_iter().collect()
 }

-pub fn known_const_to_ast(konst: &Const, db: &dyn HirDatabase) -> Option<ConstArg> {
+pub fn known_const_to_ast(
+    konst: &Const,
+    db: &dyn HirDatabase,
+    edition: Edition,
+) -> Option<ConstArg> {
     if let ConstValue::Concrete(c) = &konst.interned().value {
         match c.interned {
             ConstScalar::UnevaluatedConst(GeneralConstId::InTypeConstId(cid), _) => {
@@ -1037,5 +1042,5 @@ pub fn known_const_to_ast(konst: &Const, db: &dyn HirDatabase) -> Option<ConstAr
             _ => (),
         }
     }
-    Some(make::expr_const_value(konst.display(db).to_string().as_str()))
+    Some(make::expr_const_value(konst.display(db, edition).to_string().as_str()))
 }
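The signature change above is part of a pattern that runs through this whole commit: rendering helpers now take an explicit `Edition` so that names are formatted correctly for the crate being displayed. A minimal, self-contained sketch of that pattern follows; the toy types and the raw-identifier rule shown here are illustrative stand-ins, not the actual rust-analyzer API.

// Illustrative only: a toy `Name::display` that needs to know the edition,
// e.g. to decide whether a name must be rendered as a raw identifier.
#[derive(Clone, Copy)]
enum Edition {
    E2015,
    E2021,
}

struct Name(String);

impl Name {
    fn display(&self, edition: Edition) -> String {
        // `async` became a keyword in the 2018 edition, so on newer editions
        // it has to be printed as `r#async` to stay a valid identifier.
        match (edition, self.0.as_str()) {
            (Edition::E2021, "async") => format!("r#{}", self.0),
            _ => self.0.clone(),
        }
    }
}

fn main() {
    let name = Name("async".to_owned());
    assert_eq!(name.display(Edition::E2015), "async");
    assert_eq!(name.display(Edition::E2021), "r#async");
}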
@@ -6,7 +6,7 @@
 //!
 //! This usually involves resolving names, collecting generic arguments etc.
 use std::{
-    cell::{Cell, RefCell, RefMut},
+    cell::{Cell, OnceCell, RefCell, RefMut},
     iter,
     ops::{self, Not as _},
 };
@@ -43,7 +43,6 @@ use hir_def::{
 use hir_expand::{name::Name, ExpandResult};
 use intern::Interned;
 use la_arena::{Arena, ArenaMap};
-use once_cell::unsync::OnceCell;
 use rustc_hash::FxHashSet;
 use rustc_pattern_analysis::Captures;
 use smallvec::SmallVec;
@@ -378,26 +377,25 @@ impl<'a> TyLoweringContext<'a> {
                 // Count the number of `impl Trait` things that appear within our bounds.
                 // Since t hose have been emitted as implicit type args already.
                 counter.set(idx + count_impl_traits(type_ref) as u16);
-                let (
-                    _parent_params,
-                    self_param,
-                    type_params,
-                    const_params,
-                    _impl_trait_params,
-                    lifetime_params,
-                ) = self
+                let kind = self
                     .generics()
                     .expect("variable impl trait lowering must be in a generic def")
-                    .provenance_split();
-                TyKind::BoundVar(BoundVar::new(
-                    self.in_binders,
-                    idx as usize
-                        + self_param as usize
-                        + type_params
-                        + const_params
-                        + lifetime_params,
-                ))
-                .intern(Interner)
+                    .iter()
+                    .enumerate()
+                    .filter_map(|(i, (id, data))| match (id, data) {
+                        (
+                            GenericParamId::TypeParamId(_),
+                            GenericParamDataRef::TypeParamData(data),
+                        ) if data.provenance == TypeParamProvenance::ArgumentImplTrait => {
+                            Some(i)
+                        }
+                        _ => None,
+                    })
+                    .nth(idx as usize)
+                    .map_or(TyKind::Error, |id| {
+                        TyKind::BoundVar(BoundVar { debruijn: self.in_binders, index: id })
+                    });
+                kind.intern(Interner)
             }
             ImplTraitLoweringState::Disallowed => {
                 // FIXME: report error
@@ -1553,6 +1551,10 @@ pub(crate) fn generic_predicates_for_param_query(
             }
         };
         if invalid_target {
+            // If this is filtered out without lowering, `?Sized` is not gathered into `ctx.unsized_types`
+            if let TypeBound::Path(_, TraitBoundModifier::Maybe) = &**bound {
+                ctx.lower_where_predicate(pred, &def, true).for_each(drop);
+            }
             return false;
         }

@@ -1741,15 +1743,39 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>(
     substitution: &'subst Substitution,
     resolver: &Resolver,
 ) -> Option<impl Iterator<Item = WhereClause> + Captures<'a> + Captures<'subst>> {
-    let is_trait_def = matches!(def, GenericDefId::TraitId(..));
-    let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
     let sized_trait = db
         .lang_item(resolver.krate(), LangItem::Sized)
-        .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
+        .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id))?;

-    sized_trait.map(move |sized_trait| {
-        generic_args
-            .iter()
+    let get_trait_self_idx = |container: ItemContainerId| {
+        if matches!(container, ItemContainerId::TraitId(_)) {
+            let generics = generics(db.upcast(), def);
+            Some(generics.len_self())
+        } else {
+            None
+        }
+    };
+    let trait_self_idx = match def {
+        GenericDefId::TraitId(_) => Some(0),
+        GenericDefId::FunctionId(it) => get_trait_self_idx(it.lookup(db.upcast()).container),
+        GenericDefId::ConstId(it) => get_trait_self_idx(it.lookup(db.upcast()).container),
+        GenericDefId::TypeAliasId(it) => get_trait_self_idx(it.lookup(db.upcast()).container),
+        _ => None,
+    };
+
+    Some(
+        substitution
+            .iter(Interner)
+            .enumerate()
+            .filter_map(
+                move |(idx, generic_arg)| {
+                    if Some(idx) == trait_self_idx {
+                        None
+                    } else {
+                        Some(generic_arg)
+                    }
+                },
+            )
             .filter_map(|generic_arg| generic_arg.ty(Interner))
             .filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty))
             .map(move |self_ty| {
@@ -1757,8 +1783,8 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>(
                     trait_id: sized_trait,
                     substitution: Substitution::from1(Interner, self_ty.clone()),
                 })
-            })
-    })
+            }),
+    )
 }

 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -1978,13 +2004,13 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
         .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
         .with_type_param_mode(ParamLoweringMode::Variable);
     let type_alias_data = db.type_alias_data(t);
-    if type_alias_data.is_extern {
-        Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
+    let inner = if type_alias_data.is_extern {
+        TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner)
     } else {
         let type_ref = &type_alias_data.type_ref;
-        let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error));
-        make_binders(db, &generics, inner)
-    }
+        ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error))
+    };
+    make_binders(db, &generics, inner)
 }

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -35,7 +35,7 @@ use crate::{
 };

 /// This is used as a key for indexing impls.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub enum TyFingerprint {
     // These are lang item impls:
     Str,
@@ -542,7 +542,8 @@ impl ReceiverAdjustments {
             }
         }
         if let Some(m) = self.autoref {
-            let a = Adjustment::borrow(m, ty);
+            let lt = table.new_lifetime_var();
+            let a = Adjustment::borrow(m, ty, lt);
             ty = a.target.clone();
             adjust.push(a);
         }
@@ -1066,7 +1067,7 @@ fn iterate_method_candidates_by_receiver(
     // be found in any of the derefs of receiver_ty, so we have to go through
     // that, including raw derefs.
     table.run_in_snapshot(|table| {
-        let mut autoderef = autoderef::Autoderef::new(table, receiver_ty.clone(), true);
+        let mut autoderef = autoderef::Autoderef::new_no_tracking(table, receiver_ty.clone(), true);
         while let Some((self_ty, _)) = autoderef.next() {
             iterate_inherent_methods(
                 &self_ty,
@@ -1081,7 +1082,7 @@ fn iterate_method_candidates_by_receiver(
         ControlFlow::Continue(())
     })?;
     table.run_in_snapshot(|table| {
-        let mut autoderef = autoderef::Autoderef::new(table, receiver_ty.clone(), true);
+        let mut autoderef = autoderef::Autoderef::new_no_tracking(table, receiver_ty.clone(), true);
         while let Some((self_ty, _)) = autoderef.next() {
             if matches!(self_ty.kind(Interner), TyKind::InferenceVar(_, TyVariableKind::General)) {
                 // don't try to resolve methods on unknown types
@@ -1656,7 +1657,7 @@ fn autoderef_method_receiver(
     ty: Ty,
 ) -> Vec<(Canonical<Ty>, ReceiverAdjustments)> {
     let mut deref_chain: Vec<_> = Vec::new();
-    let mut autoderef = autoderef::Autoderef::new(table, ty, false);
+    let mut autoderef = autoderef::Autoderef::new_no_tracking(table, ty, false);
     while let Some((ty, derefs)) = autoderef.next() {
         deref_chain.push((
             autoderef.table.canonicalize(ty),
@@ -16,7 +16,8 @@ use base_db::CrateId;
 use chalk_ir::Mutability;
 use either::Either;
 use hir_def::{
-    hir::{BindingId, Expr, ExprId, Ordering, PatId},
+    body::Body,
+    hir::{BindingAnnotation, BindingId, Expr, ExprId, Ordering, PatId},
     DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId,
 };
 use la_arena::{Arena, ArenaMap, Idx, RawIdx};
@@ -158,7 +159,10 @@ impl<V, T> ProjectionElem<V, T> {
                     subst.at(Interner, 0).assert_ty_ref(Interner).clone()
                 }
                 _ => {
-                    never!("Overloaded deref on type {} is not a projection", base.display(db));
+                    never!(
+                        "Overloaded deref on type {} is not a projection",
+                        base.display(db, db.crate_graph()[krate].edition)
+                    );
                     TyKind::Error.intern(Interner)
                 }
            },
@@ -633,6 +637,7 @@ pub enum TerminatorKind {
     },
 }

+// Order of variants in this enum matter: they are used to compare borrow kinds.
 #[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
 pub enum BorrowKind {
     /// Data must be immutable and is aliasable.
@@ -663,15 +668,16 @@ pub enum BorrowKind {
     Mut { kind: MutBorrowKind },
 }

+// Order of variants in this enum matter: they are used to compare borrow kinds.
 #[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
 pub enum MutBorrowKind {
+    /// Data must be immutable but not aliasable. This kind of borrow cannot currently
+    /// be expressed by the user and is used only in implicit closure bindings.
+    ClosureCapture,
     Default,
     /// This borrow arose from method-call auto-ref
     /// (i.e., adjustment::Adjust::Borrow).
     TwoPhasedBorrow,
-    /// Data must be immutable but not aliasable. This kind of borrow cannot currently
-    /// be expressed by the user and is used only in implicit closure bindings.
-    ClosureCapture,
 }

 impl BorrowKind {
@@ -1169,6 +1175,20 @@ pub enum MirSpan {
     Unknown,
 }

+impl MirSpan {
+    pub fn is_ref_span(&self, body: &Body) -> bool {
+        match *self {
+            MirSpan::ExprId(expr) => matches!(body[expr], Expr::Ref { .. }),
+            // FIXME: Figure out if this is correct wrt. match ergonomics.
+            MirSpan::BindingId(binding) => matches!(
+                body.bindings[binding].mode,
+                BindingAnnotation::Ref | BindingAnnotation::RefMut
+            ),
+            MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false,
+        }
+    }
+}
+
 impl_from!(ExprId, PatId for MirSpan);

 impl From<&ExprId> for MirSpan {
@@ -23,7 +23,7 @@ use rustc_apfloat::{
     Float,
 };
 use rustc_hash::{FxHashMap, FxHashSet};
-use span::FileId;
+use span::{Edition, FileId};
 use stdx::never;
 use syntax::{SyntaxNodePtr, TextRange};
 use triomphe::Arc;
@@ -358,6 +358,7 @@ impl MirEvalError {
         f: &mut String,
         db: &dyn HirDatabase,
         span_formatter: impl Fn(FileId, TextRange) -> String,
+        edition: Edition,
     ) -> std::result::Result<(), std::fmt::Error> {
         writeln!(f, "Mir eval error:")?;
         let mut err = self;
@@ -370,7 +371,7 @@ impl MirEvalError {
                     writeln!(
                         f,
                         "In function {} ({:?})",
-                        function_name.name.display(db.upcast()),
+                        function_name.name.display(db.upcast(), edition),
                         func
                     )?;
                 }
@@ -415,7 +416,7 @@ impl MirEvalError {
                 write!(
                     f,
                     "Layout for type `{}` is not available due {err:?}",
-                    ty.display(db).with_closure_style(ClosureStyle::ClosureWithId)
+                    ty.display(db, edition).with_closure_style(ClosureStyle::ClosureWithId)
                 )?;
             }
             MirEvalError::MirLowerError(func, err) => {
@@ -423,16 +424,17 @@ impl MirEvalError {
                 writeln!(
                     f,
                     "MIR lowering for function `{}` ({:?}) failed due:",
-                    function_name.name.display(db.upcast()),
+                    function_name.name.display(db.upcast(), edition),
                     func
                 )?;
-                err.pretty_print(f, db, span_formatter)?;
+                err.pretty_print(f, db, span_formatter, edition)?;
             }
             MirEvalError::ConstEvalError(name, err) => {
                 MirLowerError::ConstEvalError((**name).into(), err.clone()).pretty_print(
                     f,
                     db,
                     span_formatter,
+                    edition,
                 )?;
             }
             MirEvalError::UndefinedBehavior(_)
@@ -1516,9 +1518,97 @@ impl Evaluator<'_> {
                         self.size_of_sized(target_ty, locals, "destination of int to int cast")?;
                     Owned(current[0..dest_size].to_vec())
                 }
-                CastKind::FloatToInt => not_supported!("float to int cast"),
-                CastKind::FloatToFloat => not_supported!("float to float cast"),
-                CastKind::IntToFloat => not_supported!("float to int cast"),
+                CastKind::FloatToInt => {
+                    let ty = self.operand_ty(operand, locals)?;
+                    let TyKind::Scalar(chalk_ir::Scalar::Float(ty)) = ty.kind(Interner) else {
+                        not_supported!("invalid float to int cast");
+                    };
+                    let value = self.eval_operand(operand, locals)?.get(self)?;
+                    let value = match ty {
+                        chalk_ir::FloatTy::F32 => {
+                            let value = value.try_into().unwrap();
+                            f32::from_le_bytes(value) as f64
+                        }
+                        chalk_ir::FloatTy::F64 => {
+                            let value = value.try_into().unwrap();
+                            f64::from_le_bytes(value)
+                        }
+                        chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
+                            not_supported!("unstable floating point type f16 and f128");
+                        }
+                    };
+                    let is_signed = matches!(
+                        target_ty.kind(Interner),
+                        TyKind::Scalar(chalk_ir::Scalar::Int(_))
+                    );
+                    let dest_size =
+                        self.size_of_sized(target_ty, locals, "destination of float to int cast")?;
+                    let dest_bits = dest_size * 8;
+                    let (max, min) = if dest_bits == 128 {
+                        (i128::MAX, i128::MIN)
+                    } else if is_signed {
+                        let max = 1i128 << (dest_bits - 1);
+                        (max - 1, -max)
+                    } else {
+                        (1i128 << dest_bits, 0)
+                    };
+                    let value = (value as i128).min(max).max(min);
+                    let result = value.to_le_bytes();
+                    Owned(result[0..dest_size].to_vec())
+                }
+                CastKind::FloatToFloat => {
+                    let ty = self.operand_ty(operand, locals)?;
+                    let TyKind::Scalar(chalk_ir::Scalar::Float(ty)) = ty.kind(Interner) else {
+                        not_supported!("invalid float to int cast");
+                    };
+                    let value = self.eval_operand(operand, locals)?.get(self)?;
+                    let value = match ty {
+                        chalk_ir::FloatTy::F32 => {
+                            let value = value.try_into().unwrap();
+                            f32::from_le_bytes(value) as f64
+                        }
+                        chalk_ir::FloatTy::F64 => {
+                            let value = value.try_into().unwrap();
+                            f64::from_le_bytes(value)
+                        }
+                        chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
+                            not_supported!("unstable floating point type f16 and f128");
+                        }
+                    };
+                    let TyKind::Scalar(chalk_ir::Scalar::Float(target_ty)) =
+                        target_ty.kind(Interner)
+                    else {
+                        not_supported!("invalid float to float cast");
+                    };
+                    match target_ty {
+                        chalk_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()),
+                        chalk_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()),
+                        chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
+                            not_supported!("unstable floating point type f16 and f128");
+                        }
+                    }
+                }
+                CastKind::IntToFloat => {
+                    let current_ty = self.operand_ty(operand, locals)?;
+                    let is_signed = matches!(
+                        current_ty.kind(Interner),
+                        TyKind::Scalar(chalk_ir::Scalar::Int(_))
+                    );
+                    let value = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
+                    let value = i128::from_le_bytes(value);
+                    let TyKind::Scalar(chalk_ir::Scalar::Float(target_ty)) =
+                        target_ty.kind(Interner)
+                    else {
+                        not_supported!("invalid int to float cast");
+                    };
+                    match target_ty {
+                        chalk_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()),
+                        chalk_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()),
+                        chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
+                            not_supported!("unstable floating point type f16 and f128");
+                        }
+                    }
+                }
                 CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"),
             },
         })
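The new `CastKind::FloatToInt` arm above clamps the (possibly out-of-range) float value into the destination integer's range before writing the truncated bytes back. A rough standalone sketch of that saturating step, under the simplifying assumption that the source has already been widened to `f64` (the function name and signature here are illustrative, not part of the interpreter):

// Saturating float -> integer conversion, mirroring the clamp in the
// FloatToInt arm: values outside the destination range collapse to its
// min/max instead of wrapping.
fn float_to_int_saturating(value: f64, dest_bits: u32, is_signed: bool) -> i128 {
    let (max, min) = if dest_bits == 128 {
        (i128::MAX, i128::MIN)
    } else if is_signed {
        let max = 1i128 << (dest_bits - 1);
        (max - 1, -max)
    } else {
        // The hunk above uses `1i128 << dest_bits` as the unsigned bound;
        // this sketch clamps to the inclusive maximum instead.
        ((1i128 << dest_bits) - 1, 0)
    };
    (value as i128).clamp(min, max)
}

fn main() {
    assert_eq!(float_to_int_saturating(300.0, 8, false), 255);
    assert_eq!(float_to_int_saturating(-1.0, 8, false), 0);
    assert_eq!(float_to_int_saturating(200.0, 8, true), 127);
    assert_eq!(float_to_int_saturating(-200.0, 8, true), -128);
}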
@@ -2675,10 +2765,11 @@ impl Evaluator<'_> {
             let db = self.db.upcast();
             let loc = variant.lookup(db);
             let enum_loc = loc.parent.lookup(db);
+            let edition = self.db.crate_graph()[self.crate_id].edition;
             let name = format!(
                 "{}::{}",
-                enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast()),
-                loc.id.item_tree(db)[loc.id.value].name.display(db.upcast()),
+                enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
+                loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
             );
             Err(MirEvalError::ConstEvalError(name, Box::new(e)))
         }
@@ -856,7 +856,11 @@ impl Evaluator<'_> {
             Ok(ty_name) => ty_name,
             // Fallback to human readable display in case of `Err`. Ideally we want to use `display_source_code` to
             // render full paths.
-            Err(_) => ty.display(self.db).to_string(),
+            Err(_) => {
+                let krate = locals.body.owner.krate(self.db.upcast());
+                let edition = self.db.crate_graph()[krate].edition;
+                ty.display(self.db, edition).to_string()
+            }
         };
         let len = ty_name.len();
         let addr = self.heap_allocate(len, 1)?;
@@ -1,5 +1,5 @@
 use hir_def::db::DefDatabase;
-use span::EditionedFileId;
+use span::{Edition, EditionedFileId};
 use syntax::{TextRange, TextSize};
 use test_fixture::WithFixture;

@@ -15,7 +15,7 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String),
         .declarations()
         .find_map(|x| match x {
             hir_def::ModuleDefId::FunctionId(x) => {
-                if db.function_data(x).name.display(db).to_string() == "main" {
+                if db.function_data(x).name.display(db, Edition::CURRENT).to_string() == "main" {
                     Some(x)
                 } else {
                     None
@@ -63,7 +63,7 @@ fn check_pass_and_stdio(ra_fixture: &str, expected_stdout: &str, expected_stderr
             let span_formatter = |file, range: TextRange| {
                 format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end()))
             };
-            e.pretty_print(&mut err, &db, span_formatter).unwrap();
+            e.pretty_print(&mut err, &db, span_formatter, Edition::CURRENT).unwrap();
             panic!("Error in interpreting: {err}");
         }
         Ok((stdout, stderr)) => {
|
@ -21,7 +21,7 @@ use hir_expand::name::Name;
|
||||||
use la_arena::ArenaMap;
|
use la_arena::ArenaMap;
|
||||||
use rustc_apfloat::Float;
|
use rustc_apfloat::Float;
|
||||||
use rustc_hash::FxHashMap;
|
use rustc_hash::FxHashMap;
|
||||||
use span::FileId;
|
use span::{Edition, FileId};
|
||||||
use syntax::TextRange;
|
use syntax::TextRange;
|
||||||
use triomphe::Arc;
|
use triomphe::Arc;
|
||||||
|
|
||||||
|
@ -157,13 +157,18 @@ impl MirLowerError {
|
||||||
f: &mut String,
|
f: &mut String,
|
||||||
db: &dyn HirDatabase,
|
db: &dyn HirDatabase,
|
||||||
span_formatter: impl Fn(FileId, TextRange) -> String,
|
span_formatter: impl Fn(FileId, TextRange) -> String,
|
||||||
|
edition: Edition,
|
||||||
) -> std::result::Result<(), std::fmt::Error> {
|
) -> std::result::Result<(), std::fmt::Error> {
|
||||||
match self {
|
match self {
|
||||||
MirLowerError::ConstEvalError(name, e) => {
|
MirLowerError::ConstEvalError(name, e) => {
|
||||||
writeln!(f, "In evaluating constant {name}")?;
|
writeln!(f, "In evaluating constant {name}")?;
|
||||||
match &**e {
|
match &**e {
|
||||||
ConstEvalError::MirLowerError(e) => e.pretty_print(f, db, span_formatter)?,
|
ConstEvalError::MirLowerError(e) => {
|
||||||
ConstEvalError::MirEvalError(e) => e.pretty_print(f, db, span_formatter)?,
|
e.pretty_print(f, db, span_formatter, edition)?
|
||||||
|
}
|
||||||
|
ConstEvalError::MirEvalError(e) => {
|
||||||
|
e.pretty_print(f, db, span_formatter, edition)?
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
MirLowerError::MissingFunctionDefinition(owner, it) => {
|
MirLowerError::MissingFunctionDefinition(owner, it) => {
|
||||||
|
@ -171,15 +176,15 @@ impl MirLowerError {
|
||||||
writeln!(
|
writeln!(
|
||||||
f,
|
f,
|
||||||
"Missing function definition for {}",
|
"Missing function definition for {}",
|
||||||
body.pretty_print_expr(db.upcast(), *owner, *it)
|
body.pretty_print_expr(db.upcast(), *owner, *it, edition)
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
MirLowerError::TypeMismatch(e) => match e {
|
MirLowerError::TypeMismatch(e) => match e {
|
||||||
Some(e) => writeln!(
|
Some(e) => writeln!(
|
||||||
f,
|
f,
|
||||||
"Type mismatch: Expected {}, found {}",
|
"Type mismatch: Expected {}, found {}",
|
||||||
e.expected.display(db),
|
e.expected.display(db, edition),
|
||||||
e.actual.display(db),
|
e.actual.display(db, edition),
|
||||||
)?,
|
)?,
|
||||||
None => writeln!(f, "Type mismatch: types mismatch with {{unknown}}",)?,
|
None => writeln!(f, "Type mismatch: types mismatch with {{unknown}}",)?,
|
||||||
},
|
},
|
||||||
|
@ -189,11 +194,11 @@ impl MirLowerError {
|
||||||
writeln!(
|
writeln!(
|
||||||
f,
|
f,
|
||||||
"Generic arg not provided for {}",
|
"Generic arg not provided for {}",
|
||||||
param.name().unwrap_or(&Name::missing()).display(db.upcast())
|
param.name().unwrap_or(&Name::missing()).display(db.upcast(), edition)
|
||||||
)?;
|
)?;
|
||||||
writeln!(f, "Provided args: [")?;
|
writeln!(f, "Provided args: [")?;
|
||||||
for g in subst.iter(Interner) {
|
for g in subst.iter(Interner) {
|
||||||
write!(f, " {},", g.display(db))?;
|
write!(f, " {},", g.display(db, edition))?;
|
||||||
}
|
}
|
||||||
writeln!(f, "]")?;
|
writeln!(f, "]")?;
|
||||||
}
|
}
|
||||||
|
@ -242,8 +247,8 @@ impl From<LayoutError> for MirLowerError {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl MirLowerError {
|
impl MirLowerError {
|
||||||
fn unresolved_path(db: &dyn HirDatabase, p: &Path) -> Self {
|
fn unresolved_path(db: &dyn HirDatabase, p: &Path, edition: Edition) -> Self {
|
||||||
Self::UnresolvedName(p.display(db).to_string())
|
Self::UnresolvedName(p.display(db, edition).to_string())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -337,7 +342,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
|
self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
}
|
}
|
||||||
Adjust::Borrow(AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) => {
|
Adjust::Borrow(AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) => {
|
||||||
let Some((p, current)) =
|
let Some((p, current)) =
|
||||||
self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
|
self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
|
||||||
else {
|
else {
|
||||||
|
@ -436,7 +441,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
VariantId::UnionId(_) => implementation_error!("Union variant as path"),
|
VariantId::UnionId(_) => implementation_error!("Union variant as path"),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
|
let unresolved_name =
|
||||||
|
|| MirLowerError::unresolved_path(self.db, p, self.edition());
|
||||||
let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
|
let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
|
||||||
resolver
|
resolver
|
||||||
.resolve_path_in_value_ns_fully(self.db.upcast(), p)
|
.resolve_path_in_value_ns_fully(self.db.upcast(), p)
|
||||||
|
@ -662,7 +668,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
let (func_id, generic_args) =
|
let (func_id, generic_args) =
|
||||||
self.infer.method_resolution(expr_id).ok_or_else(|| {
|
self.infer.method_resolution(expr_id).ok_or_else(|| {
|
||||||
MirLowerError::UnresolvedMethod(
|
MirLowerError::UnresolvedMethod(
|
||||||
method_name.display(self.db.upcast()).to_string(),
|
method_name.display(self.db.upcast(), self.edition()).to_string(),
|
||||||
)
|
)
|
||||||
})?;
|
})?;
|
||||||
let func = Operand::from_fn(self.db, func_id, generic_args);
|
let func = Operand::from_fn(self.db, func_id, generic_args);
|
||||||
|
@ -803,7 +809,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
};
|
};
|
||||||
let variant_id =
|
let variant_id =
|
||||||
self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {
|
self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {
|
||||||
Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()),
|
Some(p) => MirLowerError::UnresolvedName(
|
||||||
|
p.display(self.db, self.edition()).to_string(),
|
||||||
|
),
|
||||||
None => MirLowerError::RecordLiteralWithoutPath,
|
None => MirLowerError::RecordLiteralWithoutPath,
|
||||||
})?;
|
})?;
|
||||||
let subst = match self.expr_ty_without_adjust(expr_id).kind(Interner) {
|
let subst = match self.expr_ty_without_adjust(expr_id).kind(Interner) {
|
||||||
|
@ -1172,8 +1180,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
let placeholder_subst = self.placeholder_subst();
|
let placeholder_subst = self.placeholder_subst();
|
||||||
let tmp_ty =
|
let tmp_ty =
|
||||||
capture.ty.clone().substitute(Interner, &placeholder_subst);
|
capture.ty.clone().substitute(Interner, &placeholder_subst);
|
||||||
let tmp: Place = self.temp(tmp_ty, current, capture.span)?.into();
|
// FIXME: Handle more than one span.
|
||||||
self.push_assignment(current, tmp, Rvalue::Ref(*bk, p), capture.span);
|
let capture_spans = capture.spans();
|
||||||
|
let tmp: Place = self.temp(tmp_ty, current, capture_spans[0])?.into();
|
||||||
|
self.push_assignment(
|
||||||
|
current,
|
||||||
|
tmp,
|
||||||
|
Rvalue::Ref(*bk, p),
|
||||||
|
capture_spans[0],
|
||||||
|
);
|
||||||
operands.push(Operand::Move(tmp));
|
operands.push(Operand::Move(tmp));
|
||||||
}
|
}
|
||||||
CaptureKind::ByValue => operands.push(Operand::Move(p)),
|
CaptureKind::ByValue => operands.push(Operand::Move(p)),
|
||||||
|
@ -1378,7 +1393,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
"only `char` and numeric types are allowed in range patterns"
|
"only `char` and numeric types are allowed in range patterns"
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
let unresolved_name = || MirLowerError::unresolved_path(self.db, c.as_ref());
|
let edition = self.edition();
|
||||||
|
let unresolved_name =
|
||||||
|
|| MirLowerError::unresolved_path(self.db, c.as_ref(), edition);
|
||||||
let resolver = self.owner.resolver(self.db.upcast());
|
let resolver = self.owner.resolver(self.db.upcast());
|
||||||
let pr = resolver
|
let pr = resolver
|
||||||
.resolve_path_in_value_ns(self.db.upcast(), c.as_ref())
|
.resolve_path_in_value_ns(self.db.upcast(), c.as_ref())
|
||||||
|
@ -1904,19 +1921,25 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
match r {
|
match r {
|
||||||
Ok(r) => Ok(r),
|
Ok(r) => Ok(r),
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
|
let edition = self.edition();
|
||||||
let db = self.db.upcast();
|
let db = self.db.upcast();
|
||||||
let loc = variant.lookup(db);
|
let loc = variant.lookup(db);
|
||||||
let enum_loc = loc.parent.lookup(db);
|
let enum_loc = loc.parent.lookup(db);
|
||||||
let name = format!(
|
let name = format!(
|
||||||
"{}::{}",
|
"{}::{}",
|
||||||
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast()),
|
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
|
||||||
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast()),
|
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
|
||||||
);
|
);
|
||||||
Err(MirLowerError::ConstEvalError(name.into(), Box::new(e)))
|
Err(MirLowerError::ConstEvalError(name.into(), Box::new(e)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn edition(&self) -> Edition {
|
||||||
|
let krate = self.owner.krate(self.db.upcast());
|
||||||
|
self.db.crate_graph()[krate].edition
|
||||||
|
}
|
||||||
|
|
||||||
fn drop_until_scope(
|
fn drop_until_scope(
|
||||||
&mut self,
|
&mut self,
|
||||||
scope_index: usize,
|
scope_index: usize,
|
||||||
|
@ -2121,18 +2144,24 @@ pub fn mir_body_for_closure_query(
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<MirBody>> {
|
pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<MirBody>> {
|
||||||
|
let krate = def.krate(db.upcast());
|
||||||
|
let edition = db.crate_graph()[krate].edition;
|
||||||
let detail = match def {
|
let detail = match def {
|
||||||
DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(),
|
DefWithBodyId::FunctionId(it) => {
|
||||||
DefWithBodyId::StaticId(it) => db.static_data(it).name.display(db.upcast()).to_string(),
|
db.function_data(it).name.display(db.upcast(), edition).to_string()
|
||||||
|
}
|
||||||
|
DefWithBodyId::StaticId(it) => {
|
||||||
|
db.static_data(it).name.display(db.upcast(), edition).to_string()
|
||||||
|
}
|
||||||
DefWithBodyId::ConstId(it) => db
|
DefWithBodyId::ConstId(it) => db
|
||||||
.const_data(it)
|
.const_data(it)
|
||||||
.name
|
.name
|
||||||
.clone()
|
.clone()
|
||||||
.unwrap_or_else(Name::missing)
|
.unwrap_or_else(Name::missing)
|
||||||
.display(db.upcast())
|
.display(db.upcast(), edition)
|
||||||
.to_string(),
|
.to_string(),
|
||||||
DefWithBodyId::VariantId(it) => {
|
DefWithBodyId::VariantId(it) => {
|
||||||
db.enum_variant_data(it).name.display(db.upcast()).to_string()
|
db.enum_variant_data(it).name.display(db.upcast(), edition).to_string()
|
||||||
}
|
}
|
||||||
DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
|
DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
|
||||||
};
|
};
|
||||||
|
|
|
@ -347,7 +347,8 @@ impl MirLowerCtx<'_> {
|
||||||
// A const don't bind anything. Only needs check.
|
// A const don't bind anything. Only needs check.
|
||||||
return Ok((current, current_else));
|
return Ok((current, current_else));
|
||||||
}
|
}
|
||||||
let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
|
let unresolved_name =
|
||||||
|
|| MirLowerError::unresolved_path(self.db, p, self.edition());
|
||||||
let resolver = self.owner.resolver(self.db.upcast());
|
let resolver = self.owner.resolver(self.db.upcast());
|
||||||
let pr = resolver
|
let pr = resolver
|
||||||
.resolve_path_in_value_ns(self.db.upcast(), p)
|
.resolve_path_in_value_ns(self.db.upcast(), p)
|
||||||
|
|
|
@ -9,6 +9,7 @@ use either::Either;
|
||||||
use hir_def::{body::Body, hir::BindingId};
|
use hir_def::{body::Body, hir::BindingId};
|
||||||
use hir_expand::{name::Name, Lookup};
|
use hir_expand::{name::Name, Lookup};
|
||||||
use la_arena::ArenaMap;
|
use la_arena::ArenaMap;
|
||||||
|
use span::Edition;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
db::HirDatabase,
|
db::HirDatabase,
|
||||||
|
@ -44,18 +45,21 @@ impl MirBody {
|
||||||
ctx.for_body(|this| match ctx.body.owner {
|
ctx.for_body(|this| match ctx.body.owner {
|
||||||
hir_def::DefWithBodyId::FunctionId(id) => {
|
hir_def::DefWithBodyId::FunctionId(id) => {
|
||||||
let data = db.function_data(id);
|
let data = db.function_data(id);
|
||||||
w!(this, "fn {}() ", data.name.display(db.upcast()));
|
w!(this, "fn {}() ", data.name.display(db.upcast(), Edition::LATEST));
|
||||||
}
|
}
|
||||||
hir_def::DefWithBodyId::StaticId(id) => {
|
hir_def::DefWithBodyId::StaticId(id) => {
|
||||||
let data = db.static_data(id);
|
let data = db.static_data(id);
|
||||||
w!(this, "static {}: _ = ", data.name.display(db.upcast()));
|
w!(this, "static {}: _ = ", data.name.display(db.upcast(), Edition::LATEST));
|
||||||
}
|
}
|
||||||
hir_def::DefWithBodyId::ConstId(id) => {
|
hir_def::DefWithBodyId::ConstId(id) => {
|
||||||
let data = db.const_data(id);
|
let data = db.const_data(id);
|
||||||
w!(
|
w!(
|
||||||
this,
|
this,
|
||||||
"const {}: _ = ",
|
"const {}: _ = ",
|
||||||
data.name.as_ref().unwrap_or(&Name::missing()).display(db.upcast())
|
data.name
|
||||||
|
.as_ref()
|
||||||
|
.unwrap_or(&Name::missing())
|
||||||
|
.display(db.upcast(), Edition::LATEST)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
hir_def::DefWithBodyId::VariantId(id) => {
|
hir_def::DefWithBodyId::VariantId(id) => {
|
||||||
|
@ -64,8 +68,12 @@ impl MirBody {
|
||||||
w!(
|
w!(
|
||||||
this,
|
this,
|
||||||
"enum {}::{} = ",
|
"enum {}::{} = ",
|
||||||
enum_loc.id.item_tree(db.upcast())[enum_loc.id.value].name.display(db.upcast()),
|
enum_loc.id.item_tree(db.upcast())[enum_loc.id.value]
|
||||||
loc.id.item_tree(db.upcast())[loc.id.value].name.display(db.upcast()),
|
.name
|
||||||
|
.display(db.upcast(), Edition::LATEST),
|
||||||
|
loc.id.item_tree(db.upcast())[loc.id.value]
|
||||||
|
.name
|
||||||
|
.display(db.upcast(), Edition::LATEST),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
hir_def::DefWithBodyId::InTypeConstId(id) => {
|
hir_def::DefWithBodyId::InTypeConstId(id) => {
|
||||||
|
@ -122,7 +130,7 @@ impl HirDisplay for LocalName {
|
||||||
match self {
|
match self {
|
||||||
LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())),
|
LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())),
|
||||||
LocalName::Binding(n, l) => {
|
LocalName::Binding(n, l) => {
|
||||||
write!(f, "{}_{}", n.display(f.db.upcast()), u32::from(l.into_raw()))
|
write!(f, "{}_{}", n.display(f.db.upcast(), f.edition()), u32::from(l.into_raw()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -200,7 +208,7 @@ impl<'a> MirPrettyCtx<'a> {
|
||||||
wln!(
|
wln!(
|
||||||
self,
|
self,
|
||||||
"let {}: {};",
|
"let {}: {};",
|
||||||
self.local_name(id).display(self.db),
|
self.local_name(id).display_test(self.db),
|
||||||
self.hir_display(&local.ty)
|
self.hir_display(&local.ty)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -231,10 +239,18 @@ impl<'a> MirPrettyCtx<'a> {
|
||||||
wln!(this, ";");
|
wln!(this, ";");
|
||||||
}
|
}
|
||||||
StatementKind::StorageDead(p) => {
|
StatementKind::StorageDead(p) => {
|
||||||
wln!(this, "StorageDead({})", this.local_name(*p).display(self.db));
|
wln!(
|
||||||
|
this,
|
||||||
|
"StorageDead({})",
|
||||||
|
this.local_name(*p).display_test(self.db)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
StatementKind::StorageLive(p) => {
|
StatementKind::StorageLive(p) => {
|
||||||
wln!(this, "StorageLive({})", this.local_name(*p).display(self.db));
|
wln!(
|
||||||
|
this,
|
||||||
|
"StorageLive({})",
|
||||||
|
this.local_name(*p).display_test(self.db)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
StatementKind::Deinit(p) => {
|
StatementKind::Deinit(p) => {
|
||||||
w!(this, "Deinit(");
|
w!(this, "Deinit(");
|
||||||
|
@ -297,7 +313,7 @@ impl<'a> MirPrettyCtx<'a> {
|
||||||
fn f(this: &mut MirPrettyCtx<'_>, local: LocalId, projections: &[PlaceElem]) {
|
fn f(this: &mut MirPrettyCtx<'_>, local: LocalId, projections: &[PlaceElem]) {
|
||||||
let Some((last, head)) = projections.split_last() else {
|
let Some((last, head)) = projections.split_last() else {
|
||||||
// no projection
|
// no projection
|
||||||
w!(this, "{}", this.local_name(local).display(this.db));
|
w!(this, "{}", this.local_name(local).display_test(this.db));
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
match last {
|
match last {
|
||||||
|
@ -317,13 +333,13 @@ impl<'a> MirPrettyCtx<'a> {
|
||||||
w!(
|
w!(
|
||||||
this,
|
this,
|
||||||
" as {}).{}",
|
" as {}).{}",
|
||||||
variant_name.display(this.db.upcast()),
|
variant_name.display(this.db.upcast(), Edition::LATEST),
|
||||||
name.display(this.db.upcast())
|
name.display(this.db.upcast(), Edition::LATEST)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
hir_def::VariantId::StructId(_) | hir_def::VariantId::UnionId(_) => {
|
hir_def::VariantId::StructId(_) | hir_def::VariantId::UnionId(_) => {
|
||||||
f(this, local, head);
|
f(this, local, head);
|
||||||
w!(this, ".{}", name.display(this.db.upcast()));
|
w!(this, ".{}", name.display(this.db.upcast(), Edition::LATEST));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -337,7 +353,7 @@ impl<'a> MirPrettyCtx<'a> {
|
||||||
}
|
}
|
||||||
ProjectionElem::Index(l) => {
|
ProjectionElem::Index(l) => {
|
||||||
f(this, local, head);
|
f(this, local, head);
|
||||||
w!(this, "[{}]", this.local_name(*l).display(this.db));
|
w!(this, "[{}]", this.local_name(*l).display_test(this.db));
|
||||||
}
|
}
|
||||||
it => {
|
it => {
|
||||||
f(this, local, head);
|
f(this, local, head);
|
||||||
|
@ -387,7 +403,7 @@ impl<'a> MirPrettyCtx<'a> {
|
||||||
Rvalue::Repeat(op, len) => {
|
Rvalue::Repeat(op, len) => {
|
||||||
w!(self, "[");
|
w!(self, "[");
|
||||||
self.operand(op);
|
self.operand(op);
|
||||||
w!(self, "; {}]", len.display(self.db));
|
w!(self, "; {}]", len.display_test(self.db));
|
||||||
}
|
}
|
||||||
Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => {
|
Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => {
|
||||||
w!(self, "Adt(");
|
w!(self, "Adt(");
|
||||||
|
@ -458,6 +474,6 @@ impl<'a> MirPrettyCtx<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn hir_display<T: HirDisplay>(&self, ty: &'a T) -> impl Display + 'a {
|
fn hir_display<T: HirDisplay>(&self, ty: &'a T) -> impl Display + 'a {
|
||||||
ty.display(self.db).with_closure_style(ClosureStyle::ClosureWithSubst)
|
ty.display_test(self.db).with_closure_style(ClosureStyle::ClosureWithSubst)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@@ -1,3 +1,4 @@
+mod closure_captures;
 mod coercion;
 mod diagnostics;
 mod display_source_code;
@@ -12,6 +13,7 @@ mod traits;
 mod type_alias_impl_traits;

 use std::env;
+use std::sync::LazyLock;

 use base_db::SourceDatabaseFileInputExt as _;
 use expect_test::Expect;
@@ -25,7 +27,7 @@ use hir_def::{
     AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId,
 };
 use hir_expand::{db::ExpandDatabase, FileRange, InFile};
-use once_cell::race::OnceBool;
+use itertools::Itertools;
 use rustc_hash::FxHashMap;
 use stdx::format_to;
 use syntax::{
@@ -50,8 +52,8 @@ use crate::{
 // `env UPDATE_EXPECT=1 cargo test -p hir_ty` to update the snapshots.

 fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
-    static ENABLE: OnceBool = OnceBool::new();
-    if !ENABLE.get_or_init(|| env::var("CHALK_DEBUG").is_ok()) {
+    static ENABLE: LazyLock<bool> = LazyLock::new(|| env::var("CHALK_DEBUG").is_ok());
+    if !*ENABLE {
         return None;
     }

@@ -94,7 +96,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
     let mut had_annotations = false;
     let mut mismatches = FxHashMap::default();
     let mut types = FxHashMap::default();
-    let mut adjustments = FxHashMap::<_, Vec<_>>::default();
+    let mut adjustments = FxHashMap::default();
     for (file_id, annotations) in db.extract_annotations() {
         for (range, expected) in annotations {
             let file_range = FileRange { file_id, range };
@@ -107,13 +109,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
             } else if expected.starts_with("adjustments:") {
                 adjustments.insert(
                     file_range,
-                    expected
-                        .trim_start_matches("adjustments:")
-                        .trim()
-                        .split(',')
-                        .map(|it| it.trim().to_owned())
-                        .filter(|it| !it.is_empty())
-                        .collect(),
+                    expected.trim_start_matches("adjustments:").trim().to_owned(),
                 );
             } else {
                 panic!("unexpected annotation: {expected}");
@@ -200,7 +196,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
                 adjustments
                     .iter()
                     .map(|Adjustment { kind, .. }| format!("{kind:?}"))
-                    .collect::<Vec<_>>()
+                    .join(", ")
             );
         }
     }
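The `setup_tracing` hunk above replaces `once_cell::race::OnceBool` with the standard library's `std::sync::LazyLock`, which was stabilized in Rust 1.80. A minimal sketch of the same migration in isolation (the environment variable is kept only as an example):

use std::env;
use std::sync::LazyLock;

// Computed once, on first access, and cached for every later read.
// This is the std replacement for the once_cell-based lazy flag.
static CHALK_DEBUG: LazyLock<bool> = LazyLock::new(|| env::var("CHALK_DEBUG").is_ok());

fn main() {
    if *CHALK_DEBUG {
        println!("chalk debugging enabled");
    } else {
        println!("chalk debugging disabled");
    }
}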
433  crates/hir-ty/src/tests/closure_captures.rs  Normal file

@@ -0,0 +1,433 @@
|
||||||
|
use base_db::salsa::InternKey;
|
||||||
|
use expect_test::{expect, Expect};
|
||||||
|
use hir_def::db::DefDatabase;
|
||||||
|
use hir_expand::files::InFileWrapper;
|
||||||
|
use itertools::Itertools;
|
||||||
|
use span::{HirFileId, TextRange};
|
||||||
|
use syntax::{AstNode, AstPtr};
|
||||||
|
use test_fixture::WithFixture;
|
||||||
|
|
||||||
|
use crate::db::{HirDatabase, InternedClosureId};
|
||||||
|
use crate::display::HirDisplay;
|
||||||
|
use crate::mir::MirSpan;
|
||||||
|
use crate::test_db::TestDB;
|
||||||
|
|
||||||
|
use super::visit_module;
|
||||||
|
|
||||||
|
fn check_closure_captures(ra_fixture: &str, expect: Expect) {
|
||||||
|
let (db, file_id) = TestDB::with_single_file(ra_fixture);
|
||||||
|
let module = db.module_for_file(file_id);
|
||||||
|
let def_map = module.def_map(&db);
|
||||||
|
|
||||||
|
let mut defs = Vec::new();
|
||||||
|
visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
|
||||||
|
|
||||||
|
let mut captures_info = Vec::new();
|
||||||
|
for def in defs {
|
||||||
|
let infer = db.infer(def);
|
||||||
|
let db = &db;
|
||||||
|
captures_info.extend(infer.closure_info.iter().flat_map(|(closure_id, (captures, _))| {
|
||||||
|
let closure = db.lookup_intern_closure(InternedClosureId::from_intern_id(closure_id.0));
|
||||||
|
let (_, source_map) = db.body_with_source_map(closure.0);
|
||||||
|
let closure_text_range = source_map
|
||||||
|
.expr_syntax(closure.1)
|
||||||
|
.expect("failed to map closure to SyntaxNode")
|
||||||
|
.value
|
||||||
|
.text_range();
|
||||||
|
captures.iter().map(move |capture| {
|
||||||
|
fn text_range<N: AstNode>(
|
||||||
|
db: &TestDB,
|
||||||
|
syntax: InFileWrapper<HirFileId, AstPtr<N>>,
|
||||||
|
) -> TextRange {
|
||||||
|
let root = syntax.file_syntax(db);
|
||||||
|
syntax.value.to_node(&root).syntax().text_range()
|
||||||
|
}
|
||||||
|
|
||||||
|
// FIXME: Deduplicate this with hir::Local::sources().
|
||||||
|
let (body, source_map) = db.body_with_source_map(closure.0);
|
||||||
|
let local_text_range = match body.self_param.zip(source_map.self_param_syntax()) {
|
||||||
|
Some((param, source)) if param == capture.local() => {
|
||||||
|
format!("{:?}", text_range(db, source))
|
||||||
|
}
|
||||||
|
_ => source_map
|
||||||
|
.patterns_for_binding(capture.local())
|
||||||
|
.iter()
|
||||||
|
.map(|&definition| {
|
||||||
|
text_range(db, source_map.pat_syntax(definition).unwrap())
|
||||||
|
})
|
||||||
|
.map(|it| format!("{it:?}"))
|
||||||
|
.join(", "),
|
||||||
|
};
|
||||||
|
let place = capture.display_place(closure.0, db);
|
||||||
|
let capture_ty = capture.ty.skip_binders().display_test(db).to_string();
|
||||||
|
let spans = capture
|
||||||
|
.spans()
|
||||||
|
.iter()
|
||||||
|
.flat_map(|span| match *span {
|
||||||
|
MirSpan::ExprId(expr) => {
|
||||||
|
vec![text_range(db, source_map.expr_syntax(expr).unwrap())]
|
||||||
|
}
|
||||||
|
MirSpan::PatId(pat) => {
|
||||||
|
vec![text_range(db, source_map.pat_syntax(pat).unwrap())]
|
||||||
|
}
|
||||||
|
MirSpan::BindingId(binding) => source_map
|
||||||
|
.patterns_for_binding(binding)
|
||||||
|
.iter()
|
||||||
|
.map(|pat| text_range(db, source_map.pat_syntax(*pat).unwrap()))
|
||||||
|
.collect(),
|
||||||
|
MirSpan::SelfParam => {
|
||||||
|
vec![text_range(db, source_map.self_param_syntax().unwrap())]
|
||||||
|
}
|
||||||
|
MirSpan::Unknown => Vec::new(),
|
||||||
|
})
|
||||||
|
.sorted_by_key(|it| it.start())
|
||||||
|
.map(|it| format!("{it:?}"))
|
||||||
|
.join(",");
|
||||||
|
|
||||||
|
(closure_text_range, local_text_range, spans, place, capture_ty, capture.kind())
|
||||||
|
})
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
captures_info.sort_unstable_by_key(|(closure_text_range, local_text_range, ..)| {
|
||||||
|
(closure_text_range.start(), local_text_range.clone())
|
||||||
|
});
|
||||||
|
|
||||||
|
let rendered = captures_info
|
||||||
|
.iter()
|
||||||
|
.map(|(closure_text_range, local_text_range, spans, place, capture_ty, capture_kind)| {
|
||||||
|
format!(
|
||||||
|
"{closure_text_range:?};{local_text_range};{spans} {capture_kind:?} {place} {capture_ty}"
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.join("\n");
|
||||||
|
|
||||||
|
expect.assert_eq(&rendered);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
fn deref_in_let() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { let b = *a; };
}
"#,
        expect!["53..71;20..21;66..68 ByRef(Shared) *a &'? bool"],
    );
}

#[test]
fn deref_then_ref_pattern() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { let &mut ref b = a; };
}
"#,
        expect!["53..79;20..21;67..72 ByRef(Shared) *a &'? bool"],
    );
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { let &mut ref mut b = a; };
}
"#,
        expect!["53..83;20..21;67..76 ByRef(Mut { kind: Default }) *a &'? mut bool"],
    );
}

#[test]
fn unique_borrow() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { *a = false; };
}
"#,
        expect!["53..71;20..21;58..60 ByRef(Mut { kind: Default }) *a &'? mut bool"],
    );
}

#[test]
fn deref_ref_mut() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { let ref mut b = *a; };
}
"#,
        expect!["53..79;20..21;62..71 ByRef(Mut { kind: Default }) *a &'? mut bool"],
    );
}

#[test]
fn let_else_not_consuming() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { let _ = *a else { return; }; };
}
"#,
        expect!["53..88;20..21;66..68 ByRef(Shared) *a &'? bool"],
    );
}

#[test]
fn consume() {
    check_closure_captures(
        r#"
//- minicore:copy
struct NonCopy;
fn main() {
    let a = NonCopy;
    let closure = || { let b = a; };
}
"#,
        expect!["67..84;36..37;80..81 ByValue a NonCopy"],
    );
}

#[test]
fn ref_to_upvar() {
    check_closure_captures(
        r#"
//- minicore:copy
struct NonCopy;
fn main() {
    let mut a = NonCopy;
    let closure = || { let b = &a; };
    let closure = || { let c = &mut a; };
}
"#,
        expect![[r#"
            71..89;36..41;84..86 ByRef(Shared) a &'? NonCopy
            109..131;36..41;122..128 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]],
    );
}

#[test]
fn field() {
    check_closure_captures(
        r#"
//- minicore:copy
struct Foo { a: i32, b: i32 }
fn main() {
    let a = Foo { a: 0, b: 0 };
    let closure = || { let b = a.a; };
}
"#,
        expect!["92..111;50..51;105..108 ByRef(Shared) a.a &'? i32"],
    );
}

#[test]
fn fields_different_mode() {
    check_closure_captures(
        r#"
//- minicore:copy
struct NonCopy;
struct Foo { a: i32, b: i32, c: NonCopy, d: bool }
fn main() {
    let mut a = Foo { a: 0, b: 0 };
    let closure = || {
        let b = &a.a;
        let c = &mut a.b;
        let d = a.c;
    };
}
"#,
        expect![[r#"
            133..212;87..92;154..158 ByRef(Shared) a.a &'? i32
            133..212;87..92;176..184 ByRef(Mut { kind: Default }) a.b &'? mut i32
            133..212;87..92;202..205 ByValue a.c NonCopy"#]],
    );
}

#[test]
fn autoref() {
    check_closure_captures(
        r#"
//- minicore:copy
struct Foo;
impl Foo {
    fn imm(&self) {}
    fn mut_(&mut self) {}
}
fn main() {
    let mut a = Foo;
    let closure = || a.imm();
    let closure = || a.mut_();
}
"#,
        expect![[r#"
            123..133;92..97;126..127 ByRef(Shared) a &'? Foo
            153..164;92..97;156..157 ByRef(Mut { kind: Default }) a &'? mut Foo"#]],
    );
}

#[test]
fn captures_priority() {
    check_closure_captures(
        r#"
//- minicore:copy
struct NonCopy;
fn main() {
    let mut a = &mut true;
    // Max ByRef(Mut { kind: Default })
    let closure = || {
        *a = false;
        let b = &mut a;
    };
    // Max ByValue
    let mut a = NonCopy;
    let closure = || {
        let b = a;
        let c = &mut a;
        let d = &a;
    };
}
"#,
        expect![[r#"
            113..167;36..41;127..128,154..160 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool
            231..304;196..201;252..253,276..277,296..297 ByValue a NonCopy"#]],
    );
}

#[test]
fn let_underscore() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let mut a = true;
    let closure = || { let _ = a; };
}
"#,
        expect![""],
    );
}

#[test]
fn match_wildcard() {
    check_closure_captures(
        r#"
//- minicore:copy
struct NonCopy;
fn main() {
    let mut a = NonCopy;
    let closure = || match a {
        _ => {}
    };
    let closure = || match a {
        ref b => {}
    };
    let closure = || match a {
        ref mut b => {}
    };
}
"#,
        expect![[r#"
            125..163;36..41;134..135 ByRef(Shared) a &'? NonCopy
            183..225;36..41;192..193 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]],
    );
}

#[test]
fn multiple_bindings() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let mut a = false;
    let mut closure = || { let (b | b) = a; };
}
"#,
        expect!["57..80;20..25;76..77,76..77 ByRef(Shared) a &'? bool"],
    );
}

#[test]
fn multiple_usages() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let mut a = false;
    let mut closure = || {
        let b = &a;
        let c = &a;
        let d = &mut a;
        a = true;
    };
}
"#,
        expect!["57..149;20..25;78..80,98..100,118..124,134..135 ByRef(Mut { kind: Default }) a &'? mut bool"],
    );
}

#[test]
fn ref_then_deref() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let mut a = false;
    let mut closure = || { let b = *&mut a; };
}
"#,
        expect!["57..80;20..25;71..77 ByRef(Mut { kind: Default }) a &'? mut bool"],
    );
}

#[test]
fn ref_of_ref() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let mut a = &false;
    let closure = || { let b = &a; };
    let closure = || { let b = &mut a; };
    let a = &mut false;
    let closure = || { let b = &a; };
    let closure = || { let b = &mut a; };
}
"#,
        expect![[r#"
            54..72;20..25;67..69 ByRef(Shared) a &'? &'? bool
            92..114;20..25;105..111 ByRef(Mut { kind: Default }) a &'? mut &'? bool
            158..176;124..125;171..173 ByRef(Shared) a &'? &'? mut bool
            196..218;124..125;209..215 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool"#]],
    );
}

#[test]
fn multiple_capture_usages() {
    check_closure_captures(
        r#"
//- minicore:copy
struct A { a: i32, b: bool }
fn main() {
    let mut a = A { a: 123, b: false };
    let closure = |$0| {
        let b = a.b;
        a = A { a: 456, b: true };
    };
    closure();
}
"#,
        expect!["99..165;49..54;120..121,133..134 ByRef(Mut { kind: Default }) a &'? mut A"],
    );
}

@@ -49,7 +49,7 @@ fn let_stmt_coerce() {
 //- minicore: coerce_unsized
 fn test() {
 let x: &[isize] = &[1];
-// ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+// ^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not)), Pointer(Unsize)
 let x: *const [isize] = &[1];
 // ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize)
 }

@@ -96,7 +96,7 @@ fn foo<T>(x: &[T]) -> &[T] { x }
 fn test() {
 let x = if true {
 foo(&[1])
-// ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+// ^^^^ adjustments: Deref(None), Borrow(Ref('?8, Not)), Pointer(Unsize)
 } else {
 &[1]
 };

@@ -148,7 +148,7 @@ fn foo<T>(x: &[T]) -> &[T] { x }
 fn test(i: i32) {
 let x = match i {
 2 => foo(&[2]),
-// ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+// ^^^^ adjustments: Deref(None), Borrow(Ref('?10, Not)), Pointer(Unsize)
 1 => &[1],
 _ => &[3],
 };

@@ -267,7 +267,7 @@ fn takes_ref_str(x: &str) {}
 fn returns_string() -> String { loop {} }
 fn test() {
 takes_ref_str(&{ returns_string() });
-// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
+// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('{error}, Not))
 }
 "#,
 );
@@ -849,8 +849,8 @@ impl core::cmp::PartialEq for Struct {
 }
 fn test() {
 Struct == Struct;
-// ^^^^^^ adjustments: Borrow(Ref(Not))
-// ^^^^^^ adjustments: Borrow(Ref(Not))
+// ^^^^^^ adjustments: Borrow(Ref('{error}, Not))
+// ^^^^^^ adjustments: Borrow(Ref('{error}, Not))
 }",
 );
 }

@@ -866,7 +866,7 @@ impl core::ops::AddAssign for Struct {
 }
 fn test() {
 Struct += Struct;
-// ^^^^^^ adjustments: Borrow(Ref(Mut))
+// ^^^^^^ adjustments: Borrow(Ref('{error}, Mut))
 // ^^^^^^ adjustments:
 }",
 );

@@ -880,7 +880,7 @@ fn adjust_index() {
 fn test() {
 let x = [1, 2, 3];
 x[2] = 6;
-// ^ adjustments: Borrow(Ref(Mut))
+// ^ adjustments: Borrow(Ref('?8, Mut))
 }
 ",
 );

@@ -905,11 +905,11 @@ impl core::ops::IndexMut for StructMut {
 }
 fn test() {
 Struct[0];
-// ^^^^^^ adjustments: Borrow(Ref(Not))
+// ^^^^^^ adjustments: Borrow(Ref('?2, Not))
 StructMut[0];
-// ^^^^^^^^^ adjustments: Borrow(Ref(Not))
+// ^^^^^^^^^ adjustments: Borrow(Ref('?5, Not))
 &mut StructMut[0];
-// ^^^^^^^^^ adjustments: Borrow(Ref(Mut))
+// ^^^^^^^^^ adjustments: Borrow(Ref('?8, Mut))
 }",
 );
 }
@@ -1186,11 +1186,11 @@ fn test() {
 89..109 '{ ... }': bool
 99..103 'true': bool
 123..167 '{ ...o(); }': ()
-133..134 's': &'? S
+133..134 's': &'static S
 137..151 'unsafe { f() }': &'static S
 146..147 'f': fn f() -> &'static S
 146..149 'f()': &'static S
-157..158 's': &'? S
+157..158 's': &'static S
 157..164 's.foo()': bool
 "#]],
 );

@@ -1847,9 +1847,9 @@ impl Foo {
 }
 fn test() {
 Foo.foo();
-//^^^ adjustments: Borrow(Ref(Not))
+//^^^ adjustments: Borrow(Ref('?1, Not))
 (&Foo).foo();
-// ^^^^ adjustments: Deref(None), Borrow(Ref(Not))
+// ^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not))
 }
 "#,
 );

@@ -1863,7 +1863,7 @@ fn receiver_adjustment_unsize_array() {
 fn test() {
 let a = [1, 2, 3];
 a.len();
-} //^ adjustments: Borrow(Ref(Not)), Pointer(Unsize)
+} //^ adjustments: Borrow(Ref('?7, Not)), Pointer(Unsize)
 "#,
 );
 }

@@ -2076,7 +2076,7 @@ impl Foo {
 }
 fn test() {
 Box::new(Foo).foo();
-//^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref(Not))
+//^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not))
 }
 "#,
 );

@@ -2094,7 +2094,7 @@ impl Foo {
 use core::mem::ManuallyDrop;
 fn test() {
 ManuallyDrop::new(Foo).foo();
-//^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
+//^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('?4, Not))
 }
 "#,
 );
@@ -2141,3 +2141,90 @@ fn test() {
 }"#,
 );
 }
+
+#[test]
+fn issue_17866() {
+    check_infer(
+        r#"
+trait T {
+    type A;
+}
+
+type Foo = <S as T>::A;
+
+fn main() {
+    Foo {};
+}
+"#,
+        expect![[r#"
+            60..75 '{ Foo {}; }': ()
+            66..72 'Foo {}': {unknown}
+        "#]],
+    );
+}
+
+#[test]
+fn issue_17711() {
+    check_infer(
+        r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct Struct<'a, T>(&'a T);
+
+trait Trait {}
+
+impl<'a, T: Deref<Target = impl Trait>> Struct<'a, T> {
+    fn foo(&self) -> &Self { self }
+
+    fn bar(&self) {
+        let _ = self.foo();
+    }
+
+}
+"#,
+        expect![[r#"
+            137..141 'self': &'? Struct<'a, T>
+            152..160 '{ self }': &'? Struct<'a, T>
+            154..158 'self': &'? Struct<'a, T>
+            174..178 'self': &'? Struct<'a, T>
+            180..215 '{ ... }': ()
+            194..195 '_': &'? Struct<'?, T>
+            198..202 'self': &'? Struct<'a, T>
+            198..208 'self.foo()': &'? Struct<'?, T>
+        "#]],
+    );
+}
+
+#[test]
+fn issue_17767() {
+    check_infer(
+        r#"
+extern "C" {
+    type Foo<T>;
+}
+
+fn f() -> Foo {}
+"#,
+        expect![[r#"
+            47..49 '{}': Foo
+        "#]],
+    );
+}
+
+#[test]
+fn issue_17921() {
+    check_infer(
+        r#"
+//- minicore: future
+trait Foo {}
+type Bar = impl Foo;
+
+async fn f<A, B, C>() -> Bar {}
+"#,
+        expect![[r#"
+            64..66 '{}': ()
+            64..66 '{}': impl Future<Output = ()>
+        "#]],
+    );
+}
@@ -1201,8 +1201,8 @@ fn infer_array() {
 209..215 '[1, 2]': [i32; 2]
 210..211 '1': i32
 213..214 '2': i32
-225..226 'i': [&'? str; 2]
-229..239 '["a", "b"]': [&'? str; 2]
+225..226 'i': [&'static str; 2]
+229..239 '["a", "b"]': [&'static str; 2]
 230..233 '"a"': &'static str
 235..238 '"b"': &'static str
 250..251 'b': [[&'? str; 1]; 2]
@@ -3686,3 +3686,36 @@ fn main() {
 "#,
 );
 }
+
+#[test]
+fn infer_bad_lang_item() {
+    check_infer(
+        r#"
+#[lang="eq"]
+pub trait Eq {
+    fn eq(&self, ) -> bool;
+
+}
+
+#[lang="shr"]
+pub trait Shr<RHS,Result> {
+    fn shr(&self, rhs: &RHS) -> Result;
+}
+
+fn test() -> bool {
+    1 >> 1;
+    1 == 1;
+}
+"#,
+        expect![[r#"
+            39..43 'self': &'? Self
+            114..118 'self': &'? Self
+            120..123 'rhs': &'? RHS
+            163..190 '{ ...= 1; }': bool
+            169..170 '1': i32
+            169..175 '1 >> 1': {unknown}
+            181..182 '1': i32
+            181..187 '1 == 1': {unknown}
+        "#]],
+    );
+}
@@ -2,6 +2,7 @@
 use std::fmt::{self, Display};
 
 use itertools::Itertools;
+use span::Edition;
 
 use crate::{
 chalk_db, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, mapping::from_chalk,

@@ -24,7 +25,7 @@ impl DebugContext<'_> {
 AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
 AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
 };
-name.display(self.0.upcast()).fmt(f)?;
+name.display(self.0.upcast(), Edition::LATEST).fmt(f)?;
 Ok(())
 }
@@ -35,7 +36,7 @@ impl DebugContext<'_> {
 ) -> Result<(), fmt::Error> {
 let trait_: hir_def::TraitId = from_chalk_trait_id(id);
 let trait_data = self.0.trait_data(trait_);
-trait_data.name.display(self.0.upcast()).fmt(f)?;
+trait_data.name.display(self.0.upcast(), Edition::LATEST).fmt(f)?;
 Ok(())
 }

@@ -54,8 +55,8 @@ impl DebugContext<'_> {
 write!(
 fmt,
 "{}::{}",
-trait_data.name.display(self.0.upcast()),
-type_alias_data.name.display(self.0.upcast())
+trait_data.name.display(self.0.upcast(), Edition::LATEST),
+type_alias_data.name.display(self.0.upcast(), Edition::LATEST)
 )?;
 Ok(())
 }

@@ -75,7 +76,7 @@ impl DebugContext<'_> {
 let trait_ref = projection_ty.trait_ref(self.0);
 let trait_params = trait_ref.substitution.as_slice(Interner);
 let self_ty = trait_ref.self_type_parameter(Interner);
-write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0.upcast()))?;
+write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0.upcast(), Edition::LATEST))?;
 if trait_params.len() > 1 {
 write!(
 fmt,

@@ -83,7 +84,7 @@ impl DebugContext<'_> {
 trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))),
 )?;
 }
-write!(fmt, ">::{}", type_alias_data.name.display(self.0.upcast()))?;
+write!(fmt, ">::{}", type_alias_data.name.display(self.0.upcast(), Edition::LATEST))?;
 
 let proj_params_count = projection_ty.substitution.len(Interner) - trait_params.len();
 let proj_params = &projection_ty.substitution.as_slice(Interner)[..proj_params_count];

@@ -110,9 +111,11 @@ impl DebugContext<'_> {
 CallableDefId::EnumVariantId(e) => self.0.enum_variant_data(e).name.clone(),
 };
 match def {
-CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name.display(self.0.upcast())),
+CallableDefId::FunctionId(_) => {
+write!(fmt, "{{fn {}}}", name.display(self.0.upcast(), Edition::LATEST))
+}
 CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
-write!(fmt, "{{ctor {}}}", name.display(self.0.upcast()))
+write!(fmt, "{{ctor {}}}", name.display(self.0.upcast(), Edition::LATEST))
 }
 }
 }
@@ -14,13 +14,14 @@ use hir_def::{
 };
 use hir_expand::name::Name;
 use intern::sym;
-use stdx::panic_context;
+use span::Edition;
+use stdx::{never, panic_context};
 use triomphe::Arc;
 
 use crate::{
 db::HirDatabase, infer::unify::InferenceTable, utils::UnevaluatedConstEvaluatorFolder, AliasEq,
 AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment, Interner, ProjectionTy,
-ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, WhereClause,
+ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, TypeFlags, WhereClause,
 };
 
 /// This controls how much 'time' we give the Chalk solver before giving up.
@@ -90,6 +91,16 @@ pub(crate) fn normalize_projection_query(
     projection: ProjectionTy,
     env: Arc<TraitEnvironment>,
 ) -> Ty {
+    if projection.substitution.iter(Interner).any(|arg| {
+        arg.ty(Interner)
+            .is_some_and(|ty| ty.data(Interner).flags.intersects(TypeFlags::HAS_TY_INFER))
+    }) {
+        never!(
+            "Invoking `normalize_projection_query` with a projection type containing inference var"
+        );
+        return TyKind::Error.intern(Interner);
+    }
+
     let mut table = InferenceTable::new(db, env);
     let ty = table.normalize_projection_ty(projection);
     table.resolve_completely(ty)
@@ -104,7 +115,7 @@ pub(crate) fn trait_solve_query(
 ) -> Option<Solution> {
 let detail = match &goal.value.goal.data(Interner) {
 GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
-db.trait_data(it.hir_trait_id()).name.display(db.upcast()).to_string()
+db.trait_data(it.hir_trait_id()).name.display(db.upcast(), Edition::LATEST).to_string()
 }
 GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(),
 _ => "??".to_owned(),
@@ -20,7 +20,6 @@ itertools.workspace = true
 smallvec.workspace = true
 tracing.workspace = true
 triomphe.workspace = true
-once_cell = "1.17.1"
 
 # local deps
 base-db.workspace = true
@ -328,11 +328,9 @@ fn doc_modpath_from_str(link: &str) -> Option<ModPath> {
|
||||||
};
|
};
|
||||||
let parts = first_segment.into_iter().chain(parts).map(|segment| match segment.parse() {
|
let parts = first_segment.into_iter().chain(parts).map(|segment| match segment.parse() {
|
||||||
Ok(idx) => Name::new_tuple_field(idx),
|
Ok(idx) => Name::new_tuple_field(idx),
|
||||||
Err(_) => Name::new(
|
Err(_) => {
|
||||||
segment.split_once('<').map_or(segment, |it| it.0),
|
Name::new(segment.split_once('<').map_or(segment, |it| it.0), SyntaxContextId::ROOT)
|
||||||
tt::IdentIsRaw::No,
|
}
|
||||||
SyntaxContextId::ROOT,
|
|
||||||
),
|
|
||||||
});
|
});
|
||||||
Some(ModPath::from_segments(kind, parts))
|
Some(ModPath::from_segments(kind, parts))
|
||||||
};
|
};
|
||||||
|
|
|
@ -84,7 +84,7 @@ impl HirDisplay for Function {
|
||||||
if let Some(abi) = &data.abi {
|
if let Some(abi) = &data.abi {
|
||||||
write!(f, "extern \"{}\" ", abi.as_str())?;
|
write!(f, "extern \"{}\" ", abi.as_str())?;
|
||||||
}
|
}
|
||||||
write!(f, "fn {}", data.name.display(f.db.upcast()))?;
|
write!(f, "fn {}", data.name.display(f.db.upcast(), f.edition()))?;
|
||||||
|
|
||||||
write_generic_params(GenericDefId::FunctionId(self.id), f)?;
|
write_generic_params(GenericDefId::FunctionId(self.id), f)?;
|
||||||
|
|
||||||
|
@ -107,7 +107,7 @@ impl HirDisplay for Function {
|
||||||
first = false;
|
first = false;
|
||||||
}
|
}
|
||||||
match local {
|
match local {
|
||||||
Some(name) => write!(f, "{}: ", name.display(f.db.upcast()))?,
|
Some(name) => write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?,
|
||||||
None => f.write_str("_: ")?,
|
None => f.write_str("_: ")?,
|
||||||
}
|
}
|
||||||
type_ref.hir_fmt(f)?;
|
type_ref.hir_fmt(f)?;
|
||||||
|
@ -177,7 +177,7 @@ fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDi
|
||||||
|
|
||||||
if let Some(trait_) = impl_.trait_(db) {
|
if let Some(trait_) = impl_.trait_(db) {
|
||||||
let trait_data = db.trait_data(trait_.id);
|
let trait_data = db.trait_data(trait_.id);
|
||||||
write!(f, " {} for", trait_data.name.display(db.upcast()))?;
|
write!(f, " {} for", trait_data.name.display(db.upcast(), f.edition()))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
f.write_char(' ')?;
|
f.write_char(' ')?;
|
||||||
|
@ -196,7 +196,7 @@ impl HirDisplay for SelfParam {
|
||||||
{
|
{
|
||||||
f.write_char('&')?;
|
f.write_char('&')?;
|
||||||
if let Some(lifetime) = lifetime {
|
if let Some(lifetime) = lifetime {
|
||||||
write!(f, "{} ", lifetime.name.display(f.db.upcast()))?;
|
write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?;
|
||||||
}
|
}
|
||||||
if let hir_def::type_ref::Mutability::Mut = mut_ {
|
if let hir_def::type_ref::Mutability::Mut = mut_ {
|
||||||
f.write_str("mut ")?;
|
f.write_str("mut ")?;
|
||||||
|
@ -227,7 +227,7 @@ impl HirDisplay for Struct {
|
||||||
// FIXME: Render repr if its set explicitly?
|
// FIXME: Render repr if its set explicitly?
|
||||||
write_visibility(module_id, self.visibility(f.db), f)?;
|
write_visibility(module_id, self.visibility(f.db), f)?;
|
||||||
f.write_str("struct ")?;
|
f.write_str("struct ")?;
|
||||||
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
|
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||||
let def_id = GenericDefId::AdtId(AdtId::StructId(self.id));
|
let def_id = GenericDefId::AdtId(AdtId::StructId(self.id));
|
||||||
write_generic_params(def_id, f)?;
|
write_generic_params(def_id, f)?;
|
||||||
|
|
||||||
|
@ -266,7 +266,7 @@ impl HirDisplay for Enum {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||||
f.write_str("enum ")?;
|
f.write_str("enum ")?;
|
||||||
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
|
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||||
let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
|
let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
|
||||||
write_generic_params(def_id, f)?;
|
write_generic_params(def_id, f)?;
|
||||||
|
|
||||||
|
@ -283,7 +283,7 @@ impl HirDisplay for Union {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||||
f.write_str("union ")?;
|
f.write_str("union ")?;
|
||||||
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
|
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||||
let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
|
let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
|
||||||
write_generic_params(def_id, f)?;
|
write_generic_params(def_id, f)?;
|
||||||
|
|
||||||
|
@ -343,7 +343,7 @@ fn write_variants(
|
||||||
} else {
|
} else {
|
||||||
f.write_str("{\n")?;
|
f.write_str("{\n")?;
|
||||||
for variant in &variants[..count] {
|
for variant in &variants[..count] {
|
||||||
write!(f, " {}", variant.name(f.db).display(f.db.upcast()))?;
|
write!(f, " {}", variant.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||||
match variant.kind(f.db) {
|
match variant.kind(f.db) {
|
||||||
StructKind::Tuple => {
|
StructKind::Tuple => {
|
||||||
let fields_str =
|
let fields_str =
|
||||||
|
@ -372,21 +372,21 @@ fn write_variants(
|
||||||
impl HirDisplay for Field {
|
impl HirDisplay for Field {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?;
|
write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?;
|
||||||
write!(f, "{}: ", self.name(f.db).display(f.db.upcast()))?;
|
write!(f, "{}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||||
self.ty(f.db).hir_fmt(f)
|
self.ty(f.db).hir_fmt(f)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HirDisplay for TupleField {
|
impl HirDisplay for TupleField {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write!(f, "pub {}: ", self.name().display(f.db.upcast()))?;
|
write!(f, "pub {}: ", self.name().display(f.db.upcast(), f.edition()))?;
|
||||||
self.ty(f.db).hir_fmt(f)
|
self.ty(f.db).hir_fmt(f)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HirDisplay for Variant {
|
impl HirDisplay for Variant {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
|
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||||
let data = self.variant_data(f.db);
|
let data = self.variant_data(f.db);
|
||||||
match &*data {
|
match &*data {
|
||||||
VariantData::Unit => {}
|
VariantData::Unit => {}
|
||||||
|
@ -424,9 +424,9 @@ impl HirDisplay for ExternCrateDecl {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||||
f.write_str("extern crate ")?;
|
f.write_str("extern crate ")?;
|
||||||
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
|
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||||
if let Some(alias) = self.alias(f.db) {
|
if let Some(alias) = self.alias(f.db) {
|
||||||
write!(f, " as {alias}",)?;
|
write!(f, " as {}", alias.display(f.edition()))?;
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -478,7 +478,7 @@ impl HirDisplay for TypeParam {
|
||||||
match param_data {
|
match param_data {
|
||||||
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
|
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
|
||||||
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
|
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
|
||||||
write!(f, "{}", p.name.clone().unwrap().display(f.db.upcast()))?
|
write!(f, "{}", p.name.clone().unwrap().display(f.db.upcast(), f.edition()))?
|
||||||
}
|
}
|
||||||
TypeParamProvenance::ArgumentImplTrait => {
|
TypeParamProvenance::ArgumentImplTrait => {
|
||||||
return write_bounds_like_dyn_trait_with_prefix(
|
return write_bounds_like_dyn_trait_with_prefix(
|
||||||
|
@ -491,7 +491,7 @@ impl HirDisplay for TypeParam {
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
TypeOrConstParamData::ConstParamData(p) => {
|
TypeOrConstParamData::ConstParamData(p) => {
|
||||||
write!(f, "{}", p.name.display(f.db.upcast()))?;
|
write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -525,13 +525,13 @@ impl HirDisplay for TypeParam {
|
||||||
|
|
||||||
impl HirDisplay for LifetimeParam {
|
impl HirDisplay for LifetimeParam {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write!(f, "{}", self.name(f.db).display(f.db.upcast()))
|
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HirDisplay for ConstParam {
|
impl HirDisplay for ConstParam {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write!(f, "const {}: ", self.name(f.db).display(f.db.upcast()))?;
|
write!(f, "const {}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||||
self.ty(f.db).hir_fmt(f)
|
self.ty(f.db).hir_fmt(f)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -563,7 +563,7 @@ fn write_generic_params(
|
||||||
};
|
};
|
||||||
for (_, lifetime) in params.iter_lt() {
|
for (_, lifetime) in params.iter_lt() {
|
||||||
delim(f)?;
|
delim(f)?;
|
||||||
write!(f, "{}", lifetime.name.display(f.db.upcast()))?;
|
write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))?;
|
||||||
}
|
}
|
||||||
for (_, ty) in params.iter_type_or_consts() {
|
for (_, ty) in params.iter_type_or_consts() {
|
||||||
if let Some(name) = &ty.name() {
|
if let Some(name) = &ty.name() {
|
||||||
|
@ -573,7 +573,7 @@ fn write_generic_params(
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
delim(f)?;
|
delim(f)?;
|
||||||
write!(f, "{}", name.display(f.db.upcast()))?;
|
write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
|
||||||
if let Some(default) = &ty.default {
|
if let Some(default) = &ty.default {
|
||||||
f.write_str(" = ")?;
|
f.write_str(" = ")?;
|
||||||
default.hir_fmt(f)?;
|
default.hir_fmt(f)?;
|
||||||
|
@ -581,12 +581,12 @@ fn write_generic_params(
|
||||||
}
|
}
|
||||||
TypeOrConstParamData::ConstParamData(c) => {
|
TypeOrConstParamData::ConstParamData(c) => {
|
||||||
delim(f)?;
|
delim(f)?;
|
||||||
write!(f, "const {}: ", name.display(f.db.upcast()))?;
|
write!(f, "const {}: ", name.display(f.db.upcast(), f.edition()))?;
|
||||||
c.ty.hir_fmt(f)?;
|
c.ty.hir_fmt(f)?;
|
||||||
|
|
||||||
if let Some(default) = &c.default {
|
if let Some(default) = &c.default {
|
||||||
f.write_str(" = ")?;
|
f.write_str(" = ")?;
|
||||||
write!(f, "{}", default.display(f.db.upcast()))?;
|
write!(f, "{}", default.display(f.db.upcast(), f.edition()))?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -639,7 +639,7 @@ fn write_where_predicates(
|
||||||
let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
|
let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
|
||||||
WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
|
WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
|
||||||
WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() {
|
WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() {
|
||||||
Some(name) => write!(f, "{}", name.display(f.db.upcast())),
|
Some(name) => write!(f, "{}", name.display(f.db.upcast(), f.edition())),
|
||||||
None => f.write_str("{unnamed}"),
|
None => f.write_str("{unnamed}"),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
@ -668,12 +668,13 @@ fn write_where_predicates(
|
||||||
bound.hir_fmt(f)?;
|
bound.hir_fmt(f)?;
|
||||||
}
|
}
|
||||||
Lifetime { target, bound } => {
|
Lifetime { target, bound } => {
|
||||||
let target = target.name.display(f.db.upcast());
|
let target = target.name.display(f.db.upcast(), f.edition());
|
||||||
let bound = bound.name.display(f.db.upcast());
|
let bound = bound.name.display(f.db.upcast(), f.edition());
|
||||||
write!(f, "{target}: {bound}")?;
|
write!(f, "{target}: {bound}")?;
|
||||||
}
|
}
|
||||||
ForLifetime { lifetimes, target, bound } => {
|
ForLifetime { lifetimes, target, bound } => {
|
||||||
let lifetimes = lifetimes.iter().map(|it| it.display(f.db.upcast())).join(", ");
|
let lifetimes =
|
||||||
|
lifetimes.iter().map(|it| it.display(f.db.upcast(), f.edition())).join(", ");
|
||||||
write!(f, "for<{lifetimes}> ")?;
|
write!(f, "for<{lifetimes}> ")?;
|
||||||
write_target(target, f)?;
|
write_target(target, f)?;
|
||||||
f.write_str(": ")?;
|
f.write_str(": ")?;
|
||||||
|
@ -685,7 +686,9 @@ fn write_where_predicates(
|
||||||
f.write_str(" + ")?;
|
f.write_str(" + ")?;
|
||||||
match nxt {
|
match nxt {
|
||||||
TypeBound { bound, .. } | ForLifetime { bound, .. } => bound.hir_fmt(f)?,
|
TypeBound { bound, .. } | ForLifetime { bound, .. } => bound.hir_fmt(f)?,
|
||||||
Lifetime { bound, .. } => write!(f, "{}", bound.name.display(f.db.upcast()))?,
|
Lifetime { bound, .. } => {
|
||||||
|
write!(f, "{}", bound.name.display(f.db.upcast(), f.edition()))?
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
f.write_str(",")?;
|
f.write_str(",")?;
|
||||||
|
@ -707,7 +710,7 @@ impl HirDisplay for Const {
|
||||||
let data = db.const_data(self.id);
|
let data = db.const_data(self.id);
|
||||||
f.write_str("const ")?;
|
f.write_str("const ")?;
|
||||||
match &data.name {
|
match &data.name {
|
||||||
Some(name) => write!(f, "{}: ", name.display(f.db.upcast()))?,
|
Some(name) => write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?,
|
||||||
None => f.write_str("_: ")?,
|
None => f.write_str("_: ")?,
|
||||||
}
|
}
|
||||||
data.type_ref.hir_fmt(f)?;
|
data.type_ref.hir_fmt(f)?;
|
||||||
|
@ -723,7 +726,7 @@ impl HirDisplay for Static {
|
||||||
if data.mutable {
|
if data.mutable {
|
||||||
f.write_str("mut ")?;
|
f.write_str("mut ")?;
|
||||||
}
|
}
|
||||||
write!(f, "{}: ", data.name.display(f.db.upcast()))?;
|
write!(f, "{}: ", data.name.display(f.db.upcast(), f.edition()))?;
|
||||||
data.type_ref.hir_fmt(f)?;
|
data.type_ref.hir_fmt(f)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -777,7 +780,7 @@ fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), Hi
|
||||||
if data.is_auto {
|
if data.is_auto {
|
||||||
f.write_str("auto ")?;
|
f.write_str("auto ")?;
|
||||||
}
|
}
|
||||||
write!(f, "trait {}", data.name.display(f.db.upcast()))?;
|
write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?;
|
||||||
write_generic_params(GenericDefId::TraitId(trait_.id), f)?;
|
write_generic_params(GenericDefId::TraitId(trait_.id), f)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -786,7 +789,7 @@ impl HirDisplay for TraitAlias {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||||
let data = f.db.trait_alias_data(self.id);
|
let data = f.db.trait_alias_data(self.id);
|
||||||
write!(f, "trait {}", data.name.display(f.db.upcast()))?;
|
write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?;
|
||||||
let def_id = GenericDefId::TraitAliasId(self.id);
|
let def_id = GenericDefId::TraitAliasId(self.id);
|
||||||
write_generic_params(def_id, f)?;
|
write_generic_params(def_id, f)?;
|
||||||
f.write_str(" = ")?;
|
f.write_str(" = ")?;
|
||||||
|
@ -802,7 +805,7 @@ impl HirDisplay for TypeAlias {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||||
let data = f.db.type_alias_data(self.id);
|
let data = f.db.type_alias_data(self.id);
|
||||||
write!(f, "type {}", data.name.display(f.db.upcast()))?;
|
write!(f, "type {}", data.name.display(f.db.upcast(), f.edition()))?;
|
||||||
let def_id = GenericDefId::TypeAliasId(self.id);
|
let def_id = GenericDefId::TypeAliasId(self.id);
|
||||||
write_generic_params(def_id, f)?;
|
write_generic_params(def_id, f)?;
|
||||||
if !data.bounds.is_empty() {
|
if !data.bounds.is_empty() {
|
||||||
|
@ -822,7 +825,7 @@ impl HirDisplay for Module {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
// FIXME: Module doesn't have visibility saved in data.
|
// FIXME: Module doesn't have visibility saved in data.
|
||||||
match self.name(f.db) {
|
match self.name(f.db) {
|
||||||
Some(name) => write!(f, "mod {}", name.display(f.db.upcast())),
|
Some(name) => write!(f, "mod {}", name.display(f.db.upcast(), f.edition())),
|
||||||
None if self.is_crate_root() => match self.krate(f.db).display_name(f.db) {
|
None if self.is_crate_root() => match self.krate(f.db).display_name(f.db) {
|
||||||
Some(name) => write!(f, "extern crate {name}"),
|
Some(name) => write!(f, "extern crate {name}"),
|
||||||
None => f.write_str("extern crate {unknown}"),
|
None => f.write_str("extern crate {unknown}"),
|
||||||
|
@ -839,6 +842,6 @@ impl HirDisplay for Macro {
|
||||||
hir_def::MacroId::MacroRulesId(_) => f.write_str("macro_rules!"),
|
hir_def::MacroId::MacroRulesId(_) => f.write_str("macro_rules!"),
|
||||||
hir_def::MacroId::ProcMacroId(_) => f.write_str("proc_macro"),
|
hir_def::MacroId::ProcMacroId(_) => f.write_str("proc_macro"),
|
||||||
}?;
|
}?;
|
||||||
write!(f, " {}", self.name(f.db).display(f.db.upcast()))
|
write!(f, " {}", self.name(f.db).display(f.db.upcast(), f.edition()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -78,7 +78,8 @@ use hir_ty::{
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use nameres::diagnostics::DefDiagnosticKind;
|
use nameres::diagnostics::DefDiagnosticKind;
|
||||||
use rustc_hash::FxHashSet;
|
use rustc_hash::FxHashSet;
|
||||||
use span::{Edition, EditionedFileId, FileId, MacroCallId};
|
use smallvec::SmallVec;
|
||||||
|
use span::{Edition, EditionedFileId, FileId, MacroCallId, SyntaxContextId};
|
||||||
use stdx::{impl_from, never};
|
use stdx::{impl_from, never};
|
||||||
use syntax::{
|
use syntax::{
|
||||||
ast::{self, HasAttrs as _, HasGenericParams, HasName},
|
ast::{self, HasAttrs as _, HasGenericParams, HasName},
|
||||||
|
@ -93,8 +94,7 @@ pub use crate::{
|
||||||
diagnostics::*,
|
diagnostics::*,
|
||||||
has_source::HasSource,
|
has_source::HasSource,
|
||||||
semantics::{
|
semantics::{
|
||||||
DescendPreference, PathResolution, Semantics, SemanticsImpl, SemanticsScope, TypeInfo,
|
PathResolution, Semantics, SemanticsImpl, SemanticsScope, TypeInfo, VisibleTraits,
|
||||||
VisibleTraits,
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
pub use hir_ty::method_resolution::TyFingerprint;
|
pub use hir_ty::method_resolution::TyFingerprint;
|
||||||
|
@ -340,13 +340,13 @@ impl ModuleDef {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn canonical_path(&self, db: &dyn HirDatabase) -> Option<String> {
|
pub fn canonical_path(&self, db: &dyn HirDatabase, edition: Edition) -> Option<String> {
|
||||||
let mut segments = vec![self.name(db)?];
|
let mut segments = vec![self.name(db)?];
|
||||||
for m in self.module(db)?.path_to_root(db) {
|
for m in self.module(db)?.path_to_root(db) {
|
||||||
segments.extend(m.name(db))
|
segments.extend(m.name(db))
|
||||||
}
|
}
|
||||||
segments.reverse();
|
segments.reverse();
|
||||||
Some(segments.iter().map(|it| it.display(db.upcast())).join("::"))
|
Some(segments.iter().map(|it| it.display(db.upcast(), edition)).join("::"))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn canonical_module_path(
|
pub fn canonical_module_path(
|
||||||
|
@ -556,13 +556,14 @@ impl Module {
|
||||||
style_lints: bool,
|
style_lints: bool,
|
||||||
) {
|
) {
|
||||||
let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered();
|
let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered();
|
||||||
|
let edition = db.crate_graph()[self.id.krate()].edition;
|
||||||
let def_map = self.id.def_map(db.upcast());
|
let def_map = self.id.def_map(db.upcast());
|
||||||
for diag in def_map.diagnostics() {
|
for diag in def_map.diagnostics() {
|
||||||
if diag.in_module != self.id.local_id {
|
if diag.in_module != self.id.local_id {
|
||||||
// FIXME: This is accidentally quadratic.
|
// FIXME: This is accidentally quadratic.
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
emit_def_diagnostic(db, acc, diag);
|
emit_def_diagnostic(db, acc, diag, edition);
|
||||||
}
|
}
|
||||||
|
|
||||||
if !self.id.is_block_module() {
|
if !self.id.is_block_module() {
|
||||||
|
@ -582,7 +583,7 @@ impl Module {
|
||||||
}
|
}
|
||||||
ModuleDef::Trait(t) => {
|
ModuleDef::Trait(t) => {
|
||||||
for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
|
for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
|
||||||
emit_def_diagnostic(db, acc, diag);
|
emit_def_diagnostic(db, acc, diag, edition);
|
||||||
}
|
}
|
||||||
|
|
||||||
for item in t.items(db) {
|
for item in t.items(db) {
|
||||||
|
@ -599,19 +600,19 @@ impl Module {
|
||||||
match adt {
|
match adt {
|
||||||
Adt::Struct(s) => {
|
Adt::Struct(s) => {
|
||||||
for diag in db.struct_data_with_diagnostics(s.id).1.iter() {
|
for diag in db.struct_data_with_diagnostics(s.id).1.iter() {
|
||||||
emit_def_diagnostic(db, acc, diag);
|
emit_def_diagnostic(db, acc, diag, edition);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Adt::Union(u) => {
|
Adt::Union(u) => {
|
||||||
for diag in db.union_data_with_diagnostics(u.id).1.iter() {
|
for diag in db.union_data_with_diagnostics(u.id).1.iter() {
|
||||||
emit_def_diagnostic(db, acc, diag);
|
emit_def_diagnostic(db, acc, diag, edition);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Adt::Enum(e) => {
|
Adt::Enum(e) => {
|
||||||
for v in e.variants(db) {
|
for v in e.variants(db) {
|
||||||
acc.extend(ModuleDef::Variant(v).diagnostics(db, style_lints));
|
acc.extend(ModuleDef::Variant(v).diagnostics(db, style_lints));
|
||||||
for diag in db.enum_variant_data_with_diagnostics(v.id).1.iter() {
|
for diag in db.enum_variant_data_with_diagnostics(v.id).1.iter() {
|
||||||
emit_def_diagnostic(db, acc, diag);
|
emit_def_diagnostic(db, acc, diag, edition);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -645,7 +646,7 @@ impl Module {
|
||||||
let ast_id_map = db.ast_id_map(file_id);
|
let ast_id_map = db.ast_id_map(file_id);
|
||||||
|
|
||||||
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
|
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
|
||||||
emit_def_diagnostic(db, acc, diag);
|
emit_def_diagnostic(db, acc, diag, edition);
|
||||||
}
|
}
|
||||||
|
|
||||||
if inherent_impls.invalid_impls().contains(&impl_def.id) {
|
if inherent_impls.invalid_impls().contains(&impl_def.id) {
|
||||||
|
@ -869,23 +870,32 @@ fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>
|
||||||
never!("declarative expander for non decl-macro: {:?}", e);
|
never!("declarative expander for non decl-macro: {:?}", e);
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
|
let krate = HasModule::krate(&m.id, db.upcast());
|
||||||
|
let edition = db.crate_graph()[krate].edition;
|
||||||
emit_def_diagnostic_(
|
emit_def_diagnostic_(
|
||||||
db,
|
db,
|
||||||
acc,
|
acc,
|
||||||
&DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
|
&DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
|
||||||
|
edition,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, diag: &DefDiagnostic) {
|
fn emit_def_diagnostic(
|
||||||
emit_def_diagnostic_(db, acc, &diag.kind)
|
db: &dyn HirDatabase,
|
||||||
|
acc: &mut Vec<AnyDiagnostic>,
|
||||||
|
diag: &DefDiagnostic,
|
||||||
|
edition: Edition,
|
||||||
|
) {
|
||||||
|
emit_def_diagnostic_(db, acc, &diag.kind, edition)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn emit_def_diagnostic_(
|
fn emit_def_diagnostic_(
|
||||||
db: &dyn HirDatabase,
|
db: &dyn HirDatabase,
|
||||||
acc: &mut Vec<AnyDiagnostic>,
|
acc: &mut Vec<AnyDiagnostic>,
|
||||||
diag: &DefDiagnosticKind,
|
diag: &DefDiagnosticKind,
|
||||||
|
edition: Edition,
|
||||||
) {
|
) {
|
||||||
match diag {
|
match diag {
|
||||||
DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
|
DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
|
||||||
|
@ -910,7 +920,7 @@ fn emit_def_diagnostic_(
|
||||||
MacroError {
|
MacroError {
|
||||||
node: InFile::new(ast.file_id, item.syntax_node_ptr()),
|
node: InFile::new(ast.file_id, item.syntax_node_ptr()),
|
||||||
precise_location: None,
|
precise_location: None,
|
||||||
message: format!("{}: {message}", path.display(db.upcast())),
|
message: format!("{}: {message}", path.display(db.upcast(), edition)),
|
||||||
error,
|
error,
|
||||||
}
|
}
|
||||||
.into(),
|
.into(),
|
||||||
|
@ -1764,7 +1774,7 @@ impl DefWithBody {
|
||||||
/// A textual representation of the HIR of this def's body for debugging purposes.
|
/// A textual representation of the HIR of this def's body for debugging purposes.
|
||||||
pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
|
pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
|
||||||
let body = db.body(self.id());
|
let body = db.body(self.id());
|
||||||
body.pretty_print(db.upcast(), self.id())
|
body.pretty_print(db.upcast(), self.id(), Edition::CURRENT)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A textual representation of the MIR of this def's body for debugging purposes.
|
/// A textual representation of the MIR of this def's body for debugging purposes.
|
||||||
|
@ -2259,6 +2269,8 @@ impl Function {
|
||||||
db: &dyn HirDatabase,
|
db: &dyn HirDatabase,
|
||||||
span_formatter: impl Fn(FileId, TextRange) -> String,
|
span_formatter: impl Fn(FileId, TextRange) -> String,
|
||||||
) -> String {
|
) -> String {
|
||||||
|
let krate = HasModule::krate(&self.id, db.upcast());
|
||||||
|
let edition = db.crate_graph()[krate].edition;
|
||||||
let body = match db.monomorphized_mir_body(
|
let body = match db.monomorphized_mir_body(
|
||||||
self.id.into(),
|
self.id.into(),
|
||||||
Substitution::empty(Interner),
|
Substitution::empty(Interner),
|
||||||
|
@ -2267,7 +2279,7 @@ impl Function {
|
||||||
Ok(body) => body,
|
Ok(body) => body,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
let mut r = String::new();
|
let mut r = String::new();
|
||||||
_ = e.pretty_print(&mut r, db, &span_formatter);
|
_ = e.pretty_print(&mut r, db, &span_formatter, edition);
|
||||||
return r;
|
return r;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -2276,7 +2288,7 @@ impl Function {
|
||||||
Ok(_) => "pass".to_owned(),
|
Ok(_) => "pass".to_owned(),
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
let mut r = String::new();
|
let mut r = String::new();
|
||||||
_ = e.pretty_print(&mut r, db, &span_formatter);
|
_ = e.pretty_print(&mut r, db, &span_formatter, edition);
|
||||||
r
|
r
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -2510,7 +2522,11 @@ impl Const {
|
||||||
Type::from_value_def(db, self.id)
|
Type::from_value_def(db, self.id)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn render_eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> {
|
pub fn render_eval(
|
||||||
|
self,
|
||||||
|
db: &dyn HirDatabase,
|
||||||
|
edition: Edition,
|
||||||
|
) -> Result<String, ConstEvalError> {
|
||||||
let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
|
let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
|
||||||
let data = &c.data(Interner);
|
let data = &c.data(Interner);
|
||||||
if let TyKind::Scalar(s) = data.ty.kind(Interner) {
|
if let TyKind::Scalar(s) = data.ty.kind(Interner) {
|
||||||
|
@ -2532,7 +2548,7 @@ impl Const {
|
||||||
if let Ok(s) = mir::render_const_using_debug_impl(db, self.id, &c) {
|
if let Ok(s) = mir::render_const_using_debug_impl(db, self.id, &c) {
|
||||||
Ok(s)
|
Ok(s)
|
||||||
} else {
|
} else {
|
||||||
Ok(format!("{}", c.display(db)))
|
Ok(format!("{}", c.display(db, edition)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -3728,9 +3744,9 @@ impl ConstParam {
|
||||||
Type::new(db, self.id.parent(), db.const_param_ty(self.id))
|
Type::new(db, self.id.parent(), db.const_param_ty(self.id))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn default(self, db: &dyn HirDatabase) -> Option<ast::ConstArg> {
|
pub fn default(self, db: &dyn HirDatabase, edition: Edition) -> Option<ast::ConstArg> {
|
||||||
let arg = generic_arg_from_param(db, self.id.into())?;
|
let arg = generic_arg_from_param(db, self.id.into())?;
|
||||||
known_const_to_ast(arg.constant(Interner)?, db)
|
known_const_to_ast(arg.constant(Interner)?, db, edition)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -4038,12 +4054,20 @@ impl Closure {
|
||||||
TyKind::Closure(self.id, self.subst).intern(Interner)
|
TyKind::Closure(self.id, self.subst).intern(Interner)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn display_with_id(&self, db: &dyn HirDatabase) -> String {
|
pub fn display_with_id(&self, db: &dyn HirDatabase, edition: Edition) -> String {
|
||||||
self.clone().as_ty().display(db).with_closure_style(ClosureStyle::ClosureWithId).to_string()
|
self.clone()
|
||||||
|
.as_ty()
|
||||||
|
.display(db, edition)
|
||||||
|
.with_closure_style(ClosureStyle::ClosureWithId)
|
||||||
|
.to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn display_with_impl(&self, db: &dyn HirDatabase) -> String {
|
pub fn display_with_impl(&self, db: &dyn HirDatabase, edition: Edition) -> String {
|
||||||
self.clone().as_ty().display(db).with_closure_style(ClosureStyle::ImplFn).to_string()
|
self.clone()
|
||||||
|
.as_ty()
|
||||||
|
.display(db, edition)
|
||||||
|
.with_closure_style(ClosureStyle::ImplFn)
|
||||||
|
.to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn captured_items(&self, db: &dyn HirDatabase) -> Vec<ClosureCapture> {
|
pub fn captured_items(&self, db: &dyn HirDatabase) -> Vec<ClosureCapture> {
|
||||||
|
@ -4090,6 +4114,15 @@ impl ClosureCapture {
|
||||||
Local { parent: self.owner, binding_id: self.capture.local() }
|
Local { parent: self.owner, binding_id: self.capture.local() }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns whether this place has any field (aka. non-deref) projections.
|
||||||
|
pub fn has_field_projections(&self) -> bool {
|
||||||
|
self.capture.has_field_projections()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn usages(&self) -> CaptureUsages {
|
||||||
|
CaptureUsages { parent: self.owner, spans: self.capture.spans() }
|
||||||
|
}
|
||||||
|
|
||||||
pub fn kind(&self) -> CaptureKind {
|
pub fn kind(&self) -> CaptureKind {
|
||||||
match self.capture.kind() {
|
match self.capture.kind() {
|
||||||
hir_ty::CaptureKind::ByRef(
|
hir_ty::CaptureKind::ByRef(
|
||||||
|
@ -4105,11 +4138,21 @@ impl ClosureCapture {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Converts the place to a name that can be inserted into source code.
|
||||||
|
pub fn place_to_name(&self, db: &dyn HirDatabase) -> String {
|
||||||
|
self.capture.place_to_name(self.owner, db)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn display_place_source_code(&self, db: &dyn HirDatabase) -> String {
|
||||||
|
self.capture.display_place_source_code(self.owner, db)
|
||||||
|
}
|
||||||
|
|
||||||
pub fn display_place(&self, db: &dyn HirDatabase) -> String {
|
pub fn display_place(&self, db: &dyn HirDatabase) -> String {
|
||||||
self.capture.display_place(self.owner, db)
|
self.capture.display_place(self.owner, db)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||||
pub enum CaptureKind {
|
pub enum CaptureKind {
|
||||||
SharedRef,
|
SharedRef,
|
||||||
UniqueSharedRef,
|
UniqueSharedRef,
|
||||||
|
@ -4117,6 +4160,74 @@ pub enum CaptureKind {
|
||||||
Move,
|
Move,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct CaptureUsages {
|
||||||
|
parent: DefWithBodyId,
|
||||||
|
spans: SmallVec<[mir::MirSpan; 3]>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CaptureUsages {
|
||||||
|
pub fn sources(&self, db: &dyn HirDatabase) -> Vec<CaptureUsageSource> {
|
||||||
|
let (body, source_map) = db.body_with_source_map(self.parent);
|
||||||
|
let mut result = Vec::with_capacity(self.spans.len());
|
||||||
|
for &span in self.spans.iter() {
|
||||||
|
let is_ref = span.is_ref_span(&body);
|
||||||
|
match span {
|
||||||
|
mir::MirSpan::ExprId(expr) => {
|
||||||
|
if let Ok(expr) = source_map.expr_syntax(expr) {
|
||||||
|
result.push(CaptureUsageSource {
|
||||||
|
is_ref,
|
||||||
|
source: expr.map(AstPtr::wrap_left),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
mir::MirSpan::PatId(pat) => {
|
||||||
|
if let Ok(pat) = source_map.pat_syntax(pat) {
|
||||||
|
result.push(CaptureUsageSource {
|
||||||
|
is_ref,
|
||||||
|
source: pat.map(AstPtr::wrap_right),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
mir::MirSpan::BindingId(binding) => result.extend(
|
||||||
|
source_map
|
||||||
|
.patterns_for_binding(binding)
|
||||||
|
.iter()
|
||||||
|
.filter_map(|&pat| source_map.pat_syntax(pat).ok())
|
||||||
|
.map(|pat| CaptureUsageSource {
|
||||||
|
is_ref,
|
||||||
|
source: pat.map(AstPtr::wrap_right),
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
mir::MirSpan::SelfParam | mir::MirSpan::Unknown => {
|
||||||
|
unreachable!("invalid capture usage span")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct CaptureUsageSource {
|
||||||
|
is_ref: bool,
|
||||||
|
source: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CaptureUsageSource {
|
||||||
|
pub fn source(&self) -> AstPtr<Either<ast::Expr, ast::Pat>> {
|
||||||
|
self.source.value
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn file_id(&self) -> HirFileId {
|
||||||
|
self.source.file_id
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_ref(&self) -> bool {
|
||||||
|
self.is_ref
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
|
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
|
||||||
pub struct Type {
|
pub struct Type {
|
||||||
env: Arc<TraitEnvironment>,
|
env: Arc<TraitEnvironment>,
|
||||||
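The new `ClosureCapture::usages` / `CaptureUsages::sources` pair exposes every place a captured value is used, each tagged with whether the use goes through a reference. A sketch of a consumer built only on the signatures added above; the counting helper itself is invented:

use hir::{db::HirDatabase, ClosureCapture};

// Hypothetical: count how many usages of a captured place go through a
// reference versus using the place directly.
fn capture_usage_counts(db: &dyn HirDatabase, capture: &ClosureCapture) -> (usize, usize) {
    let mut by_ref = 0;
    let mut direct = 0;
    for usage in capture.usages().sources(db) {
        if usage.is_ref() {
            by_ref += 1;
        } else {
            direct += 1;
        }
    }
    (by_ref, direct)
}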
|
@ -4355,6 +4466,22 @@ impl Type {
|
||||||
method_resolution::implements_trait(&canonical_ty, db, &self.env, trait_)
|
method_resolution::implements_trait(&canonical_ty, db, &self.env, trait_)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// This does **not** resolve `IntoFuture`, only `Future`.
|
||||||
|
pub fn future_output(self, db: &dyn HirDatabase) -> Option<Type> {
|
||||||
|
let future_output =
|
||||||
|
db.lang_item(self.env.krate, LangItem::FutureOutput)?.as_type_alias()?;
|
||||||
|
self.normalize_trait_assoc_type(db, &[], future_output.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This does **not** resolve `IntoIterator`, only `Iterator`.
|
||||||
|
pub fn iterator_item(self, db: &dyn HirDatabase) -> Option<Type> {
|
||||||
|
let iterator_trait = db.lang_item(self.env.krate, LangItem::Iterator)?.as_trait()?;
|
||||||
|
let iterator_item = db
|
||||||
|
.trait_data(iterator_trait)
|
||||||
|
.associated_type_by_name(&Name::new_symbol(sym::Item.clone(), SyntaxContextId::ROOT))?;
|
||||||
|
self.normalize_trait_assoc_type(db, &[], iterator_item.into())
|
||||||
|
}
|
||||||
|
|
||||||
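`Type::future_output` and `Type::iterator_item` resolve the `Future::Output` and `Iterator::Item` associated types directly, deliberately skipping `IntoFuture`/`IntoIterator`. One way a feature might combine them (the helper and the fallback order are illustrative):

use hir::{db::HirDatabase, Type};

// Hypothetical: what a value of `ty` yields when awaited or iterated,
// without going through IntoFuture/IntoIterator.
fn awaited_or_item_type(db: &dyn HirDatabase, ty: &Type) -> Option<Type> {
    ty.clone().future_output(db).or_else(|| ty.clone().iterator_item(db))
}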
/// Checks that particular type `ty` implements `std::ops::FnOnce`.
|
/// Checks that particular type `ty` implements `std::ops::FnOnce`.
|
||||||
///
|
///
|
||||||
/// This function can be used to check if a particular type is callable, since FnOnce is a
|
/// This function can be used to check if a particular type is callable, since FnOnce is a
|
||||||
|
@ -4704,18 +4831,20 @@ impl Type {
|
||||||
pub fn type_and_const_arguments<'a>(
|
pub fn type_and_const_arguments<'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
db: &'a dyn HirDatabase,
|
db: &'a dyn HirDatabase,
|
||||||
|
edition: Edition,
|
||||||
) -> impl Iterator<Item = SmolStr> + 'a {
|
) -> impl Iterator<Item = SmolStr> + 'a {
|
||||||
self.ty
|
self.ty
|
||||||
.strip_references()
|
.strip_references()
|
||||||
.as_adt()
|
.as_adt()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.flat_map(|(_, substs)| substs.iter(Interner))
|
.flat_map(|(_, substs)| substs.iter(Interner))
|
||||||
.filter_map(|arg| {
|
.filter_map(move |arg| {
|
||||||
// arg can be either a `Ty` or `constant`
|
// arg can be either a `Ty` or `constant`
|
||||||
if let Some(ty) = arg.ty(Interner) {
|
if let Some(ty) = arg.ty(Interner) {
|
||||||
Some(format_smolstr!("{}", ty.display(db)))
|
Some(format_smolstr!("{}", ty.display(db, edition)))
|
||||||
} else {
|
} else {
|
||||||
arg.constant(Interner).map(|const_| format_smolstr!("{}", const_.display(db)))
|
arg.constant(Interner)
|
||||||
|
.map(|const_| format_smolstr!("{}", const_.display(db, edition)))
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -4724,13 +4853,17 @@ impl Type {
|
||||||
pub fn generic_parameters<'a>(
|
pub fn generic_parameters<'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
db: &'a dyn HirDatabase,
|
db: &'a dyn HirDatabase,
|
||||||
|
edition: Edition,
|
||||||
) -> impl Iterator<Item = SmolStr> + 'a {
|
) -> impl Iterator<Item = SmolStr> + 'a {
|
||||||
// iterate the lifetime
|
// iterate the lifetime
|
||||||
self.as_adt()
|
self.as_adt()
|
||||||
.and_then(|a| a.lifetime(db).map(|lt| lt.name.display_no_db().to_smolstr()))
|
.and_then(|a| {
|
||||||
|
// Lifetimes do not need edition-specific handling as they cannot be escaped.
|
||||||
|
a.lifetime(db).map(|lt| lt.name.display_no_db(Edition::Edition2015).to_smolstr())
|
||||||
|
})
|
||||||
.into_iter()
|
.into_iter()
|
||||||
// add the type and const parameters
|
// add the type and const parameters
|
||||||
.chain(self.type_and_const_arguments(db))
|
.chain(self.type_and_const_arguments(db, edition))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn iterate_method_candidates_with_traits<T>(
|
pub fn iterate_method_candidates_with_traits<T>(
|
||||||
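`type_and_const_arguments` and `generic_parameters` now need the target edition so type and const arguments render with the right escaping, while lifetimes keep `Edition::Edition2015` because, as the comment above notes, they can never be escaped. A small illustrative wrapper (the label helper is not part of this commit; `Itertools::join` is assumed to be available to the caller):

use hir::{db::HirDatabase, Type};
use itertools::Itertools;
use span::Edition;

// Hypothetical: render the generic arguments of an ADT type as a single
// comma-separated label, e.g. for an inlay hint.
fn generic_args_label(db: &dyn HirDatabase, ty: &Type, edition: Edition) -> String {
    ty.generic_parameters(db, edition).join(", ")
}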
|
|
|
@ -4,6 +4,7 @@ mod source_to_def;
|
||||||
|
|
||||||
use std::{
|
use std::{
|
||||||
cell::RefCell,
|
cell::RefCell,
|
||||||
|
convert::Infallible,
|
||||||
fmt, iter, mem,
|
fmt, iter, mem,
|
||||||
ops::{self, ControlFlow, Not},
|
ops::{self, ControlFlow, Not},
|
||||||
};
|
};
|
||||||
|
@ -22,9 +23,11 @@ use hir_expand::{
|
||||||
builtin::{BuiltinFnLikeExpander, EagerExpander},
|
builtin::{BuiltinFnLikeExpander, EagerExpander},
|
||||||
db::ExpandDatabase,
|
db::ExpandDatabase,
|
||||||
files::InRealFile,
|
files::InRealFile,
|
||||||
|
inert_attr_macro::find_builtin_attr_idx,
|
||||||
name::AsName,
|
name::AsName,
|
||||||
FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
|
FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
|
||||||
};
|
};
|
||||||
|
use intern::Symbol;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use rustc_hash::{FxHashMap, FxHashSet};
|
use rustc_hash::{FxHashMap, FxHashSet};
|
||||||
use smallvec::{smallvec, SmallVec};
|
use smallvec::{smallvec, SmallVec};
|
||||||
|
@ -48,11 +51,7 @@ use crate::{
|
||||||
Variant, VariantDef,
|
Variant, VariantDef,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub enum DescendPreference {
|
const CONTINUE_NO_BREAKS: ControlFlow<Infallible, ()> = ControlFlow::Continue(());
|
||||||
SameText,
|
|
||||||
SameKind,
|
|
||||||
None,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||||
pub enum PathResolution {
|
pub enum PathResolution {
|
||||||
|
@ -182,6 +181,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
||||||
|
|
||||||
/// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
|
/// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
|
||||||
/// descend it and find again
|
/// descend it and find again
|
||||||
|
// FIXME: Rethink this API
|
||||||
pub fn find_node_at_offset_with_descend<N: AstNode>(
|
pub fn find_node_at_offset_with_descend<N: AstNode>(
|
||||||
&self,
|
&self,
|
||||||
node: &SyntaxNode,
|
node: &SyntaxNode,
|
||||||
|
@ -190,8 +190,9 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
||||||
self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
|
self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
|
/// Find an AstNode by offset inside SyntaxNode, if it is inside an attribute macro call,
|
||||||
/// descend it and find again
|
/// descend it and find again
|
||||||
|
// FIXME: Rethink this API
|
||||||
pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
|
pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
|
||||||
&'slf self,
|
&'slf self,
|
||||||
node: &SyntaxNode,
|
node: &SyntaxNode,
|
||||||
|
@ -545,51 +546,53 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Retrieves all the formatting parts of the format_args! template string.
|
||||||
pub fn as_format_args_parts(
|
pub fn as_format_args_parts(
|
||||||
&self,
|
&self,
|
||||||
string: &ast::String,
|
string: &ast::String,
|
||||||
) -> Option<Vec<(TextRange, Option<PathResolution>)>> {
|
) -> Option<Vec<(TextRange, Option<PathResolution>)>> {
|
||||||
if let Some(quote) = string.open_quote_text_range() {
|
let quote = string.open_quote_text_range()?;
|
||||||
return self
|
|
||||||
.descend_into_macros(DescendPreference::SameText, string.syntax().clone())
|
let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
|
||||||
.into_iter()
|
self.descend_into_macros_breakable(token, |token| {
|
||||||
.find_map(|token| {
|
(|| {
|
||||||
let string = ast::String::cast(token)?;
|
let token = token.value;
|
||||||
let literal =
|
let string = ast::String::cast(token)?;
|
||||||
string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
|
let literal =
|
||||||
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
|
string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
|
||||||
let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
|
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
|
||||||
let format_args = self.wrap_node_infile(format_args);
|
let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
|
||||||
let res = source_analyzer
|
let format_args = self.wrap_node_infile(format_args);
|
||||||
.as_format_args_parts(self.db, format_args.as_ref())?
|
let res = source_analyzer
|
||||||
.map(|(range, res)| (range + quote.end(), res))
|
.as_format_args_parts(self.db, format_args.as_ref())?
|
||||||
.collect();
|
.map(|(range, res)| (range + quote.end(), res))
|
||||||
Some(res)
|
.collect();
|
||||||
});
|
Some(res)
|
||||||
}
|
})()
|
||||||
None
|
.map_or(ControlFlow::Continue(()), ControlFlow::Break)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Retrieves the formatting part of the format_args! template string at the given offset.
|
||||||
pub fn check_for_format_args_template(
|
pub fn check_for_format_args_template(
|
||||||
&self,
|
&self,
|
||||||
original_token: SyntaxToken,
|
original_token: SyntaxToken,
|
||||||
offset: TextSize,
|
offset: TextSize,
|
||||||
) -> Option<(TextRange, Option<PathResolution>)> {
|
) -> Option<(TextRange, Option<PathResolution>)> {
|
||||||
if let Some(original_string) = ast::String::cast(original_token.clone()) {
|
let original_string = ast::String::cast(original_token.clone())?;
|
||||||
if let Some(quote) = original_string.open_quote_text_range() {
|
let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
|
||||||
return self
|
let quote = original_string.open_quote_text_range()?;
|
||||||
.descend_into_macros(DescendPreference::SameText, original_token)
|
self.descend_into_macros_breakable(original_token, |token| {
|
||||||
.into_iter()
|
(|| {
|
||||||
.find_map(|token| {
|
let token = token.value;
|
||||||
self.resolve_offset_in_format_args(
|
self.resolve_offset_in_format_args(
|
||||||
ast::String::cast(token)?,
|
ast::String::cast(token)?,
|
||||||
offset.checked_sub(quote.end())?,
|
offset.checked_sub(quote.end())?,
|
||||||
)
|
)
|
||||||
})
|
.map(|(range, res)| (range + quote.end(), res))
|
||||||
.map(|(range, res)| (range + quote.end(), res));
|
})()
|
||||||
}
|
.map_or(ControlFlow::Continue(()), ControlFlow::Break)
|
||||||
}
|
})
|
||||||
None
|
|
||||||
}
|
}
|
||||||
|
|
||||||
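Both rewritten methods wrap their fallible body in an immediately invoked closure and then `map_or(ControlFlow::Continue(()), ControlFlow::Break)`, so a `Some` result stops the macro descent while `None` keeps it going. Stripped of the surrounding context, that conversion is just:

use std::ops::ControlFlow;

// Standalone restatement of the idiom used above: stop the descent on
// `Some`, keep walking on `None`.
fn option_to_flow<T>(value: Option<T>) -> ControlFlow<T> {
    value.map_or(ControlFlow::Continue(()), ControlFlow::Break)
}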
fn resolve_offset_in_format_args(
|
fn resolve_offset_in_format_args(
|
||||||
|
@ -619,30 +622,37 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
Some(it) => it,
|
Some(it) => it,
|
||||||
None => return res,
|
None => return res,
|
||||||
};
|
};
|
||||||
|
let file = self.find_file(node.syntax());
|
||||||
|
let Some(file_id) = file.file_id.file_id() else {
|
||||||
|
return res;
|
||||||
|
};
|
||||||
|
|
||||||
if first == last {
|
if first == last {
|
||||||
// node is just the token, so descend the token
|
// node is just the token, so descend the token
|
||||||
self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
|
self.descend_into_macros_impl(
|
||||||
if let Some(node) = value
|
InRealFile::new(file_id, first),
|
||||||
.parent_ancestors()
|
&mut |InFile { value, .. }| {
|
||||||
.take_while(|it| it.text_range() == value.text_range())
|
if let Some(node) = value
|
||||||
.find_map(N::cast)
|
.parent_ancestors()
|
||||||
{
|
.take_while(|it| it.text_range() == value.text_range())
|
||||||
res.push(node)
|
.find_map(N::cast)
|
||||||
}
|
{
|
||||||
ControlFlow::Continue(())
|
res.push(node)
|
||||||
});
|
}
|
||||||
|
CONTINUE_NO_BREAKS
|
||||||
|
},
|
||||||
|
);
|
||||||
} else {
|
} else {
|
||||||
// Descend first and last token, then zip them to look for the node they belong to
|
// Descend first and last token, then zip them to look for the node they belong to
|
||||||
let mut scratch: SmallVec<[_; 1]> = smallvec![];
|
let mut scratch: SmallVec<[_; 1]> = smallvec![];
|
||||||
self.descend_into_macros_impl(first, &mut |token| {
|
self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token| {
|
||||||
scratch.push(token);
|
scratch.push(token);
|
||||||
ControlFlow::Continue(())
|
CONTINUE_NO_BREAKS
|
||||||
});
|
});
|
||||||
|
|
||||||
let mut scratch = scratch.into_iter();
|
let mut scratch = scratch.into_iter();
|
||||||
self.descend_into_macros_impl(
|
self.descend_into_macros_impl(
|
||||||
last,
|
InRealFile::new(file_id, last),
|
||||||
&mut |InFile { value: last, file_id: last_fid }| {
|
&mut |InFile { value: last, file_id: last_fid }| {
|
||||||
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
|
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
|
||||||
if first_fid == last_fid {
|
if first_fid == last_fid {
|
||||||
|
@ -659,130 +669,151 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ControlFlow::Continue(())
|
CONTINUE_NO_BREAKS
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Descend the token into its macro call if it is part of one, returning the tokens in the
|
fn is_inside_macro_call(token: &SyntaxToken) -> bool {
|
||||||
/// expansion that it is associated with.
|
token.parent_ancestors().any(|ancestor| {
|
||||||
pub fn descend_into_macros(
|
if ast::MacroCall::can_cast(ancestor.kind()) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
// Check if it is an item (only items can have macro attributes) that has a non-builtin attribute.
|
||||||
|
let Some(item) = ast::Item::cast(ancestor) else { return false };
|
||||||
|
item.attrs().any(|attr| {
|
||||||
|
let Some(meta) = attr.meta() else { return false };
|
||||||
|
let Some(path) = meta.path() else { return false };
|
||||||
|
let Some(attr_name) = path.as_single_name_ref() else { return true };
|
||||||
|
let attr_name = attr_name.text();
|
||||||
|
let attr_name = attr_name.as_str();
|
||||||
|
attr_name == "derive" || find_builtin_attr_idx(&Symbol::intern(attr_name)).is_none()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn descend_into_macros_exact_if_in_macro(
|
||||||
&self,
|
&self,
|
||||||
mode: DescendPreference,
|
|
||||||
token: SyntaxToken,
|
token: SyntaxToken,
|
||||||
) -> SmallVec<[SyntaxToken; 1]> {
|
) -> SmallVec<[SyntaxToken; 1]> {
|
||||||
enum Dp<'t> {
|
if Self::is_inside_macro_call(&token) {
|
||||||
SameText(&'t str),
|
self.descend_into_macros_exact(token)
|
||||||
SameKind(SyntaxKind),
|
} else {
|
||||||
None,
|
smallvec![token]
|
||||||
}
|
}
|
||||||
let fetch_kind = |token: &SyntaxToken| match token.parent() {
|
}
|
||||||
Some(node) => match node.kind() {
|
|
||||||
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
|
pub fn descend_into_macros_cb(
|
||||||
_ => token.kind(),
|
&self,
|
||||||
},
|
token: SyntaxToken,
|
||||||
None => token.kind(),
|
mut cb: impl FnMut(InFile<SyntaxToken>),
|
||||||
};
|
) {
|
||||||
let mode = match mode {
|
if let Ok(token) = self.wrap_token_infile(token).into_real_file() {
|
||||||
DescendPreference::SameText => Dp::SameText(token.text()),
|
self.descend_into_macros_impl(token, &mut |t| {
|
||||||
DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
|
cb(t);
|
||||||
DescendPreference::None => Dp::None,
|
CONTINUE_NO_BREAKS
|
||||||
};
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
|
||||||
let mut res = smallvec![];
|
let mut res = smallvec![];
|
||||||
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
|
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
|
||||||
let is_a_match = match mode {
|
self.descend_into_macros_impl(token, &mut |t| {
|
||||||
Dp::SameText(text) => value.text() == text,
|
res.push(t.value);
|
||||||
Dp::SameKind(preferred_kind) => {
|
CONTINUE_NO_BREAKS
|
||||||
let kind = fetch_kind(&value);
|
});
|
||||||
kind == preferred_kind
|
}
|
||||||
// special case for derive macros
|
|
||||||
|| (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
|
|
||||||
}
|
|
||||||
Dp::None => true,
|
|
||||||
};
|
|
||||||
if is_a_match {
|
|
||||||
res.push(value);
|
|
||||||
}
|
|
||||||
ControlFlow::Continue(())
|
|
||||||
});
|
|
||||||
if res.is_empty() {
|
if res.is_empty() {
|
||||||
res.push(token);
|
res.push(token);
|
||||||
}
|
}
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn descend_into_macros_single(
|
pub fn descend_into_macros_breakable<T>(
|
||||||
&self,
|
&self,
|
||||||
mode: DescendPreference,
|
token: InRealFile<SyntaxToken>,
|
||||||
token: SyntaxToken,
|
mut cb: impl FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
|
||||||
) -> SyntaxToken {
|
) -> Option<T> {
|
||||||
enum Dp<'t> {
|
self.descend_into_macros_impl(token.clone(), &mut cb)
|
||||||
SameText(&'t str),
|
|
||||||
SameKind(SyntaxKind),
|
|
||||||
None,
|
|
||||||
}
|
|
||||||
let fetch_kind = |token: &SyntaxToken| match token.parent() {
|
|
||||||
Some(node) => match node.kind() {
|
|
||||||
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
|
|
||||||
_ => token.kind(),
|
|
||||||
},
|
|
||||||
None => token.kind(),
|
|
||||||
};
|
|
||||||
let mode = match mode {
|
|
||||||
DescendPreference::SameText => Dp::SameText(token.text()),
|
|
||||||
DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
|
|
||||||
DescendPreference::None => Dp::None,
|
|
||||||
};
|
|
||||||
let mut res = token.clone();
|
|
||||||
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
|
|
||||||
let is_a_match = match mode {
|
|
||||||
Dp::SameText(text) => value.text() == text,
|
|
||||||
Dp::SameKind(preferred_kind) => {
|
|
||||||
let kind = fetch_kind(&value);
|
|
||||||
kind == preferred_kind
|
|
||||||
// special case for derive macros
|
|
||||||
|| (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
|
|
||||||
}
|
|
||||||
Dp::None => true,
|
|
||||||
};
|
|
||||||
res = value;
|
|
||||||
if is_a_match {
|
|
||||||
ControlFlow::Break(())
|
|
||||||
} else {
|
|
||||||
ControlFlow::Continue(())
|
|
||||||
}
|
|
||||||
});
|
|
||||||
res
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn descend_into_macros_impl(
|
/// Descends the token into expansions, returning the tokens that matches the input
|
||||||
|
/// token's [`SyntaxKind`] and text.
|
||||||
|
pub fn descend_into_macros_exact(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
|
||||||
|
let mut r = smallvec![];
|
||||||
|
let text = token.text();
|
||||||
|
let kind = token.kind();
|
||||||
|
|
||||||
|
self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }| {
|
||||||
|
let mapped_kind = value.kind();
|
||||||
|
let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
|
||||||
|
let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
|
||||||
|
if matches {
|
||||||
|
r.push(value);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if r.is_empty() {
|
||||||
|
r.push(token);
|
||||||
|
}
|
||||||
|
r
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Descends the token into expansions, returning the first token that matches the input
|
||||||
|
/// token's [`SyntaxKind`] and text.
|
||||||
|
pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
|
||||||
|
let text = token.text();
|
||||||
|
let kind = token.kind();
|
||||||
|
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
|
||||||
|
self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| {
|
||||||
|
let mapped_kind = value.kind();
|
||||||
|
let any_ident_match =
|
||||||
|
|| kind.is_any_identifier() && value.kind().is_any_identifier();
|
||||||
|
let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
|
||||||
|
if matches {
|
||||||
|
ControlFlow::Break(value)
|
||||||
|
} else {
|
||||||
|
ControlFlow::Continue(())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
.unwrap_or(token)
|
||||||
|
}
|
||||||
|
|
||||||
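`DescendPreference` is gone: callers now pick between `descend_into_macros` (collect every descendant), `descend_into_macros_exact` (same kind and text), and `descend_into_macros_breakable`, whose callback can stop the walk early by returning `ControlFlow::Break`. A sketch of that early-exit pattern; the helper is invented, and it assumes the `InFile`/`InRealFile` re-exports and that the method is reachable through `Semantics` as in the IDE crates:

use std::ops::ControlFlow;

use hir::{InFile, InRealFile, Semantics};
use ide_db::RootDatabase;
use syntax::{SyntaxKind, SyntaxToken};

// Hypothetical: descend a token into macro expansions and stop at the first
// mapped token that is a string literal.
fn first_expanded_string(
    sema: &Semantics<'_, RootDatabase>,
    token: InRealFile<SyntaxToken>,
) -> Option<SyntaxToken> {
    sema.descend_into_macros_breakable(token, |InFile { value, .. }| {
        if value.kind() == SyntaxKind::STRING {
            ControlFlow::Break(value)
        } else {
            ControlFlow::Continue(())
        }
    })
}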
|
fn descend_into_macros_impl<T>(
|
||||||
&self,
|
&self,
|
||||||
token: SyntaxToken,
|
InRealFile { value: token, file_id }: InRealFile<SyntaxToken>,
|
||||||
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
|
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
|
||||||
) {
|
) -> Option<T> {
|
||||||
let _p = tracing::info_span!("descend_into_macros_impl").entered();
|
let _p = tracing::info_span!("descend_into_macros_impl").entered();
|
||||||
let (sa, span, file_id) =
|
let (sa, span, file_id) = token
|
||||||
match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
|
.parent()
|
||||||
Some(sa) => match sa.file_id.file_id() {
|
.and_then(|parent| {
|
||||||
Some(file_id) => (
|
self.analyze_impl(InRealFile::new(file_id, &parent).into(), None, false)
|
||||||
sa,
|
})
|
||||||
self.db.real_span_map(file_id).span_for_range(token.text_range()),
|
.and_then(|sa| {
|
||||||
file_id.into(),
|
let file_id = sa.file_id.file_id()?;
|
||||||
),
|
Some((
|
||||||
None => {
|
sa,
|
||||||
stdx::never!();
|
self.db.real_span_map(file_id).span_for_range(token.text_range()),
|
||||||
return;
|
HirFileId::from(file_id),
|
||||||
}
|
))
|
||||||
},
|
})?;
|
||||||
None => return,
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut m_cache = self.macro_call_cache.borrow_mut();
|
let mut m_cache = self.macro_call_cache.borrow_mut();
|
||||||
let def_map = sa.resolver.def_map();
|
let def_map = sa.resolver.def_map();
|
||||||
|
|
||||||
|
// A stack of tokens to process, along with the file they came from
|
||||||
|
// These are tracked to know which macro calls we still have to look into
|
||||||
|
// the tokens themselves aren't that interesting as the span that is being used to map
|
||||||
|
// things down never changes.
|
||||||
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
|
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
|
||||||
|
|
||||||
|
// Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
|
||||||
let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
|
let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
|
||||||
let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| {
|
let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| {
|
||||||
Some(
|
Some(
|
||||||
|
@ -809,7 +840,13 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
res
|
res
|
||||||
};
|
};
|
||||||
|
|
||||||
while let Some((file_id, mut tokens)) = stack.pop() {
|
// Filters out all tokens that contain the given range (usually the macro call), any such
|
||||||
|
// token is redundant as the corresponding macro call has already been processed
|
||||||
|
let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
|
||||||
|
tokens.retain(|t: &mut SyntaxToken| !range.contains_range(t.text_range()))
|
||||||
|
};
|
||||||
|
|
||||||
|
while let Some((expansion, ref mut tokens)) = stack.pop() {
|
||||||
while let Some(token) = tokens.pop() {
|
while let Some(token) = tokens.pop() {
|
||||||
let was_not_remapped = (|| {
|
let was_not_remapped = (|| {
|
||||||
// First expand into attribute invocations
|
// First expand into attribute invocations
|
||||||
|
@ -817,7 +854,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
|
token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
|
||||||
// Don't force populate the dyn cache for items that don't have an attribute anyways
|
// Don't force populate the dyn cache for items that don't have an attribute anyways
|
||||||
item.attrs().next()?;
|
item.attrs().next()?;
|
||||||
Some((ctx.item_to_macro_call(InFile::new(file_id, &item))?, item))
|
Some((ctx.item_to_macro_call(InFile::new(expansion, &item))?, item))
|
||||||
})
|
})
|
||||||
});
|
});
|
||||||
if let Some((call_id, item)) = containing_attribute_macro_call {
|
if let Some((call_id, item)) = containing_attribute_macro_call {
|
||||||
|
@ -849,9 +886,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
})
|
})
|
||||||
.unwrap_or_else(|| text_range.start());
|
.unwrap_or_else(|| text_range.start());
|
||||||
let text_range = TextRange::new(start, text_range.end());
|
let text_range = TextRange::new(start, text_range.end());
|
||||||
// remove any other token in this macro input, all their mappings are the
|
filter_duplicates(tokens, text_range);
|
||||||
// same as this one
|
|
||||||
tokens.retain(|t| !text_range.contains_range(t.text_range()));
|
|
||||||
return process_expansion_for_token(&mut stack, file_id);
|
return process_expansion_for_token(&mut stack, file_id);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -862,6 +897,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
|
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
|
||||||
.last()?;
|
.last()?;
|
||||||
match tt {
|
match tt {
|
||||||
|
// function-like macro call
|
||||||
Either::Left(tt) => {
|
Either::Left(tt) => {
|
||||||
if tt.left_delimiter_token().map_or(false, |it| it == token) {
|
if tt.left_delimiter_token().map_or(false, |it| it == token) {
|
||||||
return None;
|
return None;
|
||||||
|
@ -870,7 +906,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
|
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
|
||||||
let mcall = InFile::new(file_id, macro_call);
|
let mcall = InFile::new(expansion, macro_call);
|
||||||
let file_id = match m_cache.get(&mcall) {
|
let file_id = match m_cache.get(&mcall) {
|
||||||
Some(&it) => it,
|
Some(&it) => it,
|
||||||
None => {
|
None => {
|
||||||
|
@ -888,9 +924,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
let text_range = tt.syntax().text_range();
|
let text_range = tt.syntax().text_range();
|
||||||
// remove any other token in this macro input, all their mappings are the
|
filter_duplicates(tokens, text_range);
|
||||||
// same as this one
|
|
||||||
tokens.retain(|t| !text_range.contains_range(t.text_range()));
|
|
||||||
|
|
||||||
process_expansion_for_token(&mut stack, file_id).or(file_id
|
process_expansion_for_token(&mut stack, file_id).or(file_id
|
||||||
.eager_arg(self.db.upcast())
|
.eager_arg(self.db.upcast())
|
||||||
|
@ -899,6 +933,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
process_expansion_for_token(&mut stack, arg.as_macro_file())
|
process_expansion_for_token(&mut stack, arg.as_macro_file())
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
// derive or derive helper
|
||||||
Either::Right(meta) => {
|
Either::Right(meta) => {
|
||||||
// attribute we failed expansion for earlier, this might be a derive invocation
|
// attribute we failed expansion for earlier, this might be a derive invocation
|
||||||
// or derive helper attribute
|
// or derive helper attribute
|
||||||
|
@ -910,8 +945,8 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
// so try downmapping the token into the pseudo derive expansion
|
// so try downmapping the token into the pseudo derive expansion
|
||||||
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
|
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
|
||||||
ctx.attr_to_derive_macro_call(
|
ctx.attr_to_derive_macro_call(
|
||||||
InFile::new(file_id, &adt),
|
InFile::new(expansion, &adt),
|
||||||
InFile::new(file_id, attr.clone()),
|
InFile::new(expansion, attr.clone()),
|
||||||
)
|
)
|
||||||
.map(|(_, call_id, _)| call_id)
|
.map(|(_, call_id, _)| call_id)
|
||||||
});
|
});
|
||||||
|
@ -945,28 +980,29 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}?;
|
}?;
|
||||||
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) {
|
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(expansion, &adt))) {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
let attr_name =
|
let attr_name =
|
||||||
attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
|
attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
|
||||||
// Not an attribute, nor a derive, so it's either a builtin or a derive helper
|
// Not an attribute, nor a derive, so it's either an inert attribute or a derive helper
|
||||||
// Try to resolve to a derive helper and downmap
|
// Try to resolve to a derive helper and downmap
|
||||||
let id = self.db.ast_id_map(file_id).ast_id(&adt);
|
let id = self.db.ast_id_map(expansion).ast_id(&adt);
|
||||||
let helpers =
|
let helpers =
|
||||||
def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
|
def_map.derive_helpers_in_scope(InFile::new(expansion, id))?;
|
||||||
|
|
||||||
if !helpers.is_empty() {
|
if !helpers.is_empty() {
|
||||||
let text_range = attr.syntax().text_range();
|
let text_range = attr.syntax().text_range();
|
||||||
// remove any other token in this macro input, all their mappings are the
|
filter_duplicates(tokens, text_range);
|
||||||
// same as this
|
|
||||||
tokens.retain(|t| !text_range.contains_range(t.text_range()));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut res = None;
|
let mut res = None;
|
||||||
for (.., derive) in
|
for (.., derive) in
|
||||||
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
|
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
|
||||||
{
|
{
|
||||||
|
// as there may be multiple derives registering the same helper
|
||||||
|
// name, we gotta make sure to call this for all of them!
|
||||||
|
// FIXME: We need to call `f` for all of them as well though!
|
||||||
res = res.or(process_expansion_for_token(
|
res = res.or(process_expansion_for_token(
|
||||||
&mut stack,
|
&mut stack,
|
||||||
derive.as_macro_file(),
|
derive.as_macro_file(),
|
||||||
|
@ -978,11 +1014,14 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
})()
|
})()
|
||||||
.is_none();
|
.is_none();
|
||||||
|
|
||||||
if was_not_remapped && f(InFile::new(file_id, token)).is_break() {
|
if was_not_remapped {
|
||||||
break;
|
if let ControlFlow::Break(b) = f(InFile::new(expansion, token)) {
|
||||||
|
return Some(b);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
// Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
|
// Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
|
||||||
|
@ -995,7 +1034,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
offset: TextSize,
|
offset: TextSize,
|
||||||
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
|
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
|
||||||
node.token_at_offset(offset)
|
node.token_at_offset(offset)
|
||||||
.map(move |token| self.descend_into_macros(DescendPreference::None, token))
|
.map(move |token| self.descend_into_macros_exact(token))
|
||||||
.map(|descendants| {
|
.map(|descendants| {
|
||||||
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
|
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
|
||||||
})
|
})
|
||||||
|
@ -1179,7 +1218,8 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
|
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
|
||||||
Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
|
Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
|
||||||
}
|
}
|
||||||
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => {
|
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(_, m)) => {
|
||||||
|
// FIXME: Handle lifetimes here
|
||||||
Adjust::Borrow(AutoBorrow::Ref(mutability(m)))
|
Adjust::Borrow(AutoBorrow::Ref(mutability(m)))
|
||||||
}
|
}
|
||||||
hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
|
hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
|
||||||
|
@ -1413,11 +1453,13 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
|
|
||||||
/// Returns none if the file of the node is not part of a crate.
|
/// Returns none if the file of the node is not part of a crate.
|
||||||
fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
|
fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
|
||||||
|
let node = self.find_file(node);
|
||||||
self.analyze_impl(node, None, true)
|
self.analyze_impl(node, None, true)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns none if the file of the node is not part of a crate.
|
/// Returns none if the file of the node is not part of a crate.
|
||||||
fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
|
fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
|
||||||
|
let node = self.find_file(node);
|
||||||
self.analyze_impl(node, None, false)
|
self.analyze_impl(node, None, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1426,17 +1468,17 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
node: &SyntaxNode,
|
node: &SyntaxNode,
|
||||||
offset: TextSize,
|
offset: TextSize,
|
||||||
) -> Option<SourceAnalyzer> {
|
) -> Option<SourceAnalyzer> {
|
||||||
|
let node = self.find_file(node);
|
||||||
self.analyze_impl(node, Some(offset), false)
|
self.analyze_impl(node, Some(offset), false)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn analyze_impl(
|
fn analyze_impl(
|
||||||
&self,
|
&self,
|
||||||
node: &SyntaxNode,
|
node: InFile<&SyntaxNode>,
|
||||||
offset: Option<TextSize>,
|
offset: Option<TextSize>,
|
||||||
infer_body: bool,
|
infer_body: bool,
|
||||||
) -> Option<SourceAnalyzer> {
|
) -> Option<SourceAnalyzer> {
|
||||||
let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();
|
let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();
|
||||||
let node = self.find_file(node);
|
|
||||||
|
|
||||||
let container = self.with_ctx(|ctx| ctx.find_container(node))?;
|
let container = self.with_ctx(|ctx| ctx.find_container(node))?;
|
||||||
|
|
||||||
|
@ -1481,6 +1523,11 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
InFile::new(file_id, node)
|
InFile::new(file_id, node)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn wrap_token_infile(&self, token: SyntaxToken) -> InFile<SyntaxToken> {
|
||||||
|
let InFile { file_id, .. } = self.find_file(&token.parent().unwrap());
|
||||||
|
InFile::new(file_id, token)
|
||||||
|
}
|
||||||
|
|
||||||
/// Wraps the node in a [`InFile`] with the file id it belongs to.
|
/// Wraps the node in a [`InFile`] with the file id it belongs to.
|
||||||
fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
|
fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
|
||||||
let root_node = find_root(node);
|
let root_node = find_root(node);
|
||||||
|
|
|
@ -9,6 +9,7 @@ use hir_def::{
|
||||||
};
|
};
|
||||||
use hir_expand::HirFileId;
|
use hir_expand::HirFileId;
|
||||||
use hir_ty::{db::HirDatabase, display::HirDisplay};
|
use hir_ty::{db::HirDatabase, display::HirDisplay};
|
||||||
|
use span::Edition;
|
||||||
use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr};
|
use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr};
|
||||||
|
|
||||||
use crate::{Module, ModuleDef, Semantics};
|
use crate::{Module, ModuleDef, Semantics};
|
||||||
|
@ -54,6 +55,7 @@ pub struct SymbolCollector<'a> {
|
||||||
symbols: Vec<FileSymbol>,
|
symbols: Vec<FileSymbol>,
|
||||||
work: Vec<SymbolCollectorWork>,
|
work: Vec<SymbolCollectorWork>,
|
||||||
current_container_name: Option<SmolStr>,
|
current_container_name: Option<SmolStr>,
|
||||||
|
edition: Edition,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Given a [`ModuleId`] and a [`HirDatabase`], use the DefMap for the module's crate to collect
|
/// Given a [`ModuleId`] and a [`HirDatabase`], use the DefMap for the module's crate to collect
|
||||||
|
@ -65,10 +67,13 @@ impl<'a> SymbolCollector<'a> {
|
||||||
symbols: Default::default(),
|
symbols: Default::default(),
|
||||||
work: Default::default(),
|
work: Default::default(),
|
||||||
current_container_name: None,
|
current_container_name: None,
|
||||||
|
edition: Edition::Edition2015,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn collect(&mut self, module: Module) {
|
pub fn collect(&mut self, module: Module) {
|
||||||
|
self.edition = module.krate().edition(self.db);
|
||||||
|
|
||||||
// The initial work is the root module we're collecting, additional work will
|
// The initial work is the root module we're collecting, additional work will
|
||||||
// be populated as we traverse the module's definitions.
|
// be populated as we traverse the module's definitions.
|
||||||
self.work.push(SymbolCollectorWork { module_id: module.into(), parent: None });
|
self.work.push(SymbolCollectorWork { module_id: module.into(), parent: None });
|
||||||
|
@ -209,7 +214,8 @@ impl<'a> SymbolCollector<'a> {
|
||||||
|
|
||||||
fn collect_from_impl(&mut self, impl_id: ImplId) {
|
fn collect_from_impl(&mut self, impl_id: ImplId) {
|
||||||
let impl_data = self.db.impl_data(impl_id);
|
let impl_data = self.db.impl_data(impl_id);
|
||||||
let impl_name = Some(SmolStr::new(impl_data.self_ty.display(self.db).to_string()));
|
let impl_name =
|
||||||
|
Some(SmolStr::new(impl_data.self_ty.display(self.db, self.edition).to_string()));
|
||||||
self.with_container_name(impl_name, |s| {
|
self.with_container_name(impl_name, |s| {
|
||||||
for &assoc_item_id in impl_data.items.iter() {
|
for &assoc_item_id in impl_data.items.iter() {
|
||||||
s.push_assoc_item(assoc_item_id)
|
s.push_assoc_item(assoc_item_id)
|
||||||
|
@ -239,16 +245,16 @@ impl<'a> SymbolCollector<'a> {
|
||||||
fn def_with_body_id_name(&self, body_id: DefWithBodyId) -> Option<SmolStr> {
|
fn def_with_body_id_name(&self, body_id: DefWithBodyId) -> Option<SmolStr> {
|
||||||
match body_id {
|
match body_id {
|
||||||
DefWithBodyId::FunctionId(id) => {
|
DefWithBodyId::FunctionId(id) => {
|
||||||
Some(self.db.function_data(id).name.display_no_db().to_smolstr())
|
Some(self.db.function_data(id).name.display_no_db(self.edition).to_smolstr())
|
||||||
}
|
}
|
||||||
DefWithBodyId::StaticId(id) => {
|
DefWithBodyId::StaticId(id) => {
|
||||||
Some(self.db.static_data(id).name.display_no_db().to_smolstr())
|
Some(self.db.static_data(id).name.display_no_db(self.edition).to_smolstr())
|
||||||
}
|
}
|
||||||
DefWithBodyId::ConstId(id) => {
|
DefWithBodyId::ConstId(id) => {
|
||||||
Some(self.db.const_data(id).name.as_ref()?.display_no_db().to_smolstr())
|
Some(self.db.const_data(id).name.as_ref()?.display_no_db(self.edition).to_smolstr())
|
||||||
}
|
}
|
||||||
DefWithBodyId::VariantId(id) => {
|
DefWithBodyId::VariantId(id) => {
|
||||||
Some(self.db.enum_variant_data(id).name.display_no_db().to_smolstr())
|
Some(self.db.enum_variant_data(id).name.display_no_db(self.edition).to_smolstr())
|
||||||
}
|
}
|
||||||
DefWithBodyId::InTypeConstId(_) => Some("in type const".into()),
|
DefWithBodyId::InTypeConstId(_) => Some("in type const".into()),
|
||||||
}
|
}
|
||||||
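`SymbolCollector` now remembers the edition of the crate it is collecting and passes it to every `display_no_db` call, so collected names are escaped per edition. The underlying pattern, isolated into a hypothetical helper (only `Name::display_no_db` and `ToSmolStr` are taken from the code above):

use hir::Name;
use span::Edition;
use syntax::{SmolStr, ToSmolStr};

// Hypothetical: render a definition name without database access, using
// raw-identifier escaping when the name is a keyword in `edition`.
fn name_to_smolstr(name: &Name, edition: Edition) -> SmolStr {
    name.display_no_db(edition).to_smolstr()
}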
|
|
|
@ -7,6 +7,7 @@ use hir_ty::{
|
||||||
display::{DisplaySourceCodeError, HirDisplay},
|
display::{DisplaySourceCodeError, HirDisplay},
|
||||||
};
|
};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
use span::Edition;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, Local, ModuleDef,
|
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, Local, ModuleDef,
|
||||||
|
@ -29,9 +30,10 @@ fn mod_item_path_str(
|
||||||
sema_scope: &SemanticsScope<'_>,
|
sema_scope: &SemanticsScope<'_>,
|
||||||
def: &ModuleDef,
|
def: &ModuleDef,
|
||||||
cfg: ImportPathConfig,
|
cfg: ImportPathConfig,
|
||||||
|
edition: Edition,
|
||||||
) -> Result<String, DisplaySourceCodeError> {
|
) -> Result<String, DisplaySourceCodeError> {
|
||||||
let path = mod_item_path(sema_scope, def, cfg);
|
let path = mod_item_path(sema_scope, def, cfg);
|
||||||
path.map(|it| it.display(sema_scope.db.upcast()).to_string())
|
path.map(|it| it.display(sema_scope.db.upcast(), edition).to_string())
|
||||||
.ok_or(DisplaySourceCodeError::PathNotFound)
|
.ok_or(DisplaySourceCodeError::PathNotFound)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -97,37 +99,38 @@ impl Expr {
|
||||||
sema_scope: &SemanticsScope<'_>,
|
sema_scope: &SemanticsScope<'_>,
|
||||||
many_formatter: &mut dyn FnMut(&Type) -> String,
|
many_formatter: &mut dyn FnMut(&Type) -> String,
|
||||||
cfg: ImportPathConfig,
|
cfg: ImportPathConfig,
|
||||||
|
edition: Edition,
|
||||||
) -> Result<String, DisplaySourceCodeError> {
|
) -> Result<String, DisplaySourceCodeError> {
|
||||||
let db = sema_scope.db;
|
let db = sema_scope.db;
|
||||||
let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg);
|
let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg, edition);
|
||||||
match self {
|
match self {
|
||||||
Expr::Const(it) => match it.as_assoc_item(db).map(|it| it.container(db)) {
|
Expr::Const(it) => match it.as_assoc_item(db).map(|it| it.container(db)) {
|
||||||
Some(container) => {
|
Some(container) => {
|
||||||
let container_name = container_name(container, sema_scope, cfg)?;
|
let container_name = container_name(container, sema_scope, cfg, edition)?;
|
||||||
let const_name = it
|
let const_name = it
|
||||||
.name(db)
|
.name(db)
|
||||||
.map(|c| c.display(db.upcast()).to_string())
|
.map(|c| c.display(db.upcast(), edition).to_string())
|
||||||
.unwrap_or(String::new());
|
.unwrap_or(String::new());
|
||||||
Ok(format!("{container_name}::{const_name}"))
|
Ok(format!("{container_name}::{const_name}"))
|
||||||
}
|
}
|
||||||
None => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
|
None => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
|
||||||
},
|
},
|
||||||
Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
|
Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
|
||||||
Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()),
|
Expr::Local(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()),
|
||||||
Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()),
|
Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()),
|
||||||
Expr::FamousType { value, .. } => Ok(value.to_string()),
|
Expr::FamousType { value, .. } => Ok(value.to_string()),
|
||||||
Expr::Function { func, params, .. } => {
|
Expr::Function { func, params, .. } => {
|
||||||
let args = params
|
let args = params
|
||||||
.iter()
|
.iter()
|
||||||
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
|
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg, edition))
|
||||||
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
|
|
||||||
match func.as_assoc_item(db).map(|it| it.container(db)) {
|
match func.as_assoc_item(db).map(|it| it.container(db)) {
|
||||||
Some(container) => {
|
Some(container) => {
|
||||||
let container_name = container_name(container, sema_scope, cfg)?;
|
let container_name = container_name(container, sema_scope, cfg, edition)?;
|
||||||
let fn_name = func.name(db).display(db.upcast()).to_string();
|
let fn_name = func.name(db).display(db.upcast(), edition).to_string();
|
||||||
Ok(format!("{container_name}::{fn_name}({args})"))
|
Ok(format!("{container_name}::{fn_name}({args})"))
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
|
@ -141,12 +144,13 @@ impl Expr {
|
||||||
return Ok(many_formatter(&target.ty(db)));
|
return Ok(many_formatter(&target.ty(db)));
|
||||||
}
|
}
|
||||||
|
|
||||||
let func_name = func.name(db).display(db.upcast()).to_string();
|
let func_name = func.name(db).display(db.upcast(), edition).to_string();
|
||||||
let self_param = func.self_param(db).unwrap();
|
let self_param = func.self_param(db).unwrap();
|
||||||
let target_str = target.gen_source_code(sema_scope, many_formatter, cfg)?;
|
let target_str =
|
||||||
|
target.gen_source_code(sema_scope, many_formatter, cfg, edition)?;
|
||||||
let args = params
|
let args = params
|
||||||
.iter()
|
.iter()
|
||||||
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
|
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg, edition))
|
||||||
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
|
@ -176,7 +180,7 @@ impl Expr {
|
||||||
StructKind::Tuple => {
|
StructKind::Tuple => {
|
||||||
let args = params
|
let args = params
|
||||||
.iter()
|
.iter()
|
||||||
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
|
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg, edition))
|
||||||
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
|
@ -190,8 +194,8 @@ impl Expr {
|
||||||
.map(|(a, f)| {
|
.map(|(a, f)| {
|
||||||
let tmp = format!(
|
let tmp = format!(
|
||||||
"{}: {}",
|
"{}: {}",
|
||||||
f.name(db).display(db.upcast()),
|
f.name(db).display(db.upcast(), edition),
|
||||||
a.gen_source_code(sema_scope, many_formatter, cfg)?
|
a.gen_source_code(sema_scope, many_formatter, cfg, edition)?
|
||||||
);
|
);
|
||||||
Ok(tmp)
|
Ok(tmp)
|
||||||
})
|
})
|
||||||
|
@ -211,7 +215,7 @@ impl Expr {
|
||||||
StructKind::Tuple => {
|
StructKind::Tuple => {
|
||||||
let args = params
|
let args = params
|
||||||
.iter()
|
.iter()
|
||||||
.map(|a| a.gen_source_code(sema_scope, many_formatter, cfg))
|
.map(|a| a.gen_source_code(sema_scope, many_formatter, cfg, edition))
|
||||||
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
|
@ -225,8 +229,8 @@ impl Expr {
|
||||||
.map(|(a, f)| {
|
.map(|(a, f)| {
|
||||||
let tmp = format!(
|
let tmp = format!(
|
||||||
"{}: {}",
|
"{}: {}",
|
||||||
f.name(db).display(db.upcast()),
|
f.name(db).display(db.upcast(), edition),
|
||||||
a.gen_source_code(sema_scope, many_formatter, cfg)?
|
a.gen_source_code(sema_scope, many_formatter, cfg, edition)?
|
||||||
);
|
);
|
||||||
Ok(tmp)
|
Ok(tmp)
|
||||||
})
|
})
|
||||||
|
@ -244,7 +248,7 @@ impl Expr {
|
||||||
Expr::Tuple { params, .. } => {
|
Expr::Tuple { params, .. } => {
|
||||||
let args = params
|
let args = params
|
||||||
.iter()
|
.iter()
|
||||||
.map(|a| a.gen_source_code(sema_scope, many_formatter, cfg))
|
.map(|a| a.gen_source_code(sema_scope, many_formatter, cfg, edition))
|
||||||
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
|
@ -256,8 +260,8 @@ impl Expr {
|
||||||
return Ok(many_formatter(&expr.ty(db)));
|
return Ok(many_formatter(&expr.ty(db)));
|
||||||
}
|
}
|
||||||
|
|
||||||
let strukt = expr.gen_source_code(sema_scope, many_formatter, cfg)?;
|
let strukt = expr.gen_source_code(sema_scope, many_formatter, cfg, edition)?;
|
||||||
let field = field.name(db).display(db.upcast()).to_string();
|
let field = field.name(db).display(db.upcast(), edition).to_string();
|
||||||
Ok(format!("{strukt}.{field}"))
|
Ok(format!("{strukt}.{field}"))
|
||||||
}
|
}
|
||||||
Expr::Reference(expr) => {
|
Expr::Reference(expr) => {
|
||||||
|
@ -265,7 +269,7 @@ impl Expr {
|
||||||
return Ok(many_formatter(&expr.ty(db)));
|
return Ok(many_formatter(&expr.ty(db)));
|
||||||
}
|
}
|
||||||
|
|
||||||
let inner = expr.gen_source_code(sema_scope, many_formatter, cfg)?;
|
let inner = expr.gen_source_code(sema_scope, many_formatter, cfg, edition)?;
|
||||||
Ok(format!("&{inner}"))
|
Ok(format!("&{inner}"))
|
||||||
}
|
}
|
||||||
Expr::Many(ty) => Ok(many_formatter(ty)),
|
Expr::Many(ty) => Ok(many_formatter(ty)),
|
||||||
|
@ -353,17 +357,18 @@ fn container_name(
|
||||||
container: AssocItemContainer,
|
container: AssocItemContainer,
|
||||||
sema_scope: &SemanticsScope<'_>,
|
sema_scope: &SemanticsScope<'_>,
|
||||||
cfg: ImportPathConfig,
|
cfg: ImportPathConfig,
|
||||||
|
edition: Edition,
|
||||||
) -> Result<String, DisplaySourceCodeError> {
|
) -> Result<String, DisplaySourceCodeError> {
|
||||||
let container_name = match container {
|
let container_name = match container {
|
||||||
crate::AssocItemContainer::Trait(trait_) => {
|
crate::AssocItemContainer::Trait(trait_) => {
|
||||||
mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_), cfg)?
|
mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_), cfg, edition)?
|
||||||
}
|
}
|
||||||
crate::AssocItemContainer::Impl(imp) => {
|
crate::AssocItemContainer::Impl(imp) => {
|
||||||
let self_ty = imp.self_ty(sema_scope.db);
|
let self_ty = imp.self_ty(sema_scope.db);
|
||||||
// Should it be guaranteed that `mod_item_path` always exists?
|
// Should it be guaranteed that `mod_item_path` always exists?
|
||||||
match self_ty.as_adt().and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg)) {
|
match self_ty.as_adt().and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg)) {
|
||||||
Some(path) => path.display(sema_scope.db.upcast()).to_string(),
|
Some(path) => path.display(sema_scope.db.upcast(), edition).to_string(),
|
||||||
None => self_ty.display(sema_scope.db).to_string(),
|
None => self_ty.display(sema_scope.db, edition).to_string(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
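Term search's `Expr::gen_source_code` now takes the edition of the crate the generated expression will be inserted into, in addition to the import path config. A sketch of a call site; the wrapper name and the `todo!()` placeholder for ambiguous positions are illustrative:

use hir::term_search::Expr;
use hir::{db::HirDatabase, ImportPathConfig, SemanticsScope, Type};
use span::Edition;

// Hypothetical: render a term-search result, writing `todo!()` wherever
// several expressions would fit equally well.
fn expr_to_string(
    scope: &SemanticsScope<'_>,
    expr: &Expr,
    cfg: ImportPathConfig,
    edition: Edition,
) -> Option<String> {
    let mut many_formatter = |_: &Type| "todo!()".to_owned();
    expr.gen_source_code(scope, &mut many_formatter, cfg, edition).ok()
}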
|
|
|
@ -1,5 +1,8 @@
|
||||||
use hir::HasSource;
|
use hir::HasSource;
|
||||||
use syntax::ast::{self, make, AstNode};
|
use syntax::{
|
||||||
|
ast::{self, make, AstNode},
|
||||||
|
Edition,
|
||||||
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
assist_context::{AssistContext, Assists},
|
assist_context::{AssistContext, Assists},
|
||||||
|
@ -150,14 +153,22 @@ fn add_missing_impl_members_inner(
|
||||||
&missing_items,
|
&missing_items,
|
||||||
trait_,
|
trait_,
|
||||||
&new_impl_def,
|
&new_impl_def,
|
||||||
target_scope,
|
&target_scope,
|
||||||
);
|
);
|
||||||
|
|
||||||
if let Some(cap) = ctx.config.snippet_cap {
|
if let Some(cap) = ctx.config.snippet_cap {
|
||||||
let mut placeholder = None;
|
let mut placeholder = None;
|
||||||
if let DefaultMethods::No = mode {
|
if let DefaultMethods::No = mode {
|
||||||
if let ast::AssocItem::Fn(func) = &first_new_item {
|
if let ast::AssocItem::Fn(func) = &first_new_item {
|
||||||
if try_gen_trait_body(ctx, func, trait_ref, &impl_def).is_none() {
|
if try_gen_trait_body(
|
||||||
|
ctx,
|
||||||
|
func,
|
||||||
|
trait_ref,
|
||||||
|
&impl_def,
|
||||||
|
target_scope.krate().edition(ctx.sema.db),
|
||||||
|
)
|
||||||
|
.is_none()
|
||||||
|
{
|
||||||
if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
|
if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
|
||||||
{
|
{
|
||||||
if m.syntax().text() == "todo!()" {
|
if m.syntax().text() == "todo!()" {
|
||||||
|
@ -182,9 +193,11 @@ fn try_gen_trait_body(
|
||||||
func: &ast::Fn,
|
func: &ast::Fn,
|
||||||
trait_ref: hir::TraitRef,
|
trait_ref: hir::TraitRef,
|
||||||
impl_def: &ast::Impl,
|
impl_def: &ast::Impl,
|
||||||
|
edition: Edition,
|
||||||
) -> Option<()> {
|
) -> Option<()> {
|
||||||
let trait_path =
|
let trait_path = make::ext::ident_path(
|
||||||
make::ext::ident_path(&trait_ref.trait_().name(ctx.db()).display(ctx.db()).to_string());
|
&trait_ref.trait_().name(ctx.db()).display(ctx.db(), edition).to_string(),
|
||||||
|
);
|
||||||
let hir_ty = ctx.sema.resolve_type(&impl_def.self_ty()?)?;
|
let hir_ty = ctx.sema.resolve_type(&impl_def.self_ty()?)?;
|
||||||
let adt = hir_ty.as_adt()?.source(ctx.db())?;
|
let adt = hir_ty.as_adt()?.source(ctx.db())?;
|
||||||
gen_trait_fn_body(func, &trait_path, &adt.value, Some(trait_ref))
|
gen_trait_fn_body(func, &trait_path, &adt.value, Some(trait_ref))
|
||||||
|
|
|
@@ -445,7 +445,8 @@ fn build_pat(
 ) -> Option<ast::Pat> {
 match var {
 ExtendedVariant::Variant(var) => {
-let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?);
+let edition = module.krate().edition(db);
+let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?, edition);
 // FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
 Some(match var.source(db)?.value.kind() {
 ast::StructKind::Tuple(field_list) => {
@@ -8,7 +8,7 @@ use ide_db::{
 insert_use::{insert_use, insert_use_as_alias, ImportScope},
 },
 };
-use syntax::{ast, AstNode, NodeOrToken, SyntaxElement};
+use syntax::{ast, AstNode, Edition, NodeOrToken, SyntaxElement};
 
 use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
 
@@ -120,13 +120,14 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
 // prioritize more relevant imports
 proposed_imports
 .sort_by_key(|import| Reverse(relevance_score(ctx, import, current_module.as_ref())));
+let edition = current_module.map(|it| it.krate().edition(ctx.db())).unwrap_or(Edition::CURRENT);
 
 let group_label = group_label(import_assets.import_candidate());
 for import in proposed_imports {
 let import_path = import.import_path;
 
 let (assist_id, import_name) =
-(AssistId("auto_import", AssistKind::QuickFix), import_path.display(ctx.db()));
+(AssistId("auto_import", AssistKind::QuickFix), import_path.display(ctx.db(), edition));
 acc.add_group(
 &group_label,
 assist_id,
@@ -138,7 +139,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
 ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
 ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
 };
-insert_use(&scope, mod_path_to_ast(&import_path), &ctx.config.insert_use);
+insert_use(&scope, mod_path_to_ast(&import_path, edition), &ctx.config.insert_use);
 },
 );
 
@@ -165,7 +166,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
 };
 insert_use_as_alias(
 &scope,
-mod_path_to_ast(&import_path),
+mod_path_to_ast(&import_path, edition),
 &ctx.config.insert_use,
 );
 },
@@ -339,6 +339,7 @@ fn augment_references_with_imports(
 
 let cfg = ctx.config.import_path_config();
 
+let edition = target_module.krate().edition(ctx.db());
 references
 .into_iter()
 .filter_map(|FileReference { range, name, .. }| {
@@ -361,7 +362,10 @@ fn augment_references_with_imports(
 cfg,
 )
 .map(|mod_path| {
-make::path_concat(mod_path_to_ast(&mod_path), make::path_from_text("Bool"))
+make::path_concat(
+mod_path_to_ast(&mod_path, edition),
+make::path_from_text("Bool"),
+)
 });
 
 import_scope.zip(path)
@@ -159,7 +159,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
 };
 // Verify this is `bool::then` that is being called.
 let func = ctx.sema.resolve_method_call(&mcall)?;
-if func.name(ctx.sema.db).display(ctx.db()).to_string() != "then" {
+if !func.name(ctx.sema.db).eq_ident("then") {
 return None;
 }
 let assoc = func.as_assoc_item(ctx.sema.db)?;
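Several hunks (here, and later in `existing_definition` and `add_vis_to_referenced_module_def`) replace "render the name, then compare strings" with `Name::eq_ident`, which compares against a plain identifier while ignoring a raw-identifier prefix and avoids allocating a display string. A rough, self-contained model of what such a comparison does; this is a sketch of the idea, not rust-analyzer's implementation:

```rust
// Toy stand-in for hir::Name: stores the text as written, possibly `r#`-prefixed.
struct Name(String);

impl Name {
    // Compare against a plain identifier, treating `r#foo` and `foo` as equal.
    fn eq_ident(&self, ident: &str) -> bool {
        self.0.strip_prefix("r#").unwrap_or(&self.0) == ident
    }
}

fn main() {
    assert!(Name("then".into()).eq_ident("then"));
    assert!(Name("r#gen".into()).eq_ident("gen"));
    assert!(!Name("map".into()).eq_ident("then"));
}
```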
crates/ide-assists/src/handlers/convert_closure_to_fn.rs: new file, 1271 lines (diff suppressed because it is too large)
@@ -51,7 +51,10 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
 Some(hir::PathResolution::Def(module_def)) => module_def,
 _ => return None,
 };
-mod_path_to_ast(&module.find_path(ctx.db(), src_type_def, cfg)?)
+mod_path_to_ast(
+&module.find_path(ctx.db(), src_type_def, cfg)?,
+module.krate().edition(ctx.db()),
+)
 };
 
 let dest_type = match &ast_trait {
@@ -114,12 +114,16 @@ pub(crate) fn convert_for_loop_with_for_each(
 |builder| {
 let mut buf = String::new();
 
-if let Some((expr_behind_ref, method)) =
+if let Some((expr_behind_ref, method, krate)) =
 is_ref_and_impls_iter_method(&ctx.sema, &iterable)
 {
 // We have either "for x in &col" and col implements a method called iter
 // or "for x in &mut col" and col implements a method called iter_mut
-format_to!(buf, "{expr_behind_ref}.{}()", method.display(ctx.db()));
+format_to!(
+buf,
+"{expr_behind_ref}.{}()",
+method.display(ctx.db(), krate.edition(ctx.db()))
+);
 } else if let ast::Expr::RangeExpr(..) = iterable {
 // range expressions need to be parenthesized for the syntax to be correct
 format_to!(buf, "({iterable})");
@@ -144,7 +148,7 @@ pub(crate) fn convert_for_loop_with_for_each(
 fn is_ref_and_impls_iter_method(
 sema: &hir::Semantics<'_, ide_db::RootDatabase>,
 iterable: &ast::Expr,
-) -> Option<(ast::Expr, hir::Name)> {
+) -> Option<(ast::Expr, hir::Name, hir::Crate)> {
 let ref_expr = match iterable {
 ast::Expr::RefExpr(r) => r,
 _ => return None,
@@ -172,7 +176,7 @@ fn is_ref_and_impls_iter_method(
 return None;
 }
 
-Some((expr_behind_ref, wanted_method))
+Some((expr_behind_ref, wanted_method, krate))
 }
 
 /// Whether iterable implements core::Iterator
@@ -2,8 +2,8 @@ use either::Either;
 use ide_db::{defs::Definition, search::FileReference};
 use itertools::Itertools;
 use syntax::{
-ast::{self, AstNode, HasGenericParams, HasVisibility},
-match_ast, SyntaxKind,
+ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility},
+match_ast, ted, SyntaxKind,
 };
 
 use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
@@ -87,9 +87,14 @@ fn edit_struct_def(
 ) {
 // Note that we don't need to consider macro files in this function because this is
 // currently not triggered for struct definitions inside macro calls.
-let tuple_fields = record_fields
-.fields()
-.filter_map(|f| Some(ast::make::tuple_field(f.visibility(), f.ty()?)));
+let tuple_fields = record_fields.fields().filter_map(|f| {
+let field = ast::make::tuple_field(f.visibility(), f.ty()?).clone_for_update();
+ted::insert_all(
+ted::Position::first_child_of(field.syntax()),
+f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(),
+);
+Some(field)
+});
 let tuple_fields = ast::make::tuple_field_list(tuple_fields);
 let record_fields_text_range = record_fields.syntax().text_range();
 
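This hunk (and its mirror image in the tuple-to-named direction below) makes the struct conversion carry each field's attributes over to the generated field instead of dropping them, by cloning the attribute nodes and inserting them in front of the new field. A crude, self-contained model of "copy the attributes across" using plain data instead of syntax trees; purely illustrative, not the assist's real data structures:

```rust
// Toy field representations: a named field and a tuple field, both carrying attrs.
struct RecordField { attrs: Vec<String>, vis: Option<String>, name: String, ty: String }
struct TupleField { attrs: Vec<String>, vis: Option<String>, ty: String }

// Previously the conversion built the tuple field from visibility + type only;
// now the attributes are cloned across as well.
fn to_tuple_field(f: &RecordField) -> TupleField {
    TupleField { attrs: f.attrs.clone(), vis: f.vis.clone(), ty: f.ty.clone() }
}

fn main() {
    let f = RecordField {
        attrs: vec!["#[my_custom_attr]".into()],
        vis: None,
        name: "value".into(),
        ty: "u32".into(),
    };
    let t = to_tuple_field(&f);
    assert_eq!(t.attrs, vec!["#[my_custom_attr]".to_string()]);
}
```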
@@ -975,6 +980,22 @@ impl HasAssoc for Struct {
 let Self::Assoc { value } = a;
 }
 }
+"#,
+);
+}
+
+#[test]
+fn fields_with_attrs() {
+check_assist(
+convert_named_struct_to_tuple_struct,
+r#"
+pub struct $0Foo {
+#[my_custom_attr]
+value: u32,
+}
+"#,
+r#"
+pub struct Foo(#[my_custom_attr] u32);
 "#,
 );
 }
@@ -211,7 +211,7 @@ fn augment_references_with_imports(
 )
 .map(|mod_path| {
 make::path_concat(
-mod_path_to_ast(&mod_path),
+mod_path_to_ast(&mod_path, target_module.krate().edition(ctx.db())),
 make::path_from_text(struct_name),
 )
 });
@@ -1,8 +1,8 @@
 use either::Either;
 use ide_db::defs::{Definition, NameRefClass};
 use syntax::{
-ast::{self, AstNode, HasGenericParams, HasVisibility},
-match_ast, SyntaxKind, SyntaxNode,
+ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility},
+match_ast, ted, SyntaxKind, SyntaxNode,
 };
 
 use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
@@ -83,10 +83,14 @@ fn edit_struct_def(
 tuple_fields: ast::TupleFieldList,
 names: Vec<ast::Name>,
 ) {
-let record_fields = tuple_fields
-.fields()
-.zip(names)
-.filter_map(|(f, name)| Some(ast::make::record_field(f.visibility(), name, f.ty()?)));
+let record_fields = tuple_fields.fields().zip(names).filter_map(|(f, name)| {
+let field = ast::make::record_field(f.visibility(), name, f.ty()?).clone_for_update();
+ted::insert_all(
+ted::Position::first_child_of(field.syntax()),
+f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(),
+);
+Some(field)
+});
 let record_fields = ast::make::record_field_list(record_fields);
 let tuple_fields_text_range = tuple_fields.syntax().text_range();
 
@@ -904,6 +908,19 @@ where
 T: Foo,
 { pub field1: T }
 
+"#,
+);
+}
+
+#[test]
+fn fields_with_attrs() {
+check_assist(
+convert_tuple_struct_to_named_struct,
+r#"
+pub struct $0Foo(#[my_custom_attr] u32);
+"#,
+r#"
+pub struct Foo { #[my_custom_attr] field1: u32 }
 "#,
 );
 }
@@ -1,5 +1,6 @@
-use std::iter::once;
+use std::iter;
 
+use either::Either;
 use ide_db::syntax_helpers::node_ext::is_pattern_cond;
 use syntax::{
 ast::{
@@ -52,18 +53,30 @@ pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>)
 |edit| {
 let while_indent_level = IndentLevel::from_node(while_expr.syntax());
 
-let break_block =
-make::block_expr(once(make::expr_stmt(make::expr_break(None, None)).into()), None)
-.indent(while_indent_level);
+let break_block = make::block_expr(
+iter::once(make::expr_stmt(make::expr_break(None, None)).into()),
+None,
+)
+.indent(while_indent_level);
 let block_expr = if is_pattern_cond(while_cond.clone()) {
 let if_expr = make::expr_if(while_cond, while_body, Some(break_block.into()));
-let stmts = once(make::expr_stmt(if_expr).into());
+let stmts = iter::once(make::expr_stmt(if_expr).into());
 make::block_expr(stmts, None)
 } else {
 let if_cond = invert_boolean_expression(while_cond);
-let if_expr = make::expr_if(if_cond, break_block, None);
-let stmts = once(make::expr_stmt(if_expr).into()).chain(while_body.statements());
-make::block_expr(stmts, while_body.tail_expr())
+let if_expr = make::expr_if(if_cond, break_block, None).syntax().clone().into();
+let elements = while_body.stmt_list().map_or_else(
+|| Either::Left(iter::empty()),
+|stmts| {
+Either::Right(stmts.syntax().children_with_tokens().filter(|node_or_tok| {
+// Filter out the trailing expr
+!node_or_tok
+.as_node()
+.is_some_and(|node| ast::Expr::can_cast(node.kind()))
+}))
+},
+);
+make::hacky_block_expr(iter::once(if_expr).chain(elements), while_body.tail_expr())
 };
 
 let replacement = make::expr_loop(block_expr.indent(while_indent_level));
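The `else` branch above used to rebuild the loop body from its statements only, which silently dropped comments and other trivia; it now copies the body's child syntax elements verbatim (minus the trailing expression) into a `hacky_block_expr`, which is what the new `preserve_comments` tests below exercise. A small, self-contained model of "keep everything except the trailing expression"; the element enum here is invented for illustration, not rust-analyzer's syntax tree:

```rust
// Toy syntax elements inside a block body.
#[derive(Debug, PartialEq, Clone)]
enum Element {
    Stmt(&'static str),
    Comment(&'static str),
    TailExpr(&'static str),
}

// Old behaviour: keep statements only (comments are lost).
// New behaviour: keep every element except the trailing expression.
fn keep_non_tail(body: &[Element]) -> Vec<Element> {
    body.iter().filter(|e| !matches!(e, Element::TailExpr(_))).cloned().collect()
}

fn main() {
    let body = vec![
        Element::Comment("// comment 1"),
        Element::Stmt("dbg!(i);"),
        Element::Comment("// comment 2"),
        Element::TailExpr("i"),
    ];
    let kept = keep_non_tail(&body);
    assert_eq!(kept.len(), 3);
    assert!(kept.iter().any(|e| matches!(e, Element::Comment(_))));
}
```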
@@ -182,6 +195,74 @@ fn main() {
 bar();
 }
 }
+"#,
+);
+}
+
+#[test]
+fn preserve_comments() {
+check_assist(
+convert_while_to_loop,
+r#"
+fn main() {
+let mut i = 0;
+
+$0while i < 5 {
+// comment 1
+dbg!(i);
+// comment 2
+i += 1;
+// comment 3
+}
+}
+"#,
+r#"
+fn main() {
+let mut i = 0;
+
+loop {
+if i >= 5 {
+break;
+}
+// comment 1
+dbg!(i);
+// comment 2
+i += 1;
+// comment 3
+}
+}
+"#,
+);
+
+check_assist(
+convert_while_to_loop,
+r#"
+fn main() {
+let v = vec![1, 2, 3];
+let iter = v.iter();
+
+$0while let Some(i) = iter.next() {
+// comment 1
+dbg!(i);
+// comment 2
+}
+}
+"#,
+r#"
+fn main() {
+let v = vec![1, 2, 3];
+let iter = v.iter();
+
+loop {
+if let Some(i) = iter.next() {
+// comment 1
+dbg!(i);
+// comment 2
+} else {
+break;
+}
+}
+}
 "#,
 );
 }
@@ -7,7 +7,7 @@ use ide_db::{
 FxHashMap, FxHashSet,
 };
 use itertools::Itertools;
-use syntax::{ast, ted, AstNode, SmolStr, SyntaxNode, ToSmolStr};
+use syntax::{ast, ted, AstNode, Edition, SmolStr, SyntaxNode, ToSmolStr};
 use text_edit::TextRange;
 
 use crate::{
@@ -81,6 +81,7 @@ struct StructEditData {
 has_private_members: bool,
 is_nested: bool,
 is_ref: bool,
+edition: Edition,
 }
 
 fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<StructEditData> {
@@ -145,6 +146,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
 names_in_scope,
 is_nested,
 is_ref,
+edition: module.krate().edition(ctx.db()),
 })
 }
 
@@ -180,7 +182,7 @@ fn build_assignment_edit(
 ) -> AssignmentEdit {
 let ident_pat = builder.make_mut(data.ident_pat.clone());
 
-let struct_path = mod_path_to_ast(&data.struct_def_path);
+let struct_path = mod_path_to_ast(&data.struct_def_path, data.edition);
 let is_ref = ident_pat.ref_token().is_some();
 let is_mut = ident_pat.mut_token().is_some();
 
@@ -247,7 +249,7 @@ fn generate_field_names(ctx: &AssistContext<'_>, data: &StructEditData) -> Vec<(
 .visible_fields
 .iter()
 .map(|field| {
-let field_name = field.name(ctx.db()).display_no_db().to_smolstr();
+let field_name = field.name(ctx.db()).display_no_db(data.edition).to_smolstr();
 let new_name = new_field_name(field_name.clone(), &data.names_in_scope);
 (field_name, new_name)
 })
@@ -66,7 +66,9 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) ->
 
 let names_to_import = find_names_to_import(ctx, refs_in_target, imported_defs);
 let expanded = make::use_tree_list(names_to_import.iter().map(|n| {
-let path = make::ext::ident_path(&n.display(ctx.db()).to_string());
+let path = make::ext::ident_path(
+&n.display(ctx.db(), current_module.krate().edition(ctx.db())).to_string(),
+);
 make::use_tree(path, None, None, false)
 }))
 .clone_for_update();
@@ -1,11 +1,7 @@
 use crate::{utils, AssistContext, Assists};
-use hir::DescendPreference;
 use ide_db::{
 assists::{AssistId, AssistKind},
-syntax_helpers::{
-format_string::is_format_string,
-format_string_exprs::{parse_format_exprs, Arg},
-},
+syntax_helpers::format_string_exprs::{parse_format_exprs, Arg},
 };
 use itertools::Itertools;
 use syntax::{
@@ -40,13 +36,7 @@ pub(crate) fn extract_expressions_from_format_string(
 let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
 let tt_delimiter = tt.left_delimiter_token()?.kind();
 
-let expanded_t = ast::String::cast(
-ctx.sema
-.descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone()),
-)?;
-if !is_format_string(&expanded_t) {
-return None;
-}
+let _ = ctx.sema.as_format_args_parts(&fmt_string)?;
 
 let (new_fmt, extracted_args) = parse_format_exprs(fmt_string.text()).ok()?;
 if extracted_args.is_empty() {
@@ -3,8 +3,8 @@ use std::{iter, ops::RangeInclusive};
 use ast::make;
 use either::Either;
 use hir::{
-DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef,
-PathResolution, Semantics, TypeInfo, TypeParam,
+HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics,
+TypeInfo, TypeParam,
 };
 use ide_db::{
 defs::{Definition, NameRefClass},
@@ -23,7 +23,7 @@ use syntax::{
 self, edit::IndentLevel, edit_in_place::Indent, AstNode, AstToken, HasGenericParams,
 HasName,
 },
-match_ast, ted, SyntaxElement,
+match_ast, ted, Edition, SyntaxElement,
 SyntaxKind::{self, COMMENT},
 SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T,
 };
@@ -84,7 +84,6 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
 };
 
 let body = extraction_target(&node, range)?;
-let (container_info, contains_tail_expr) = body.analyze_container(&ctx.sema)?;
 
 let (locals_used, self_param) = body.analyze(&ctx.sema);
 
@@ -92,6 +91,9 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
 let insert_after = node_to_insert_after(&body, anchor)?;
 let semantics_scope = ctx.sema.scope(&insert_after)?;
 let module = semantics_scope.module();
+let edition = semantics_scope.krate().edition(ctx.db());
+
+let (container_info, contains_tail_expr) = body.analyze_container(&ctx.sema, edition)?;
 
 let ret_ty = body.return_ty(ctx)?;
 let control_flow = body.external_control_flow(ctx, &container_info)?;
@@ -217,7 +219,11 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
 );
 
 if let Some(mod_path) = mod_path {
-insert_use(&scope, mod_path_to_ast(&mod_path), &ctx.config.insert_use);
+insert_use(
+&scope,
+mod_path_to_ast(&mod_path, edition),
+&ctx.config.insert_use,
+);
 }
 }
 }
@@ -238,7 +244,13 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
 fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef {
 let mut names_in_scope = vec![];
 semantics_scope.process_all_names(&mut |name, _| {
-names_in_scope.push(name.display(semantics_scope.db.upcast()).to_string())
+names_in_scope.push(
+name.display(
+semantics_scope.db.upcast(),
+semantics_scope.krate().edition(semantics_scope.db),
+)
+.to_string(),
+)
 });
 
 let default_name = "fun_name";
@@ -366,6 +378,7 @@ struct ContainerInfo {
 ret_type: Option<hir::Type>,
 generic_param_lists: Vec<ast::GenericParamList>,
 where_clauses: Vec<ast::WhereClause>,
+edition: Edition,
 }
 
 /// Control flow that is exported from extracted function
@@ -489,8 +502,8 @@ impl Param {
 }
 }
 
-fn to_arg(&self, ctx: &AssistContext<'_>) -> ast::Expr {
-let var = path_expr_from_local(ctx, self.var);
+fn to_arg(&self, ctx: &AssistContext<'_>, edition: Edition) -> ast::Expr {
+let var = path_expr_from_local(ctx, self.var, edition);
 match self.kind() {
 ParamKind::Value | ParamKind::MutValue => var,
 ParamKind::SharedRef => make::expr_ref(var, false),
@@ -498,8 +511,13 @@ impl Param {
 }
 }
 
-fn to_param(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Param {
-let var = self.var.name(ctx.db()).display(ctx.db()).to_string();
+fn to_param(
+&self,
+ctx: &AssistContext<'_>,
+module: hir::Module,
+edition: Edition,
+) -> ast::Param {
+let var = self.var.name(ctx.db()).display(ctx.db(), edition).to_string();
 let var_name = make::name(&var);
 let pat = match self.kind() {
 ParamKind::MutValue => make::ident_pat(false, true, var_name),
@@ -520,7 +538,7 @@ impl Param {
 }
 
 impl TryKind {
-fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>) -> Option<TryKind> {
+fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>, edition: Edition) -> Option<TryKind> {
 if ty.is_unknown() {
 // We favour Result for `expr?`
 return Some(TryKind::Result { ty });
@@ -529,7 +547,7 @@ impl TryKind {
 let name = adt.name(ctx.db());
 // FIXME: use lang items to determine if it is std type or user defined
 // E.g. if user happens to define type named `Option`, we would have false positive
-let name = &name.display(ctx.db()).to_string();
+let name = &name.display(ctx.db(), edition).to_string();
 match name.as_str() {
 "Option" => Some(TryKind::Option),
 "Result" => Some(TryKind::Result { ty }),
@@ -816,7 +834,7 @@ impl FunctionBody {
 .descendants_with_tokens()
 .filter_map(SyntaxElement::into_token)
 .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
-.flat_map(|t| sema.descend_into_macros(DescendPreference::None, t))
+.flat_map(|t| sema.descend_into_macros_exact(t))
 .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
 }
 }
@@ -828,6 +846,7 @@ impl FunctionBody {
 fn analyze_container(
 &self,
 sema: &Semantics<'_, RootDatabase>,
+edition: Edition,
 ) -> Option<(ContainerInfo, bool)> {
 let mut ancestors = self.parent()?.ancestors();
 let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted);
@@ -927,6 +946,7 @@ impl FunctionBody {
 ret_type: ty,
 generic_param_lists,
 where_clauses,
+edition,
 },
 contains_tail_expr,
 ))
@@ -1015,7 +1035,7 @@ impl FunctionBody {
 let kind = match (try_expr, ret_expr, break_expr, continue_expr) {
 (Some(_), _, None, None) => {
 let ret_ty = container_info.ret_type.clone()?;
-let kind = TryKind::of_ty(ret_ty, ctx)?;
+let kind = TryKind::of_ty(ret_ty, ctx, container_info.edition)?;
 
 Some(FlowKind::Try { kind })
 }
@@ -1397,7 +1417,7 @@ fn fixup_call_site(builder: &mut SourceChangeBuilder, body: &FunctionBody) {
 fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> SyntaxNode {
 let ret_ty = fun.return_type(ctx);
 
-let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx)));
+let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx, fun.mods.edition)));
 let name = fun.name.clone();
 let mut call_expr = if fun.self_param.is_some() {
 let self_arg = make::expr_path(make::ext::ident_path("self"));
@@ -1420,13 +1440,13 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> Sy
 [] => None,
 [var] => {
 let name = var.local.name(ctx.db());
-let name = make::name(&name.display(ctx.db()).to_string());
+let name = make::name(&name.display(ctx.db(), fun.mods.edition).to_string());
 Some(ast::Pat::IdentPat(make::ident_pat(false, var.mut_usage_outside_body, name)))
 }
 vars => {
 let binding_pats = vars.iter().map(|var| {
 let name = var.local.name(ctx.db());
-let name = make::name(&name.display(ctx.db()).to_string());
+let name = make::name(&name.display(ctx.db(), fun.mods.edition).to_string());
 make::ident_pat(false, var.mut_usage_outside_body, name).into()
 });
 Some(ast::Pat::TuplePat(make::tuple_pat(binding_pats)))
@@ -1569,8 +1589,8 @@ impl FlowHandler {
 }
 }
 
-fn path_expr_from_local(ctx: &AssistContext<'_>, var: Local) -> ast::Expr {
-let name = var.name(ctx.db()).display(ctx.db()).to_string();
+fn path_expr_from_local(ctx: &AssistContext<'_>, var: Local, edition: Edition) -> ast::Expr {
+let name = var.name(ctx.db()).display(ctx.db(), edition).to_string();
 make::expr_path(make::ext::ident_path(&name))
 }
 
@@ -1581,7 +1601,7 @@ fn format_function(
 old_indent: IndentLevel,
 ) -> ast::Fn {
 let fun_name = make::name(&fun.name.text());
-let params = fun.make_param_list(ctx, module);
+let params = fun.make_param_list(ctx, module, fun.mods.edition);
 let ret_ty = fun.make_ret_ty(ctx, module);
 let body = make_body(ctx, old_indent, fun);
 let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun);
@@ -1597,6 +1617,7 @@ fn format_function(
 fun.control_flow.is_async,
 fun.mods.is_const,
 fun.control_flow.is_unsafe,
+false,
 )
 }
 
@@ -1707,9 +1728,14 @@ impl Function {
 type_params_in_descendant_paths.chain(type_params_in_params).collect()
 }
 
-fn make_param_list(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::ParamList {
+fn make_param_list(
+&self,
+ctx: &AssistContext<'_>,
+module: hir::Module,
+edition: Edition,
+) -> ast::ParamList {
 let self_param = self.self_param.clone();
-let params = self.params.iter().map(|param| param.to_param(ctx, module));
+let params = self.params.iter().map(|param| param.to_param(ctx, module, edition));
 make::param_list(self_param, params)
 }
 
@@ -1842,10 +1868,12 @@ fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) -
 None => match fun.outliving_locals.as_slice() {
 [] => {}
 [var] => {
-tail_expr = Some(path_expr_from_local(ctx, var.local));
+tail_expr = Some(path_expr_from_local(ctx, var.local, fun.mods.edition));
 }
 vars => {
-let exprs = vars.iter().map(|var| path_expr_from_local(ctx, var.local));
+let exprs = vars
+.iter()
+.map(|var| path_expr_from_local(ctx, var.local, fun.mods.edition));
 let expr = make::expr_tuple(exprs);
 tail_expr = Some(expr);
 }
@@ -5622,7 +5650,7 @@ fn func<T: Debug>(i: Struct<'_, T>) {
 fun_name(i);
 }
 
-fn $0fun_name(i: Struct<T>) {
+fn $0fun_name(i: Struct<'_, T>) {
 foo(i);
 }
 "#,
@@ -1,7 +1,7 @@
 use std::iter;
 
 use either::Either;
-use hir::{Module, ModuleDef, Name, Variant};
+use hir::{HasCrate, Module, ModuleDef, Name, Variant};
 use ide_db::{
 defs::Definition,
 helpers::mod_path_to_ast,
@@ -16,7 +16,7 @@ use syntax::{
 self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasAttrs, HasGenericParams,
 HasName, HasVisibility,
 },
-match_ast, ted, SyntaxElement,
+match_ast, ted, Edition, SyntaxElement,
 SyntaxKind::*,
 SyntaxNode, T,
 };
|
@ -58,6 +58,7 @@ pub(crate) fn extract_struct_from_enum_variant(
|
||||||
"Extract struct from enum variant",
|
"Extract struct from enum variant",
|
||||||
target,
|
target,
|
||||||
|builder| {
|
|builder| {
|
||||||
|
let edition = enum_hir.krate(ctx.db()).edition(ctx.db());
|
||||||
let variant_hir_name = variant_hir.name(ctx.db());
|
let variant_hir_name = variant_hir.name(ctx.db());
|
||||||
let enum_module_def = ModuleDef::from(enum_hir);
|
let enum_module_def = ModuleDef::from(enum_hir);
|
||||||
let usages = Definition::Variant(variant_hir).usages(&ctx.sema).all();
|
let usages = Definition::Variant(variant_hir).usages(&ctx.sema).all();
|
||||||
|
@ -82,7 +83,7 @@ pub(crate) fn extract_struct_from_enum_variant(
|
||||||
references,
|
references,
|
||||||
);
|
);
|
||||||
processed.into_iter().for_each(|(path, node, import)| {
|
processed.into_iter().for_each(|(path, node, import)| {
|
||||||
apply_references(ctx.config.insert_use, path, node, import)
|
apply_references(ctx.config.insert_use, path, node, import, edition)
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
builder.edit_file(ctx.file_id());
|
builder.edit_file(ctx.file_id());
|
||||||
|
@ -98,7 +99,7 @@ pub(crate) fn extract_struct_from_enum_variant(
|
||||||
references,
|
references,
|
||||||
);
|
);
|
||||||
processed.into_iter().for_each(|(path, node, import)| {
|
processed.into_iter().for_each(|(path, node, import)| {
|
||||||
apply_references(ctx.config.insert_use, path, node, import)
|
apply_references(ctx.config.insert_use, path, node, import, edition)
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -169,7 +170,7 @@ fn existing_definition(db: &RootDatabase, variant_name: &ast::Name, variant: &Va
|
||||||
),
|
),
|
||||||
_ => false,
|
_ => false,
|
||||||
})
|
})
|
||||||
.any(|(name, _)| name.display(db).to_string() == variant_name.to_string())
|
.any(|(name, _)| name.eq_ident(variant_name.text().as_str()))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extract_generic_params(
|
fn extract_generic_params(
|
||||||
|
@@ -359,9 +360,10 @@ fn apply_references(
 segment: ast::PathSegment,
 node: SyntaxNode,
 import: Option<(ImportScope, hir::ModPath)>,
+edition: Edition,
 ) {
 if let Some((scope, path)) = import {
-insert_use(&scope, mod_path_to_ast(&path), &insert_use_cfg);
+insert_use(&scope, mod_path_to_ast(&path, edition), &insert_use_cfg);
 }
 // deep clone to prevent cycle
 let path = make::path_from_segments(iter::once(segment.clone_subtree()), false);
@@ -20,7 +20,7 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
 // ->
 // ```
 // fn main() {
-// let $0var_name = (1 + 2);
+// let $0var_name = 1 + 2;
 // var_name * 4;
 // }
 // ```
|
@ -58,9 +58,30 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
|
||||||
}
|
}
|
||||||
|
|
||||||
let parent = to_extract.syntax().parent().and_then(ast::Expr::cast);
|
let parent = to_extract.syntax().parent().and_then(ast::Expr::cast);
|
||||||
let needs_adjust = parent
|
// Any expression that autoderefs may need adjustment.
|
||||||
.as_ref()
|
let mut needs_adjust = parent.as_ref().map_or(false, |it| match it {
|
||||||
.map_or(false, |it| matches!(it, ast::Expr::FieldExpr(_) | ast::Expr::MethodCallExpr(_)));
|
ast::Expr::FieldExpr(_)
|
||||||
|
| ast::Expr::MethodCallExpr(_)
|
||||||
|
| ast::Expr::CallExpr(_)
|
||||||
|
| ast::Expr::AwaitExpr(_) => true,
|
||||||
|
ast::Expr::IndexExpr(index) if index.base().as_ref() == Some(&to_extract) => true,
|
||||||
|
_ => false,
|
||||||
|
});
|
||||||
|
let mut to_extract_no_ref = peel_parens(to_extract.clone());
|
||||||
|
let needs_ref = needs_adjust
|
||||||
|
&& match &to_extract_no_ref {
|
||||||
|
ast::Expr::FieldExpr(_)
|
||||||
|
| ast::Expr::IndexExpr(_)
|
||||||
|
| ast::Expr::MacroExpr(_)
|
||||||
|
| ast::Expr::ParenExpr(_)
|
||||||
|
| ast::Expr::PathExpr(_) => true,
|
||||||
|
ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(ast::UnaryOp::Deref) => {
|
||||||
|
to_extract_no_ref = prefix.expr()?;
|
||||||
|
needs_adjust = false;
|
||||||
|
false
|
||||||
|
}
|
||||||
|
_ => false,
|
||||||
|
};
|
||||||
|
|
||||||
let anchor = Anchor::from(&to_extract)?;
|
let anchor = Anchor::from(&to_extract)?;
|
||||||
let target = to_extract.syntax().text_range();
|
let target = to_extract.syntax().text_range();
|
||||||
|
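The rewritten check above broadens "the parent expression may autoderef" beyond field and method-call expressions, and then separately decides whether the extracted value has to be bound as a reference, after peeling parentheses and explicit `*` derefs. A compact, self-contained model of that two-step decision; the enums and the decision table below are invented for illustration only:

```rust
// Toy stand-ins for the syntactic categories the assist looks at.
#[derive(Clone, Copy, PartialEq)]
enum Parent { FieldAccess, MethodCall, Call, Await, Index, Other }
#[derive(Clone, Copy, PartialEq)]
enum Extracted { FieldAccess, Index, MacroCall, Path, Other }

// Step 1: does the parent expression autoderef its receiver or base?
fn parent_autoderefs(p: Parent) -> bool {
    !matches!(p, Parent::Other)
}

// Step 2: if so, does the extracted expression itself need to be taken by reference?
fn needs_ref(p: Parent, e: Extracted) -> bool {
    parent_autoderefs(p)
        && matches!(
            e,
            Extracted::FieldAccess | Extracted::Index | Extracted::MacroCall | Extracted::Path
        )
}

fn main() {
    // `$0s.sub$0[0]` extracts as `let sub = &s.sub;` in the new test below.
    assert!(needs_ref(Parent::Index, Extracted::FieldAccess));
    // `$0bar()$0.do_work()` extracts by value (`let mut bar = bar();`).
    assert!(!needs_ref(Parent::MethodCall, Extracted::Other));
}
```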
@@ -87,22 +108,28 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
 Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => {
 make::ident_pat(false, true, make::name(&var_name))
 }
+_ if needs_adjust
+&& !needs_ref
+&& ty.as_ref().is_some_and(|ty| ty.is_mutable_reference()) =>
+{
+make::ident_pat(false, true, make::name(&var_name))
+}
 _ => make::ident_pat(false, false, make::name(&var_name)),
 };
 
-let to_extract = match ty.as_ref().filter(|_| needs_adjust) {
+let to_extract_no_ref = match ty.as_ref().filter(|_| needs_ref) {
 Some(receiver_type) if receiver_type.is_mutable_reference() => {
-make::expr_ref(to_extract, true)
+make::expr_ref(to_extract_no_ref, true)
 }
 Some(receiver_type) if receiver_type.is_reference() => {
-make::expr_ref(to_extract, false)
+make::expr_ref(to_extract_no_ref, false)
 }
-_ => to_extract,
+_ => to_extract_no_ref,
 };
 
 let expr_replace = edit.make_syntax_mut(expr_replace);
 let let_stmt =
-make::let_stmt(ident_pat.into(), None, Some(to_extract)).clone_for_update();
+make::let_stmt(ident_pat.into(), None, Some(to_extract_no_ref)).clone_for_update();
 let name_expr = make::expr_path(make::ext::ident_path(&var_name)).clone_for_update();
 
 match anchor {
@@ -202,6 +229,14 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
 )
 }
 
+fn peel_parens(mut expr: ast::Expr) -> ast::Expr {
+while let ast::Expr::ParenExpr(parens) = &expr {
+let Some(expr_inside) = parens.expr() else { break };
+expr = expr_inside;
+}
+expr
+}
+
 /// Check whether the node is a valid expression which can be extracted to a variable.
 /// In general that's true for any expression, but in some cases that would produce invalid code.
 fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
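The new `peel_parens` helper simply strips redundant parenthesis layers before the reference analysis runs, so `$0(**v)$0` is treated like `**v`. A self-contained sketch of the same unwrapping loop over a toy expression type (the real helper iterates with `while let` over `ast::Expr`; this is illustrative only):

```rust
// Minimal expression type with explicit parenthesis nodes.
#[derive(Debug, PartialEq)]
enum Expr {
    Paren(Box<Expr>),
    Ident(&'static str),
}

// Unwrap nested `( ... )` layers until a non-paren expression is reached.
fn peel_parens(mut expr: Expr) -> Expr {
    loop {
        match expr {
            Expr::Paren(inner) => expr = *inner,
            other => return other,
        }
    }
}

fn main() {
    let e = Expr::Paren(Box::new(Expr::Paren(Box::new(Expr::Ident("v")))));
    assert_eq!(peel_parens(e), Expr::Ident("v"));
}
```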
@@ -1220,6 +1255,45 @@ fn foo(s: &S) {
 );
 }
 
+#[test]
+fn test_extract_var_index_deref() {
+check_assist(
+extract_variable,
+r#"
+//- minicore: index
+struct X;
+
+impl std::ops::Index<usize> for X {
+type Output = i32;
+fn index(&self) -> &Self::Output { 0 }
+}
+
+struct S {
+sub: X
+}
+
+fn foo(s: &S) {
+$0s.sub$0[0];
+}"#,
+r#"
+struct X;
+
+impl std::ops::Index<usize> for X {
+type Output = i32;
+fn index(&self) -> &Self::Output { 0 }
+}
+
+struct S {
+sub: X
+}
+
+fn foo(s: &S) {
+let $0sub = &s.sub;
+sub[0];
+}"#,
+);
+}
+
 #[test]
 fn test_extract_var_reference_parameter_deep_nesting() {
 check_assist(
|
@ -1461,4 +1535,60 @@ fn foo() {
|
||||||
}"#,
|
}"#,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn generates_no_ref_on_calls() {
|
||||||
|
check_assist(
|
||||||
|
extract_variable,
|
||||||
|
r#"
|
||||||
|
struct S;
|
||||||
|
impl S {
|
||||||
|
fn do_work(&mut self) {}
|
||||||
|
}
|
||||||
|
fn bar() -> S { S }
|
||||||
|
fn foo() {
|
||||||
|
$0bar()$0.do_work();
|
||||||
|
}"#,
|
||||||
|
r#"
|
||||||
|
struct S;
|
||||||
|
impl S {
|
||||||
|
fn do_work(&mut self) {}
|
||||||
|
}
|
||||||
|
fn bar() -> S { S }
|
||||||
|
fn foo() {
|
||||||
|
let mut $0bar = bar();
|
||||||
|
bar.do_work();
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn generates_no_ref_for_deref() {
|
||||||
|
check_assist(
|
||||||
|
extract_variable,
|
||||||
|
r#"
|
||||||
|
struct S;
|
||||||
|
impl S {
|
||||||
|
fn do_work(&mut self) {}
|
||||||
|
}
|
||||||
|
fn bar() -> S { S }
|
||||||
|
fn foo() {
|
||||||
|
let v = &mut &mut bar();
|
||||||
|
$0(**v)$0.do_work();
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
r#"
|
||||||
|
struct S;
|
||||||
|
impl S {
|
||||||
|
fn do_work(&mut self) {}
|
||||||
|
}
|
||||||
|
fn bar() -> S { S }
|
||||||
|
fn foo() {
|
||||||
|
let v = &mut &mut bar();
|
||||||
|
let $0s = *v;
|
||||||
|
s.do_work();
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -45,8 +45,9 @@ pub(crate) fn fill_record_pattern_fields(acc: &mut Assists, ctx: &AssistContext<
 let new_field_list =
 make::record_pat_field_list(old_field_list.fields(), None).clone_for_update();
 for (f, _) in missing_fields.iter() {
+let edition = ctx.sema.scope(record_pat.syntax())?.krate().edition(ctx.db());
 let field = make::record_pat_field_shorthand(make::name_ref(
-&f.name(ctx.sema.db).display_no_db().to_smolstr(),
+&f.name(ctx.sema.db).display_no_db(edition).to_smolstr(),
 ));
 new_field_list.add_field(field.clone_for_update());
 }
@@ -4,7 +4,7 @@ use hir::{
 use ide_db::FileId;
 use syntax::{
 ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},
-AstNode, TextRange, ToSmolStr,
+AstNode, TextRange,
 };
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -48,7 +48,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
 let (_, def) = module
 .scope(ctx.db(), None)
 .into_iter()
-.find(|(name, _)| name.display_no_db().to_smolstr() == name_ref.text().as_str())?;
+.find(|(name, _)| name.eq_ident(name_ref.text().as_str()))?;
 let ScopeDef::ModuleDef(def) = def else {
 return None;
 };
@@ -71,7 +71,10 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
 let assist_label = match target_name {
 None => format!("Change visibility to {missing_visibility}"),
 Some(name) => {
-format!("Change visibility of {} to {missing_visibility}", name.display(ctx.db()))
+format!(
+"Change visibility of {} to {missing_visibility}",
+name.display(ctx.db(), current_module.krate().edition(ctx.db()))
+)
 }
 };
 
@@ -92,6 +95,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
 let (record_field_def, _, _) = ctx.sema.resolve_record_field(&record_field)?;
 
 let current_module = ctx.sema.scope(record_field.syntax())?.module();
+let current_edition = current_module.krate().edition(ctx.db());
 let visibility = record_field_def.visibility(ctx.db());
 if visibility.is_visible_from(ctx.db(), current_module.into()) {
 return None;
@@ -123,8 +127,8 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
 let target_name = record_field_def.name(ctx.db());
 let assist_label = format!(
 "Change visibility of {}.{} to {missing_visibility}",
-parent_name.display(ctx.db()),
-target_name.display(ctx.db())
+parent_name.display(ctx.db(), current_edition),
+target_name.display(ctx.db(), current_edition)
 );
 
 acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| {
@@ -51,6 +51,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
 let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
 let strukt_name = strukt.name()?;
 let current_module = ctx.sema.scope(strukt.syntax())?.module();
+let current_edition = current_module.krate().edition(ctx.db());
 
 let (field_name, field_ty, target) = match ctx.find_node_at_offset::<ast::RecordField>() {
 Some(field) => {
@@ -89,7 +90,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
 methods.sort_by(|(a, _), (b, _)| a.cmp(b));
 for (name, method) in methods {
 let adt = ast::Adt::Struct(strukt.clone());
-let name = name.display(ctx.db()).to_string();
+let name = name.display(ctx.db(), current_edition).to_string();
 // if `find_struct_impl` returns None, that means that a function named `name` already exists.
 let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else {
 continue;
@@ -121,6 +122,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
 let is_async = method_source.async_token().is_some();
 let is_const = method_source.const_token().is_some();
 let is_unsafe = method_source.unsafe_token().is_some();
+let is_gen = method_source.gen_token().is_some();
 
 let fn_name = make::name(&name);
 
@@ -153,6 +155,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
 is_async,
 is_const,
 is_unsafe,
+is_gen,
 )
 .clone_for_update();
 
@@ -22,7 +22,7 @@ use syntax::{
 WherePred,
 },
 ted::{self, Position},
-AstNode, NodeOrToken, SmolStr, SyntaxKind, ToSmolStr,
+AstNode, Edition, NodeOrToken, SmolStr, SyntaxKind, ToSmolStr,
 };
 
 // Assist: generate_delegate_trait
@@ -109,6 +109,7 @@ struct Field {
 ty: ast::Type,
 range: syntax::TextRange,
 impls: Vec<Delegee>,
+edition: Edition,
 }
 
 impl Field {
@@ -119,6 +120,7 @@ impl Field {
 let db = ctx.sema.db;
 
 let module = ctx.sema.file_to_module_def(ctx.file_id())?;
+let edition = module.krate().edition(ctx.db());
 
 let (name, range, ty) = match f {
 Either::Left(f) => {
@@ -147,7 +149,7 @@ impl Field {
 }
 }
 
-Some(Field { name, ty, range, impls })
+Some(Field { name, ty, range, impls, edition })
 }
 }
 
@@ -163,18 +165,18 @@ enum Delegee {
 }
 
 impl Delegee {
-fn signature(&self, db: &dyn HirDatabase) -> String {
+fn signature(&self, db: &dyn HirDatabase, edition: Edition) -> String {
 let mut s = String::new();
 
 let (Delegee::Bound(it) | Delegee::Impls(it, _)) = self;
 
 for m in it.module(db).path_to_root(db).iter().rev() {
 if let Some(name) = m.name(db) {
-s.push_str(&format!("{}::", name.display_no_db().to_smolstr()));
+s.push_str(&format!("{}::", name.display_no_db(edition).to_smolstr()));
 }
 }
 
-s.push_str(&it.name(db).display_no_db().to_smolstr());
+s.push_str(&it.name(db).display_no_db(edition).to_smolstr());
 s
 }
 }
@ -212,9 +214,11 @@ impl Struct {
|
||||||
// if self.hir_ty.impls_trait(db, trait_, &[]) {
|
// if self.hir_ty.impls_trait(db, trait_, &[]) {
|
||||||
// continue;
|
// continue;
|
||||||
// }
|
// }
|
||||||
let signature = delegee.signature(db);
|
let signature = delegee.signature(db, field.edition);
|
||||||
|
|
||||||
let Some(delegate) = generate_impl(ctx, self, &field.ty, &field.name, delegee) else {
|
let Some(delegate) =
|
||||||
|
generate_impl(ctx, self, &field.ty, &field.name, delegee, field.edition)
|
||||||
|
else {
|
||||||
continue;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -240,6 +244,7 @@ fn generate_impl(
|
||||||
field_ty: &ast::Type,
|
field_ty: &ast::Type,
|
||||||
field_name: &str,
|
field_name: &str,
|
||||||
delegee: &Delegee,
|
delegee: &Delegee,
|
||||||
|
edition: Edition,
|
||||||
) -> Option<ast::Impl> {
|
) -> Option<ast::Impl> {
|
||||||
let delegate: ast::Impl;
|
let delegate: ast::Impl;
|
||||||
let db = ctx.db();
|
let db = ctx.db();
|
||||||
|
@ -259,7 +264,7 @@ fn generate_impl(
|
||||||
strukt_params.clone(),
|
strukt_params.clone(),
|
||||||
strukt_params.map(|params| params.to_generic_args()),
|
strukt_params.map(|params| params.to_generic_args()),
|
||||||
delegee.is_auto(db),
|
delegee.is_auto(db),
|
||||||
make::ty(&delegee.name(db).display_no_db().to_smolstr()),
|
make::ty(&delegee.name(db).display_no_db(edition).to_smolstr()),
|
||||||
strukt_ty,
|
strukt_ty,
|
||||||
bound_def.where_clause(),
|
bound_def.where_clause(),
|
||||||
ast_strukt.where_clause(),
|
ast_strukt.where_clause(),
|
||||||
|
@ -350,7 +355,7 @@ fn generate_impl(
|
||||||
let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args());
|
let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args());
|
||||||
|
|
||||||
let path_type =
|
let path_type =
|
||||||
make::ty(&trait_.name(db).display_no_db().to_smolstr()).clone_for_update();
|
make::ty(&trait_.name(db).display_no_db(edition).to_smolstr()).clone_for_update();
|
||||||
transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type.syntax())?;
|
transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type.syntax())?;
|
||||||
|
|
||||||
// 3) Generate delegate trait impl
|
// 3) Generate delegate trait impl
|
||||||
|
@ -735,6 +740,7 @@ fn func_assoc_item(
|
||||||
item.async_token().is_some(),
|
item.async_token().is_some(),
|
||||||
item.const_token().is_some(),
|
item.const_token().is_some(),
|
||||||
item.unsafe_token().is_some(),
|
item.unsafe_token().is_some(),
|
||||||
|
item.gen_token().is_some(),
|
||||||
)
|
)
|
||||||
.clone_for_update();
|
.clone_for_update();
|
||||||
|
|
||||||
|
|
|
@ -4,7 +4,7 @@ use hir::{ModPath, ModuleDef};
|
||||||
use ide_db::{famous_defs::FamousDefs, RootDatabase};
|
use ide_db::{famous_defs::FamousDefs, RootDatabase};
|
||||||
use syntax::{
|
use syntax::{
|
||||||
ast::{self, HasName},
|
ast::{self, HasName},
|
||||||
AstNode, SyntaxNode,
|
AstNode, Edition, SyntaxNode,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
@ -77,6 +77,7 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
|
||||||
field_name.syntax(),
|
field_name.syntax(),
|
||||||
deref_type_to_generate,
|
deref_type_to_generate,
|
||||||
trait_path,
|
trait_path,
|
||||||
|
module.krate().edition(ctx.db()),
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
@ -117,6 +118,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
|
||||||
field_list_index,
|
field_list_index,
|
||||||
deref_type_to_generate,
|
deref_type_to_generate,
|
||||||
trait_path,
|
trait_path,
|
||||||
|
module.krate().edition(ctx.db()),
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
@ -130,6 +132,7 @@ fn generate_edit(
|
||||||
field_name: impl Display,
|
field_name: impl Display,
|
||||||
deref_type: DerefType,
|
deref_type: DerefType,
|
||||||
trait_path: ModPath,
|
trait_path: ModPath,
|
||||||
|
edition: Edition,
|
||||||
) {
|
) {
|
||||||
let start_offset = strukt.syntax().text_range().end();
|
let start_offset = strukt.syntax().text_range().end();
|
||||||
let impl_code = match deref_type {
|
let impl_code = match deref_type {
|
||||||
|
@ -147,8 +150,11 @@ fn generate_edit(
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
let strukt_adt = ast::Adt::Struct(strukt);
|
let strukt_adt = ast::Adt::Struct(strukt);
|
||||||
let deref_impl =
|
let deref_impl = generate_trait_impl_text(
|
||||||
generate_trait_impl_text(&strukt_adt, &trait_path.display(db).to_string(), &impl_code);
|
&strukt_adt,
|
||||||
|
&trait_path.display(db, edition).to_string(),
|
||||||
|
&impl_code,
|
||||||
|
);
|
||||||
edit.insert(start_offset, deref_impl);
|
edit.insert(start_offset, deref_impl);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -5,7 +5,7 @@ use stdx::{format_to, to_lower_snake_case};
|
||||||
use syntax::{
|
use syntax::{
|
||||||
algo::skip_whitespace_token,
|
algo::skip_whitespace_token,
|
||||||
ast::{self, edit::IndentLevel, HasDocComments, HasGenericArgs, HasName},
|
ast::{self, edit::IndentLevel, HasDocComments, HasGenericArgs, HasName},
|
||||||
match_ast, AstNode, AstToken,
|
match_ast, AstNode, AstToken, Edition,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::assist_context::{AssistContext, Assists};
|
use crate::assist_context::{AssistContext, Assists};
|
||||||
|
@ -139,7 +139,8 @@ fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<St
|
||||||
|
|
||||||
let mut example = String::new();
|
let mut example = String::new();
|
||||||
|
|
||||||
let use_path = build_path(ast_func, ctx)?;
|
let edition = ctx.sema.scope(ast_func.syntax())?.krate().edition(ctx.db());
|
||||||
|
let use_path = build_path(ast_func, ctx, edition)?;
|
||||||
let is_unsafe = ast_func.unsafe_token().is_some();
|
let is_unsafe = ast_func.unsafe_token().is_some();
|
||||||
let param_list = ast_func.param_list()?;
|
let param_list = ast_func.param_list()?;
|
||||||
let ref_mut_params = ref_mut_params(¶m_list);
|
let ref_mut_params = ref_mut_params(¶m_list);
|
||||||
|
@ -472,13 +473,13 @@ fn string_vec_from(string_array: &[&str]) -> Vec<String> {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Helper function to build the path of the module in the which is the node
|
/// Helper function to build the path of the module in the which is the node
|
||||||
fn build_path(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
|
fn build_path(ast_func: &ast::Fn, ctx: &AssistContext<'_>, edition: Edition) -> Option<String> {
|
||||||
let crate_name = crate_name(ast_func, ctx)?;
|
let crate_name = crate_name(ast_func, ctx)?;
|
||||||
let leaf = self_partial_type(ast_func)
|
let leaf = self_partial_type(ast_func)
|
||||||
.or_else(|| ast_func.name().map(|n| n.to_string()))
|
.or_else(|| ast_func.name().map(|n| n.to_string()))
|
||||||
.unwrap_or_else(|| "*".into());
|
.unwrap_or_else(|| "*".into());
|
||||||
let module_def: ModuleDef = ctx.sema.to_def(ast_func)?.module(ctx.db()).into();
|
let module_def: ModuleDef = ctx.sema.to_def(ast_func)?.module(ctx.db()).into();
|
||||||
match module_def.canonical_path(ctx.db()) {
|
match module_def.canonical_path(ctx.db(), edition) {
|
||||||
Some(path) => Some(format!("{crate_name}::{path}::{leaf}")),
|
Some(path) => Some(format!("{crate_name}::{path}::{leaf}")),
|
||||||
None => Some(format!("{crate_name}::{leaf}")),
|
None => Some(format!("{crate_name}::{leaf}")),
|
||||||
}
|
}
|
||||||
|
|
|
@ -17,7 +17,7 @@ use syntax::{
|
||||||
self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, BlockExpr, CallExpr,
|
self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, BlockExpr, CallExpr,
|
||||||
HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds,
|
HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds,
|
||||||
},
|
},
|
||||||
ted, SyntaxKind, SyntaxNode, TextRange, T,
|
ted, Edition, SyntaxKind, SyntaxNode, TextRange, T,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
@ -175,6 +175,7 @@ fn add_func_to_accumulator(
|
||||||
edit.edit_file(file);
|
edit.edit_file(file);
|
||||||
|
|
||||||
let target = function_builder.target.clone();
|
let target = function_builder.target.clone();
|
||||||
|
let edition = function_builder.target_edition;
|
||||||
let func = function_builder.render(ctx.config.snippet_cap, edit);
|
let func = function_builder.render(ctx.config.snippet_cap, edit);
|
||||||
|
|
||||||
if let Some(adt) =
|
if let Some(adt) =
|
||||||
|
@ -183,7 +184,7 @@ fn add_func_to_accumulator(
|
||||||
{
|
{
|
||||||
let name = make::ty_path(make::ext::ident_path(&format!(
|
let name = make::ty_path(make::ext::ident_path(&format!(
|
||||||
"{}",
|
"{}",
|
||||||
adt.name(ctx.db()).display(ctx.db())
|
adt.name(ctx.db()).display(ctx.db(), edition)
|
||||||
)));
|
)));
|
||||||
|
|
||||||
// FIXME: adt may have generic params.
|
// FIXME: adt may have generic params.
|
||||||
|
@ -222,6 +223,7 @@ struct FunctionBuilder {
|
||||||
should_focus_return_type: bool,
|
should_focus_return_type: bool,
|
||||||
visibility: Visibility,
|
visibility: Visibility,
|
||||||
is_async: bool,
|
is_async: bool,
|
||||||
|
target_edition: Edition,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FunctionBuilder {
|
impl FunctionBuilder {
|
||||||
|
@ -237,6 +239,7 @@ impl FunctionBuilder {
|
||||||
) -> Option<Self> {
|
) -> Option<Self> {
|
||||||
let target_module =
|
let target_module =
|
||||||
target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?;
|
target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?;
|
||||||
|
let target_edition = target_module.krate().edition(ctx.db());
|
||||||
|
|
||||||
let current_module = ctx.sema.scope(call.syntax())?.module();
|
let current_module = ctx.sema.scope(call.syntax())?.module();
|
||||||
let visibility = calculate_necessary_visibility(current_module, target_module, ctx);
|
let visibility = calculate_necessary_visibility(current_module, target_module, ctx);
|
||||||
|
@ -258,7 +261,9 @@ impl FunctionBuilder {
|
||||||
|
|
||||||
// If generated function has the name "new" and is an associated function, we generate fn body
|
// If generated function has the name "new" and is an associated function, we generate fn body
|
||||||
// as a constructor and assume a "Self" return type.
|
// as a constructor and assume a "Self" return type.
|
||||||
if let Some(body) = make_fn_body_as_new_function(ctx, &fn_name.text(), adt_info) {
|
if let Some(body) =
|
||||||
|
make_fn_body_as_new_function(ctx, &fn_name.text(), adt_info, target_edition)
|
||||||
|
{
|
||||||
ret_type = Some(make::ret_type(make::ty_path(make::ext::ident_path("Self"))));
|
ret_type = Some(make::ret_type(make::ty_path(make::ext::ident_path("Self"))));
|
||||||
should_focus_return_type = false;
|
should_focus_return_type = false;
|
||||||
fn_body = body;
|
fn_body = body;
|
||||||
|
@ -288,6 +293,7 @@ impl FunctionBuilder {
|
||||||
should_focus_return_type,
|
should_focus_return_type,
|
||||||
visibility,
|
visibility,
|
||||||
is_async,
|
is_async,
|
||||||
|
target_edition,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -299,6 +305,8 @@ impl FunctionBuilder {
|
||||||
target_module: Module,
|
target_module: Module,
|
||||||
target: GeneratedFunctionTarget,
|
target: GeneratedFunctionTarget,
|
||||||
) -> Option<Self> {
|
) -> Option<Self> {
|
||||||
|
let target_edition = target_module.krate().edition(ctx.db());
|
||||||
|
|
||||||
let current_module = ctx.sema.scope(call.syntax())?.module();
|
let current_module = ctx.sema.scope(call.syntax())?.module();
|
||||||
let visibility = calculate_necessary_visibility(current_module, target_module, ctx);
|
let visibility = calculate_necessary_visibility(current_module, target_module, ctx);
|
||||||
|
|
||||||
|
@ -336,6 +344,7 @@ impl FunctionBuilder {
|
||||||
should_focus_return_type,
|
should_focus_return_type,
|
||||||
visibility,
|
visibility,
|
||||||
is_async,
|
is_async,
|
||||||
|
target_edition,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -356,6 +365,7 @@ impl FunctionBuilder {
|
||||||
self.is_async,
|
self.is_async,
|
||||||
false, // FIXME : const and unsafe are not handled yet.
|
false, // FIXME : const and unsafe are not handled yet.
|
||||||
false,
|
false,
|
||||||
|
false,
|
||||||
)
|
)
|
||||||
.clone_for_update();
|
.clone_for_update();
|
||||||
|
|
||||||
|
@ -425,6 +435,7 @@ fn make_fn_body_as_new_function(
|
||||||
ctx: &AssistContext<'_>,
|
ctx: &AssistContext<'_>,
|
||||||
fn_name: &str,
|
fn_name: &str,
|
||||||
adt_info: &Option<AdtInfo>,
|
adt_info: &Option<AdtInfo>,
|
||||||
|
edition: Edition,
|
||||||
) -> Option<ast::BlockExpr> {
|
) -> Option<ast::BlockExpr> {
|
||||||
if fn_name != "new" {
|
if fn_name != "new" {
|
||||||
return None;
|
return None;
|
||||||
|
@ -441,7 +452,10 @@ fn make_fn_body_as_new_function(
|
||||||
.iter()
|
.iter()
|
||||||
.map(|field| {
|
.map(|field| {
|
||||||
make::record_expr_field(
|
make::record_expr_field(
|
||||||
make::name_ref(&format!("{}", field.name(ctx.db()).display(ctx.db()))),
|
make::name_ref(&format!(
|
||||||
|
"{}",
|
||||||
|
field.name(ctx.db()).display(ctx.db(), edition)
|
||||||
|
)),
|
||||||
Some(placeholder_expr.clone()),
|
Some(placeholder_expr.clone()),
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
@ -1102,8 +1116,9 @@ fn fn_arg_type(
|
||||||
|
|
||||||
if ty.is_reference() || ty.is_mutable_reference() {
|
if ty.is_reference() || ty.is_mutable_reference() {
|
||||||
let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate());
|
let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate());
|
||||||
|
let target_edition = target_module.krate().edition(ctx.db());
|
||||||
convert_reference_type(ty.strip_references(), ctx.db(), famous_defs)
|
convert_reference_type(ty.strip_references(), ctx.db(), famous_defs)
|
||||||
.map(|conversion| conversion.convert_type(ctx.db()).to_string())
|
.map(|conversion| conversion.convert_type(ctx.db(), target_edition).to_string())
|
||||||
.or_else(|| ty.display_source_code(ctx.db(), target_module.into(), true).ok())
|
.or_else(|| ty.display_source_code(ctx.db(), target_module.into(), true).ok())
|
||||||
} else {
|
} else {
|
||||||
ty.display_source_code(ctx.db(), target_module.into(), true).ok()
|
ty.display_source_code(ctx.db(), target_module.into(), true).ok()
|
||||||
|
|
|
@ -233,7 +233,7 @@ fn generate_getter_from_info(
|
||||||
.map(|conversion| {
|
.map(|conversion| {
|
||||||
cov_mark::hit!(convert_reference_type);
|
cov_mark::hit!(convert_reference_type);
|
||||||
(
|
(
|
||||||
conversion.convert_type(ctx.db()),
|
conversion.convert_type(ctx.db(), krate.edition(ctx.db())),
|
||||||
conversion.getter(record_field_info.field_name.to_string()),
|
conversion.getter(record_field_info.field_name.to_string()),
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
@ -261,7 +261,19 @@ fn generate_getter_from_info(
|
||||||
let ret_type = Some(make::ret_type(ty));
|
let ret_type = Some(make::ret_type(ty));
|
||||||
let body = make::block_expr([], Some(body));
|
let body = make::block_expr([], Some(body));
|
||||||
|
|
||||||
make::fn_(strukt.visibility(), fn_name, None, None, params, body, ret_type, false, false, false)
|
make::fn_(
|
||||||
|
strukt.visibility(),
|
||||||
|
fn_name,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
params,
|
||||||
|
body,
|
||||||
|
ret_type,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> ast::Fn {
|
fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> ast::Fn {
|
||||||
|
@ -285,7 +297,19 @@ fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldI
|
||||||
let body = make::block_expr([assign_stmt.into()], None);
|
let body = make::block_expr([assign_stmt.into()], None);
|
||||||
|
|
||||||
// Make the setter fn
|
// Make the setter fn
|
||||||
make::fn_(strukt.visibility(), fn_name, None, None, params, body, None, false, false, false)
|
make::fn_(
|
||||||
|
strukt.visibility(),
|
||||||
|
fn_name,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
params,
|
||||||
|
body,
|
||||||
|
None,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extract_and_parse(
|
fn extract_and_parse(
|
||||||
|
|
|
@ -64,10 +64,13 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
|
||||||
ctx.config.import_path_config(),
|
ctx.config.import_path_config(),
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
|
let edition = current_module.krate().edition(ctx.db());
|
||||||
|
|
||||||
let expr = use_trivial_constructor(
|
let expr = use_trivial_constructor(
|
||||||
ctx.sema.db,
|
ctx.sema.db,
|
||||||
ide_db::helpers::mod_path_to_ast(&type_path),
|
ide_db::helpers::mod_path_to_ast(&type_path, edition),
|
||||||
&ty,
|
&ty,
|
||||||
|
edition,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
Some(make::record_expr_field(make::name_ref(&name.text()), Some(expr)))
|
Some(make::record_expr_field(make::name_ref(&name.text()), Some(expr)))
|
||||||
|
@ -112,6 +115,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
|
||||||
false,
|
false,
|
||||||
false,
|
false,
|
||||||
false,
|
false,
|
||||||
|
false,
|
||||||
)
|
)
|
||||||
.clone_for_update();
|
.clone_for_update();
|
||||||
fn_.indent(1.into());
|
fn_.indent(1.into());
|
||||||
|
|
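Every hunk shown above applies the same change from this rust-analyzer subtree sync: name- and path-rendering helpers (`display`, `display_no_db`, `canonical_path`, `ModPath::display`, `convert_type`) now take an `Edition`, resolved once from the crate of the module being edited, and `make::fn_` gained an extra flag driven by `gen_token()`. The snippet below is not part of the commit; it is a minimal, self-contained sketch of why rendering a name can depend on the edition, using invented stand-in types (`Edition`, `Name`), not the rust-analyzer API.

// Hypothetical stand-ins, NOT the rust-analyzer API: a sketch of threading an
// `Edition` through name rendering, as the hunks above do with the real types.
#[derive(Clone, Copy, PartialEq, Eq)]
enum Edition {
    Edition2021,
    Edition2024,
}

struct Name(String);

impl Name {
    // The caller resolves the edition (in the diff: `module.krate().edition(db)`)
    // and passes it down, because which identifiers need raw-identifier escaping
    // differs per edition (`gen` is reserved only from Rust 2024 on).
    fn display(&self, edition: Edition) -> String {
        if edition == Edition::Edition2024 && self.0 == "gen" {
            format!("r#{}", self.0)
        } else {
            self.0.clone()
        }
    }
}

fn main() {
    let name = Name("gen".to_string());
    assert_eq!(name.display(Edition::Edition2021), "gen");
    assert_eq!(name.display(Edition::Edition2024), "r#gen");
}

In the actual diff the same shape appears everywhere: the edition is computed once per assist (from the target module's crate) and then passed to every display call instead of being implied by a default.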