Commit e92557b76e — ⬆️ rust-analyzer
Mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-10 12:18:49 +00:00
217 changed files with 12639 additions and 3059 deletions
Cargo.lock (generated)

@@ -572,6 +572,7 @@ dependencies = [
  "chalk-recursive",
  "chalk-solve",
  "cov-mark",
+ "either",
  "ena",
  "expect-test",
  "hir-def",

@@ -1714,6 +1715,7 @@ name = "syntax"
 version = "0.0.0"
 dependencies = [
  "cov-mark",
+ "either",
  "expect-test",
  "indexmap",
  "itertools",
@@ -76,7 +76,7 @@ impl fmt::Display for FlycheckConfig
 #[derive(Debug)]
 pub struct FlycheckHandle {
     // XXX: drop order is significant
-    sender: Sender<Restart>,
+    sender: Sender<StateChange>,
     _thread: jod_thread::JoinHandle,
     id: usize,
 }

@@ -89,7 +89,7 @@ impl FlycheckHandle
         workspace_root: AbsPathBuf,
     ) -> FlycheckHandle {
         let actor = FlycheckActor::new(id, sender, config, workspace_root);
-        let (sender, receiver) = unbounded::<Restart>();
+        let (sender, receiver) = unbounded::<StateChange>();
         let thread = jod_thread::Builder::new()
             .name("Flycheck".to_owned())
             .spawn(move || actor.run(receiver))

@@ -99,12 +99,12 @@ impl FlycheckHandle
 
     /// Schedule a re-start of the cargo check worker.
     pub fn restart(&self) {
-        self.sender.send(Restart::Yes).unwrap();
+        self.sender.send(StateChange::Restart).unwrap();
     }
 
     /// Stop this cargo check worker.
     pub fn cancel(&self) {
-        self.sender.send(Restart::No).unwrap();
+        self.sender.send(StateChange::Cancel).unwrap();
     }
 
     pub fn id(&self) -> usize {

@@ -149,9 +149,9 @@ pub enum Progress
     DidFailToRestart(String),
 }
 
-enum Restart {
-    Yes,
-    No,
+enum StateChange {
+    Restart,
+    Cancel,
 }
 
 /// A [`FlycheckActor`] is a single check instance of a workspace.

@@ -172,7 +172,7 @@ struct FlycheckActor
 }
 
 enum Event {
-    Restart(Restart),
+    RequestStateChange(StateChange),
     CheckEvent(Option<CargoMessage>),
 }
 

@@ -191,30 +191,31 @@ impl FlycheckActor
         self.send(Message::Progress { id: self.id, progress });
     }
 
-    fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> {
+    fn next_event(&self, inbox: &Receiver<StateChange>) -> Option<Event> {
         let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver);
         if let Ok(msg) = inbox.try_recv() {
             // give restarts a preference so check outputs don't block a restart or stop
-            return Some(Event::Restart(msg));
+            return Some(Event::RequestStateChange(msg));
         }
         select! {
-            recv(inbox) -> msg => msg.ok().map(Event::Restart),
+            recv(inbox) -> msg => msg.ok().map(Event::RequestStateChange),
             recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
         }
     }
 
-    fn run(mut self, inbox: Receiver<Restart>) {
+    fn run(mut self, inbox: Receiver<StateChange>) {
         'event: while let Some(event) = self.next_event(&inbox) {
             match event {
-                Event::Restart(Restart::No) => {
+                Event::RequestStateChange(StateChange::Cancel) => {
+                    tracing::debug!(flycheck_id = self.id, "flycheck cancelled");
                     self.cancel_check_process();
                 }
-                Event::Restart(Restart::Yes) => {
+                Event::RequestStateChange(StateChange::Restart) => {
                     // Cancel the previously spawned process
                     self.cancel_check_process();
                     while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) {
                         // restart chained with a stop, so just cancel
-                        if let Restart::No = restart {
+                        if let StateChange::Cancel = restart {
                             continue 'event;
                         }
                     }

@@ -255,10 +256,20 @@ impl FlycheckActor
                 }
                 Event::CheckEvent(Some(message)) => match message {
                     CargoMessage::CompilerArtifact(msg) => {
+                        tracing::trace!(
+                            flycheck_id = self.id,
+                            artifact = msg.target.name,
+                            "artifact received"
+                        );
                         self.report_progress(Progress::DidCheckCrate(msg.target.name));
                     }
 
                     CargoMessage::Diagnostic(msg) => {
+                        tracing::trace!(
+                            flycheck_id = self.id,
+                            message = msg.message,
+                            "diagnostic received"
+                        );
                         self.send(Message::AddDiagnostic {
                             id: self.id,
                             workspace_root: self.root.clone(),

@@ -445,42 +456,56 @@ impl CargoActor
         // simply skip a line if it doesn't parse, which just ignores any
         // erroneous output.
 
-        let mut error = String::new();
-        let mut read_at_least_one_message = false;
+        let mut stdout_errors = String::new();
+        let mut stderr_errors = String::new();
+        let mut read_at_least_one_stdout_message = false;
+        let mut read_at_least_one_stderr_message = false;
+        let process_line = |line: &str, error: &mut String| {
+            // Try to deserialize a message from Cargo or Rustc.
+            let mut deserializer = serde_json::Deserializer::from_str(line);
+            deserializer.disable_recursion_limit();
+            if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
+                match message {
+                    // Skip certain kinds of messages to only spend time on what's useful
+                    JsonMessage::Cargo(message) => match message {
+                        cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
+                            self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
+                        }
+                        cargo_metadata::Message::CompilerMessage(msg) => {
+                            self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
+                        }
+                        _ => (),
+                    },
+                    JsonMessage::Rustc(message) => {
+                        self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
+                    }
+                }
+                return true;
+            }
+
+            error.push_str(line);
+            error.push('\n');
+            return false;
+        };
         let output = streaming_output(
             self.stdout,
             self.stderr,
             &mut |line| {
-                read_at_least_one_message = true;
-                // Try to deserialize a message from Cargo or Rustc.
-                let mut deserializer = serde_json::Deserializer::from_str(line);
-                deserializer.disable_recursion_limit();
-                if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
-                    match message {
-                        // Skip certain kinds of messages to only spend time on what's useful
-                        JsonMessage::Cargo(message) => match message {
-                            cargo_metadata::Message::CompilerArtifact(artifact)
-                                if !artifact.fresh =>
-                            {
-                                self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
-                            }
-                            cargo_metadata::Message::CompilerMessage(msg) => {
-                                self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
-                            }
-                            _ => (),
-                        },
-                        JsonMessage::Rustc(message) => {
-                            self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
-                        }
-                    }
+                if process_line(line, &mut stdout_errors) {
+                    read_at_least_one_stdout_message = true;
                 }
             },
             &mut |line| {
-                error.push_str(line);
-                error.push('\n');
+                if process_line(line, &mut stderr_errors) {
+                    read_at_least_one_stderr_message = true;
+                }
             },
         );
 
+        let read_at_least_one_message =
+            read_at_least_one_stdout_message || read_at_least_one_stderr_message;
+        let mut error = stdout_errors;
+        error.push_str(&stderr_errors);
         match output {
             Ok(_) => Ok((read_at_least_one_message, error)),
             Err(e) => Err(io::Error::new(e.kind(), format!("{e:?}: {error}"))),
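The hunks above rename flycheck's single request channel from `Restart { Yes, No }` to `StateChange { Restart, Cancel }` and keep the debouncing rule that a restart immediately chased by a cancel collapses into a cancel. The protocol can be exercised in a stand-alone sketch; the toy below uses `std::sync::mpsc` and invented names rather than the crossbeam channel and `FlycheckActor` of the real code, so treat it as an illustration of the pattern only.

```rust
use std::sync::mpsc::{channel, Receiver, Sender};
use std::thread;
use std::time::Duration;

// Mirrors the renamed enum in the patch: one channel carries both requests.
enum StateChange {
    Restart,
    Cancel,
}

struct Handle {
    sender: Sender<StateChange>,
}

impl Handle {
    fn restart(&self) {
        self.sender.send(StateChange::Restart).unwrap();
    }
    fn cancel(&self) {
        self.sender.send(StateChange::Cancel).unwrap();
    }
}

fn run_actor(inbox: Receiver<StateChange>) {
    while let Ok(msg) = inbox.recv() {
        match msg {
            StateChange::Cancel => println!("check cancelled"),
            StateChange::Restart => {
                // Debounce: a restart that is quickly followed by a cancel
                // collapses into a cancel, like the `recv_timeout` loop above.
                let mut cancelled = false;
                while let Ok(next) = inbox.recv_timeout(Duration::from_millis(50)) {
                    if let StateChange::Cancel = next {
                        cancelled = true;
                        break;
                    }
                }
                if cancelled {
                    println!("restart superseded by cancel");
                } else {
                    println!("spawning a fresh cargo check");
                }
            }
        }
    }
}

fn main() {
    let (sender, receiver) = channel();
    let handle = Handle { sender };
    let worker = thread::spawn(move || run_actor(receiver));
    handle.restart();
    handle.cancel();
    drop(handle); // closing the channel lets the actor loop exit
    worker.join().unwrap();
}
```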
@@ -300,6 +300,7 @@ impl AttrsWithOwner
                 AdtId::UnionId(it) => attrs_from_item_tree(it.lookup(db).id, db),
             },
             AttrDefId::TraitId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+            AttrDefId::TraitAliasId(it) => attrs_from_item_tree(it.lookup(db).id, db),
             AttrDefId::MacroId(it) => match it {
                 MacroId::Macro2Id(it) => attrs_from_item_tree(it.lookup(db).id, db),
                 MacroId::MacroRulesId(it) => attrs_from_item_tree(it.lookup(db).id, db),

@@ -315,26 +316,14 @@ impl AttrsWithOwner
                 let src = it.parent().child_source(db);
                 RawAttrs::from_attrs_owner(
                     db.upcast(),
-                    src.with_value(src.value[it.local_id()].as_ref().either(
-                        |it| match it {
-                            ast::TypeOrConstParam::Type(it) => it as _,
-                            ast::TypeOrConstParam::Const(it) => it as _,
-                        },
-                        |it| it as _,
-                    )),
+                    src.with_value(&src.value[it.local_id()]),
                 )
             }
             GenericParamId::TypeParamId(it) => {
                 let src = it.parent().child_source(db);
                 RawAttrs::from_attrs_owner(
                     db.upcast(),
-                    src.with_value(src.value[it.local_id()].as_ref().either(
-                        |it| match it {
-                            ast::TypeOrConstParam::Type(it) => it as _,
-                            ast::TypeOrConstParam::Const(it) => it as _,
-                        },
-                        |it| it as _,
-                    )),
+                    src.with_value(&src.value[it.local_id()]),
                 )
             }
             GenericParamId::LifetimeParamId(it) => {

@@ -404,6 +393,7 @@ impl AttrsWithOwner
             AttrDefId::StaticId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
             AttrDefId::ConstId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
             AttrDefId::TraitId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+            AttrDefId::TraitAliasId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
             AttrDefId::TypeAliasId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
             AttrDefId::MacroId(id) => match id {
                 MacroId::Macro2Id(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),

@@ -412,28 +402,14 @@ impl AttrsWithOwner
             },
             AttrDefId::ImplId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
             AttrDefId::GenericParamId(id) => match id {
-                GenericParamId::ConstParamId(id) => {
-                    id.parent().child_source(db).map(|source| match &source[id.local_id()] {
-                        Either::Left(ast::TypeOrConstParam::Type(id)) => {
-                            ast::AnyHasAttrs::new(id.clone())
-                        }
-                        Either::Left(ast::TypeOrConstParam::Const(id)) => {
-                            ast::AnyHasAttrs::new(id.clone())
-                        }
-                        Either::Right(id) => ast::AnyHasAttrs::new(id.clone()),
-                    })
-                }
-                GenericParamId::TypeParamId(id) => {
-                    id.parent().child_source(db).map(|source| match &source[id.local_id()] {
-                        Either::Left(ast::TypeOrConstParam::Type(id)) => {
-                            ast::AnyHasAttrs::new(id.clone())
-                        }
-                        Either::Left(ast::TypeOrConstParam::Const(id)) => {
-                            ast::AnyHasAttrs::new(id.clone())
-                        }
-                        Either::Right(id) => ast::AnyHasAttrs::new(id.clone()),
-                    })
-                }
+                GenericParamId::ConstParamId(id) => id
+                    .parent()
+                    .child_source(db)
+                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
+                GenericParamId::TypeParamId(id) => id
+                    .parent()
+                    .child_source(db)
+                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
                 GenericParamId::LifetimeParamId(id) => id
                     .parent
                     .child_source(db)
@@ -24,7 +24,7 @@ use syntax::{ast, AstPtr, SyntaxNode, SyntaxNodePtr};
 use crate::{
     attr::Attrs,
     db::DefDatabase,
-    expr::{dummy_expr_id, Expr, ExprId, Label, LabelId, Pat, PatId},
+    expr::{dummy_expr_id, Binding, BindingId, Expr, ExprId, Label, LabelId, Pat, PatId},
     item_scope::BuiltinShadowMode,
     macro_id_to_def_id,
     nameres::DefMap,

@@ -270,7 +270,7 @@ pub struct Mark
 pub struct Body {
     pub exprs: Arena<Expr>,
     pub pats: Arena<Pat>,
-    pub or_pats: FxHashMap<PatId, Arc<[PatId]>>,
+    pub bindings: Arena<Binding>,
     pub labels: Arena<Label>,
     /// The patterns for the function's parameters. While the parameter types are
     /// part of the function signature, the patterns are not (they don't change

@@ -409,18 +409,6 @@ impl Body
             .map(move |&block| (block, db.block_def_map(block).expect("block ID without DefMap")))
     }
 
-    pub fn pattern_representative(&self, pat: PatId) -> PatId {
-        self.or_pats.get(&pat).and_then(|pats| pats.first().copied()).unwrap_or(pat)
-    }
-
-    /// Retrieves all ident patterns this pattern shares the ident with.
-    pub fn ident_patterns_for<'slf>(&'slf self, pat: &'slf PatId) -> &'slf [PatId] {
-        match self.or_pats.get(pat) {
-            Some(pats) => pats,
-            None => std::slice::from_ref(pat),
-        }
-    }
-
     pub fn pretty_print(&self, db: &dyn DefDatabase, owner: DefWithBodyId) -> String {
         pretty::print_body_hir(db, self, owner)
     }

@@ -435,13 +423,14 @@ impl Body
     }
 
     fn shrink_to_fit(&mut self) {
-        let Self { _c: _, body_expr: _, block_scopes, or_pats, exprs, labels, params, pats } = self;
+        let Self { _c: _, body_expr: _, block_scopes, exprs, labels, params, pats, bindings } =
+            self;
         block_scopes.shrink_to_fit();
-        or_pats.shrink_to_fit();
         exprs.shrink_to_fit();
         labels.shrink_to_fit();
         params.shrink_to_fit();
         pats.shrink_to_fit();
+        bindings.shrink_to_fit();
     }
 }
 

@@ -451,7 +440,7 @@ impl Default for Body
             body_expr: dummy_expr_id(),
             exprs: Default::default(),
             pats: Default::default(),
-            or_pats: Default::default(),
+            bindings: Default::default(),
             labels: Default::default(),
             params: Default::default(),
             block_scopes: Default::default(),

@@ -484,6 +473,14 @@ impl Index<LabelId> for Body
     }
 }
 
+impl Index<BindingId> for Body {
+    type Output = Binding;
+
+    fn index(&self, b: BindingId) -> &Binding {
+        &self.bindings[b]
+    }
+}
+
 // FIXME: Change `node_` prefix to something more reasonable.
 // Perhaps `expr_syntax` and `expr_id`?
 impl BodySourceMap {
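The `Body` hunks above replace the `or_pats` side table with a `bindings: Arena<Binding>` and add `impl Index<BindingId> for Body`, so downstream code can write `body[binding_id]`. Below is a minimal stand-alone model of that arena-plus-index shape; a plain `Vec` stands in for `la_arena::Arena`, and the field types are simplified assumptions rather than the real definitions.

```rust
use std::ops::Index;

// Toy stand-ins for `Name` and `la_arena::Arena`: just enough to show why
// `body[binding_id]` works after the patch.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct BindingId(u32);

#[derive(Debug)]
struct Binding {
    name: String,
    definitions: Vec<u32>, // ids of the `Pat::Bind`s that introduce this binding
}

#[derive(Debug, Default)]
struct Body {
    bindings: Vec<Binding>, // plays the role of `Arena<Binding>`
}

impl Body {
    fn alloc_binding(&mut self, name: &str) -> BindingId {
        let id = BindingId(self.bindings.len() as u32);
        self.bindings.push(Binding { name: name.to_owned(), definitions: Vec::new() });
        id
    }
}

// The patch adds exactly this kind of impl so callers can index with the id type.
impl Index<BindingId> for Body {
    type Output = Binding;
    fn index(&self, b: BindingId) -> &Binding {
        &self.bindings[b.0 as usize]
    }
}

fn main() {
    let mut body = Body::default();
    let id = body.alloc_binding("x");
    body.bindings[id.0 as usize].definitions.push(7);
    println!("{} is defined by pats {:?}", body[id].name, body[id].definitions);
}
```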
@@ -15,6 +15,7 @@ use la_arena::Arena;
 use once_cell::unsync::OnceCell;
 use profile::Count;
 use rustc_hash::FxHashMap;
+use smallvec::SmallVec;
 use syntax::{
     ast::{
         self, ArrayExprKind, AstChildren, HasArgList, HasLoopBody, HasName, LiteralKind,

@@ -30,14 +31,14 @@ use crate::{
     builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
     db::DefDatabase,
     expr::{
-        dummy_expr_id, Array, BindingAnnotation, ClosureKind, Expr, ExprId, FloatTypeWrapper,
-        Label, LabelId, Literal, MatchArm, Movability, Pat, PatId, RecordFieldPat, RecordLitField,
-        Statement,
+        dummy_expr_id, Array, Binding, BindingAnnotation, BindingId, ClosureKind, Expr, ExprId,
+        FloatTypeWrapper, Label, LabelId, Literal, MatchArm, Movability, Pat, PatId,
+        RecordFieldPat, RecordLitField, Statement,
     },
     item_scope::BuiltinShadowMode,
     path::{GenericArgs, Path},
     type_ref::{Mutability, Rawness, TypeRef},
-    AdtId, BlockLoc, ModuleDefId, UnresolvedMacro,
+    AdtId, BlockId, BlockLoc, ModuleDefId, UnresolvedMacro,
 };
 
 pub struct LowerCtx<'a> {

@@ -87,16 +88,14 @@ pub(super) fn lower(
         body: Body {
             exprs: Arena::default(),
             pats: Arena::default(),
+            bindings: Arena::default(),
             labels: Arena::default(),
             params: Vec::new(),
             body_expr: dummy_expr_id(),
             block_scopes: Vec::new(),
             _c: Count::new(),
-            or_pats: Default::default(),
         },
         expander,
-        name_to_pat_grouping: Default::default(),
-        is_lowering_inside_or_pat: false,
         is_lowering_assignee_expr: false,
         is_lowering_generator: false,
     }

@@ -109,13 +108,26 @@ struct ExprCollector<'a> {
     ast_id_map: Arc<AstIdMap>,
     body: Body,
     source_map: BodySourceMap,
-    // a poor-mans union-find?
-    name_to_pat_grouping: FxHashMap<Name, Vec<PatId>>,
-    is_lowering_inside_or_pat: bool,
     is_lowering_assignee_expr: bool,
     is_lowering_generator: bool,
 }
 
+#[derive(Debug, Default)]
+struct BindingList {
+    map: FxHashMap<Name, BindingId>,
+}
+
+impl BindingList {
+    fn find(
+        &mut self,
+        ec: &mut ExprCollector<'_>,
+        name: Name,
+        mode: BindingAnnotation,
+    ) -> BindingId {
+        *self.map.entry(name).or_insert_with_key(|n| ec.alloc_binding(n.clone(), mode))
+    }
+}
+
 impl ExprCollector<'_> {
     fn collect(
         mut self,

@@ -127,17 +139,16 @@ impl ExprCollector<'_> {
             param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
         {
             let ptr = AstPtr::new(&self_param);
-            let param_pat = self.alloc_pat(
-                Pat::Bind {
-                    name: name![self],
-                    mode: BindingAnnotation::new(
-                        self_param.mut_token().is_some() && self_param.amp_token().is_none(),
-                        false,
-                    ),
-                    subpat: None,
-                },
-                Either::Right(ptr),
+            let binding_id = self.alloc_binding(
+                name![self],
+                BindingAnnotation::new(
+                    self_param.mut_token().is_some() && self_param.amp_token().is_none(),
+                    false,
+                ),
             );
+            let param_pat =
+                self.alloc_pat(Pat::Bind { id: binding_id, subpat: None }, Either::Right(ptr));
+            self.add_definition_to_binding(binding_id, param_pat);
             self.body.params.push(param_pat);
         }
 

@@ -179,6 +190,9 @@ impl ExprCollector<'_> {
         id
     }
 
+    fn alloc_binding(&mut self, name: Name, mode: BindingAnnotation) -> BindingId {
+        self.body.bindings.alloc(Binding { name, mode, definitions: SmallVec::new() })
+    }
     fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
         let src = self.expander.to_source(ptr);
         let id = self.make_pat(pat, src.clone());

@@ -238,33 +252,32 @@ impl ExprCollector<'_> {
             }
             ast::Expr::BlockExpr(e) => match e.modifier() {
                 Some(ast::BlockModifier::Try(_)) => {
-                    let body = self.collect_block(e);
-                    self.alloc_expr(Expr::TryBlock { body }, syntax_ptr)
+                    self.collect_block_(e, |id, statements, tail| Expr::TryBlock {
+                        id,
+                        statements,
+                        tail,
+                    })
                 }
                 Some(ast::BlockModifier::Unsafe(_)) => {
-                    let body = self.collect_block(e);
-                    self.alloc_expr(Expr::Unsafe { body }, syntax_ptr)
+                    self.collect_block_(e, |id, statements, tail| Expr::Unsafe {
+                        id,
+                        statements,
+                        tail,
+                    })
                 }
-                // FIXME: we need to record these effects somewhere...
                 Some(ast::BlockModifier::Label(label)) => {
                     let label = self.collect_label(label);
-                    let res = self.collect_block(e);
-                    match &mut self.body.exprs[res] {
-                        Expr::Block { label: block_label, .. } => {
-                            *block_label = Some(label);
-                        }
-                        _ => unreachable!(),
-                    }
-                    res
-                }
-                Some(ast::BlockModifier::Async(_)) => {
-                    let body = self.collect_block(e);
-                    self.alloc_expr(Expr::Async { body }, syntax_ptr)
-                }
-                Some(ast::BlockModifier::Const(_)) => {
-                    let body = self.collect_block(e);
-                    self.alloc_expr(Expr::Const { body }, syntax_ptr)
+                    self.collect_block_(e, |id, statements, tail| Expr::Block {
+                        id,
+                        statements,
+                        tail,
+                        label: Some(label),
+                    })
                 }
+                Some(ast::BlockModifier::Async(_)) => self
+                    .collect_block_(e, |id, statements, tail| Expr::Async { id, statements, tail }),
+                Some(ast::BlockModifier::Const(_)) => self
+                    .collect_block_(e, |id, statements, tail| Expr::Const { id, statements, tail }),
                 None => self.collect_block(e),
             },
             ast::Expr::LoopExpr(e) => {

@@ -737,6 +750,19 @@ impl ExprCollector<'_> {
     }
 
     fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId {
+        self.collect_block_(block, |id, statements, tail| Expr::Block {
+            id,
+            statements,
+            tail,
+            label: None,
+        })
+    }
+
+    fn collect_block_(
+        &mut self,
+        block: ast::BlockExpr,
+        mk_block: impl FnOnce(BlockId, Box<[Statement]>, Option<ExprId>) -> Expr,
+    ) -> ExprId {
         let file_local_id = self.ast_id_map.ast_id(&block);
         let ast_id = AstId::new(self.expander.current_file_id, file_local_id);
         let block_loc =

@@ -769,15 +795,8 @@ impl ExprCollector<'_> {
         });
 
         let syntax_node_ptr = AstPtr::new(&block.into());
-        let expr_id = self.alloc_expr(
-            Expr::Block {
-                id: block_id,
-                statements: statements.into_boxed_slice(),
-                tail,
-                label: None,
-            },
-            syntax_node_ptr,
-        );
+        let expr_id = self
+            .alloc_expr(mk_block(block_id, statements.into_boxed_slice(), tail), syntax_node_ptr);
 
         self.expander.def_map = prev_def_map;
         self.expander.module = prev_local_module;

@@ -799,13 +818,7 @@ impl ExprCollector<'_> {
     }
 
     fn collect_pat(&mut self, pat: ast::Pat) -> PatId {
-        let pat_id = self.collect_pat_(pat);
-        for (_, pats) in self.name_to_pat_grouping.drain() {
-            let pats = Arc::<[_]>::from(pats);
-            self.body.or_pats.extend(pats.iter().map(|&pat| (pat, pats.clone())));
-        }
-        self.is_lowering_inside_or_pat = false;
-        pat_id
+        self.collect_pat_(pat, &mut BindingList::default())
     }
 
     fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId {

@@ -815,16 +828,18 @@ impl ExprCollector<'_> {
         }
     }
 
-    fn collect_pat_(&mut self, pat: ast::Pat) -> PatId {
+    fn collect_pat_(&mut self, pat: ast::Pat, binding_list: &mut BindingList) -> PatId {
         let pattern = match &pat {
             ast::Pat::IdentPat(bp) => {
                 let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
 
-                let key = self.is_lowering_inside_or_pat.then(|| name.clone());
                 let annotation =
                     BindingAnnotation::new(bp.mut_token().is_some(), bp.ref_token().is_some());
-                let subpat = bp.pat().map(|subpat| self.collect_pat_(subpat));
-                let pattern = if annotation == BindingAnnotation::Unannotated && subpat.is_none() {
+                let subpat = bp.pat().map(|subpat| self.collect_pat_(subpat, binding_list));
+                let is_simple_ident_pat =
+                    annotation == BindingAnnotation::Unannotated && subpat.is_none();
+                let (binding, pattern) = if is_simple_ident_pat {
                     // This could also be a single-segment path pattern. To
                     // decide that, we need to try resolving the name.
                     let (resolved, _) = self.expander.def_map.resolve_path(

@@ -834,12 +849,12 @@ impl ExprCollector<'_> {
                         BuiltinShadowMode::Other,
                     );
                     match resolved.take_values() {
-                        Some(ModuleDefId::ConstId(_)) => Pat::Path(name.into()),
+                        Some(ModuleDefId::ConstId(_)) => (None, Pat::Path(name.into())),
                         Some(ModuleDefId::EnumVariantId(_)) => {
                             // this is only really valid for unit variants, but
                             // shadowing other enum variants with a pattern is
                             // an error anyway
-                            Pat::Path(name.into())
+                            (None, Pat::Path(name.into()))
                         }
                         Some(ModuleDefId::AdtId(AdtId::StructId(s)))
                             if self.db.struct_data(s).variant_data.kind() != StructKind::Record =>

@@ -847,30 +862,34 @@ impl ExprCollector<'_> {
                             // Funnily enough, record structs *can* be shadowed
                             // by pattern bindings (but unit or tuple structs
                            // can't).
-                            Pat::Path(name.into())
+                            (None, Pat::Path(name.into()))
                         }
                         // shadowing statics is an error as well, so we just ignore that case here
-                        _ => Pat::Bind { name, mode: annotation, subpat },
+                        _ => {
+                            let id = binding_list.find(self, name, annotation);
+                            (Some(id), Pat::Bind { id, subpat })
+                        }
                     }
                 } else {
-                    Pat::Bind { name, mode: annotation, subpat }
+                    let id = binding_list.find(self, name, annotation);
+                    (Some(id), Pat::Bind { id, subpat })
                 };
 
                 let ptr = AstPtr::new(&pat);
                 let pat = self.alloc_pat(pattern, Either::Left(ptr));
-                if let Some(key) = key {
-                    self.name_to_pat_grouping.entry(key).or_default().push(pat);
+                if let Some(binding_id) = binding {
+                    self.add_definition_to_binding(binding_id, pat);
                 }
                 return pat;
             }
             ast::Pat::TupleStructPat(p) => {
                 let path =
                     p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
-                let (args, ellipsis) = self.collect_tuple_pat(p.fields());
+                let (args, ellipsis) = self.collect_tuple_pat(p.fields(), binding_list);
                 Pat::TupleStruct { path, args, ellipsis }
             }
             ast::Pat::RefPat(p) => {
-                let pat = self.collect_pat_opt(p.pat());
+                let pat = self.collect_pat_opt_(p.pat(), binding_list);
                 let mutability = Mutability::from_mutable(p.mut_token().is_some());
                 Pat::Ref { pat, mutability }
             }

@@ -880,13 +899,12 @@ impl ExprCollector<'_> {
                 path.map(Pat::Path).unwrap_or(Pat::Missing)
             }
             ast::Pat::OrPat(p) => {
-                self.is_lowering_inside_or_pat = true;
-                let pats = p.pats().map(|p| self.collect_pat_(p)).collect();
+                let pats = p.pats().map(|p| self.collect_pat_(p, binding_list)).collect();
                 Pat::Or(pats)
             }
-            ast::Pat::ParenPat(p) => return self.collect_pat_opt_(p.pat()),
+            ast::Pat::ParenPat(p) => return self.collect_pat_opt_(p.pat(), binding_list),
             ast::Pat::TuplePat(p) => {
-                let (args, ellipsis) = self.collect_tuple_pat(p.fields());
+                let (args, ellipsis) = self.collect_tuple_pat(p.fields(), binding_list);
                 Pat::Tuple { args, ellipsis }
             }
             ast::Pat::WildcardPat(_) => Pat::Wild,

@@ -899,7 +917,7 @@ impl ExprCollector<'_> {
                     .fields()
                     .filter_map(|f| {
                         let ast_pat = f.pat()?;
-                        let pat = self.collect_pat_(ast_pat);
+                        let pat = self.collect_pat_(ast_pat, binding_list);
                         let name = f.field_name()?.as_name();
                         Some(RecordFieldPat { name, pat })
                     })

@@ -918,9 +936,15 @@ impl ExprCollector<'_> {
 
                 // FIXME properly handle `RestPat`
                 Pat::Slice {
-                    prefix: prefix.into_iter().map(|p| self.collect_pat_(p)).collect(),
-                    slice: slice.map(|p| self.collect_pat_(p)),
-                    suffix: suffix.into_iter().map(|p| self.collect_pat_(p)).collect(),
+                    prefix: prefix
+                        .into_iter()
+                        .map(|p| self.collect_pat_(p, binding_list))
+                        .collect(),
+                    slice: slice.map(|p| self.collect_pat_(p, binding_list)),
+                    suffix: suffix
+                        .into_iter()
+                        .map(|p| self.collect_pat_(p, binding_list))
+                        .collect(),
                 }
             }
             ast::Pat::LiteralPat(lit) => {

@@ -943,7 +967,7 @@ impl ExprCollector<'_> {
                 Pat::Missing
             }
             ast::Pat::BoxPat(boxpat) => {
-                let inner = self.collect_pat_opt_(boxpat.pat());
+                let inner = self.collect_pat_opt_(boxpat.pat(), binding_list);
                 Pat::Box { inner }
             }
             ast::Pat::ConstBlockPat(const_block_pat) => {

@@ -960,7 +984,7 @@ impl ExprCollector<'_> {
                     let src = self.expander.to_source(Either::Left(AstPtr::new(&pat)));
                     let pat =
                         self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
-                            this.collect_pat_opt_(expanded_pat)
+                            this.collect_pat_opt_(expanded_pat, binding_list)
                         });
                     self.source_map.pat_map.insert(src, pat);
                     return pat;

@@ -974,21 +998,25 @@ impl ExprCollector<'_> {
         self.alloc_pat(pattern, Either::Left(ptr))
     }
 
-    fn collect_pat_opt_(&mut self, pat: Option<ast::Pat>) -> PatId {
+    fn collect_pat_opt_(&mut self, pat: Option<ast::Pat>, binding_list: &mut BindingList) -> PatId {
         match pat {
-            Some(pat) => self.collect_pat_(pat),
+            Some(pat) => self.collect_pat_(pat, binding_list),
             None => self.missing_pat(),
         }
     }
 
-    fn collect_tuple_pat(&mut self, args: AstChildren<ast::Pat>) -> (Box<[PatId]>, Option<usize>) {
+    fn collect_tuple_pat(
+        &mut self,
+        args: AstChildren<ast::Pat>,
+        binding_list: &mut BindingList,
+    ) -> (Box<[PatId]>, Option<usize>) {
         // Find the location of the `..`, if there is one. Note that we do not
         // consider the possibility of there being multiple `..` here.
         let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::RestPat(_)));
         // We want to skip the `..` pattern here, since we account for it above.
         let args = args
             .filter(|p| !matches!(p, ast::Pat::RestPat(_)))
-            .map(|p| self.collect_pat_(p))
+            .map(|p| self.collect_pat_(p, binding_list))
             .collect();
 
         (args, ellipsis)

@@ -1017,6 +1045,10 @@ impl ExprCollector<'_> {
             None => Some(()),
         }
     }
+
+    fn add_definition_to_binding(&mut self, binding_id: BindingId, pat_id: PatId) {
+        self.body.bindings[binding_id].definitions.push(pat_id);
+    }
 }
 
 impl From<ast::LiteralKind> for Literal {
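`BindingList::find`, added above, guarantees that every occurrence of one name in an or-pattern such as `Ok(x) | Err(x)` is lowered to the same `BindingId`, replacing the old `name_to_pat_grouping` map. The deduplication trick is `HashMap::entry(..).or_insert_with_key(..)`, which only allocates a new binding the first time a name is seen. A reduced sketch of that idea, with plain `std` types standing in for `Name`, `BindingId` and the arena (all names here are illustrative):

```rust
use std::collections::HashMap;

#[derive(Default)]
struct BindingList {
    map: HashMap<String, usize>,
}

impl BindingList {
    // Return the existing id for `name`, or allocate one on first sight.
    // `or_insert_with_key` hands the closure a reference to the key, which is
    // what lets the real code clone the `Name` only when a binding is new.
    fn find(&mut self, arena: &mut Vec<String>, name: String) -> usize {
        *self.map.entry(name).or_insert_with_key(|n| {
            arena.push(n.clone());
            arena.len() - 1
        })
    }
}

fn main() {
    // In `Ok(x) | Err(x)`, `x` is written twice but must be a single binding.
    let mut arena = Vec::new();
    let mut list = BindingList::default();
    let a = list.find(&mut arena, "x".to_string());
    let b = list.find(&mut arena, "x".to_string());
    assert_eq!(a, b);
    println!("`x` allocated once as binding #{a}; arena = {arena:?}");
}
```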
@@ -5,7 +5,7 @@ use std::fmt::{self, Write};
 use syntax::ast::HasName;
 
 use crate::{
-    expr::{Array, BindingAnnotation, ClosureKind, Literal, Movability, Statement},
+    expr::{Array, BindingAnnotation, BindingId, ClosureKind, Literal, Movability, Statement},
     pretty::{print_generic_args, print_path, print_type_ref},
     type_ref::TypeRef,
 };

@@ -292,18 +292,6 @@ impl<'a> Printer<'a> {
                 self.print_expr(*expr);
                 w!(self, "?");
             }
-            Expr::TryBlock { body } => {
-                w!(self, "try ");
-                self.print_expr(*body);
-            }
-            Expr::Async { body } => {
-                w!(self, "async ");
-                self.print_expr(*body);
-            }
-            Expr::Const { body } => {
-                w!(self, "const ");
-                self.print_expr(*body);
-            }
             Expr::Cast { expr, type_ref } => {
                 self.print_expr(*expr);
                 w!(self, " as ");

@@ -402,10 +390,6 @@ impl<'a> Printer<'a> {
                 }
                 w!(self, ")");
             }
-            Expr::Unsafe { body } => {
-                w!(self, "unsafe ");
-                self.print_expr(*body);
-            }
             Expr::Array(arr) => {
                 w!(self, "[");
                 if !matches!(arr, Array::ElementList { elements, .. } if elements.is_empty()) {

@@ -428,27 +412,49 @@ impl<'a> Printer<'a> {
             }
             Expr::Literal(lit) => self.print_literal(lit),
             Expr::Block { id: _, statements, tail, label } => {
-                self.whitespace();
-                if let Some(lbl) = label {
-                    w!(self, "{}: ", self.body[*lbl].name);
-                }
-                w!(self, "{{");
-                if !statements.is_empty() || tail.is_some() {
-                    self.indented(|p| {
-                        for stmt in &**statements {
-                            p.print_stmt(stmt);
-                        }
-                        if let Some(tail) = tail {
-                            p.print_expr(*tail);
-                        }
-                        p.newline();
-                    });
-                }
-                w!(self, "}}");
+                let label = label.map(|lbl| format!("{}: ", self.body[lbl].name));
+                self.print_block(label.as_deref(), statements, tail);
+            }
+            Expr::Unsafe { id: _, statements, tail } => {
+                self.print_block(Some("unsafe "), statements, tail);
+            }
+            Expr::TryBlock { id: _, statements, tail } => {
+                self.print_block(Some("try "), statements, tail);
+            }
+            Expr::Async { id: _, statements, tail } => {
+                self.print_block(Some("async "), statements, tail);
+            }
+            Expr::Const { id: _, statements, tail } => {
+                self.print_block(Some("const "), statements, tail);
             }
         }
     }
 
+    fn print_block(
+        &mut self,
+        label: Option<&str>,
+        statements: &Box<[Statement]>,
+        tail: &Option<la_arena::Idx<Expr>>,
+    ) {
+        self.whitespace();
+        if let Some(lbl) = label {
+            w!(self, "{}", lbl);
+        }
+        w!(self, "{{");
+        if !statements.is_empty() || tail.is_some() {
+            self.indented(|p| {
+                for stmt in &**statements {
+                    p.print_stmt(stmt);
+                }
+                if let Some(tail) = tail {
+                    p.print_expr(*tail);
+                }
+                p.newline();
+            });
+        }
+        w!(self, "}}");
+    }
+
     fn print_pat(&mut self, pat: PatId) {
         let pat = &self.body[pat];
 

@@ -518,14 +524,8 @@ impl<'a> Printer<'a> {
             }
             Pat::Path(path) => self.print_path(path),
             Pat::Lit(expr) => self.print_expr(*expr),
-            Pat::Bind { mode, name, subpat } => {
-                let mode = match mode {
-                    BindingAnnotation::Unannotated => "",
-                    BindingAnnotation::Mutable => "mut ",
-                    BindingAnnotation::Ref => "ref ",
-                    BindingAnnotation::RefMut => "ref mut ",
-                };
-                w!(self, "{}{}", mode, name);
+            Pat::Bind { id, subpat } => {
+                self.print_binding(*id);
                 if let Some(pat) = subpat {
                     self.whitespace();
                     self.print_pat(*pat);

@@ -629,4 +629,15 @@ impl<'a> Printer<'a> {
     fn print_path(&mut self, path: &Path) {
         print_path(path, self).unwrap();
     }
+
+    fn print_binding(&mut self, id: BindingId) {
+        let Binding { name, mode, .. } = &self.body.bindings[id];
+        let mode = match mode {
+            BindingAnnotation::Unannotated => "",
+            BindingAnnotation::Mutable => "mut ",
+            BindingAnnotation::Ref => "ref ",
+            BindingAnnotation::RefMut => "ref mut ",
+        };
+        w!(self, "{}{}", mode, name);
+    }
 }
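The printer hunks above collapse five near-identical match arms (`Block`, `Unsafe`, `TryBlock`, `Async`, `Const`) into one `print_block` helper that only varies by an optional prefix. A toy version of that refactor shape, with a string-building printer and invented types standing in for the real one:

```rust
// The shared helper prints "<prefix>{ stmts; tail }" for every block flavour.
struct Printer {
    out: String,
}

impl Printer {
    fn print_block(&mut self, label: Option<&str>, statements: &[&str], tail: Option<&str>) {
        if let Some(lbl) = label {
            self.out.push_str(lbl);
        }
        self.out.push('{');
        for stmt in statements {
            self.out.push_str(stmt);
            self.out.push(';');
        }
        if let Some(tail) = tail {
            self.out.push_str(tail);
        }
        self.out.push('}');
    }
}

enum Block<'a> {
    Plain { label: Option<&'a str> },
    Unsafe,
    Async,
}

fn print(p: &mut Printer, block: Block<'_>, statements: &[&str], tail: Option<&str>) {
    // Each variant only chooses the prefix; the body printing is shared.
    match block {
        Block::Plain { label } => p.print_block(label, statements, tail),
        Block::Unsafe => p.print_block(Some("unsafe "), statements, tail),
        Block::Async => p.print_block(Some("async "), statements, tail),
    }
}

fn main() {
    let mut p = Printer { out: String::new() };
    print(&mut p, Block::Unsafe, &["let x = 1"], Some("x"));
    assert_eq!(p.out, "unsafe {let x = 1;x}");
    println!("{}", p.out);
}
```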
@@ -8,7 +8,7 @@ use rustc_hash::FxHashMap;
 use crate::{
     body::Body,
     db::DefDatabase,
-    expr::{Expr, ExprId, LabelId, Pat, PatId, Statement},
+    expr::{Binding, BindingId, Expr, ExprId, LabelId, Pat, PatId, Statement},
     BlockId, DefWithBodyId,
 };
 

@@ -23,7 +23,7 @@ pub struct ExprScopes {
 #[derive(Debug, PartialEq, Eq)]
 pub struct ScopeEntry {
     name: Name,
-    pat: PatId,
+    binding: BindingId,
 }
 
 impl ScopeEntry {

@@ -31,8 +31,8 @@ impl ScopeEntry {
         &self.name
     }
 
-    pub fn pat(&self) -> PatId {
-        self.pat
+    pub fn binding(&self) -> BindingId {
+        self.binding
     }
 }
 

@@ -66,6 +66,7 @@ impl ExprScopes {
         self.scopes[scope].label.clone()
     }
 
+    /// Returns the scopes in ascending order.
     pub fn scope_chain(&self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + '_ {
         std::iter::successors(scope, move |&scope| self.scopes[scope].parent)
     }

@@ -125,18 +126,23 @@ impl ExprScopes {
         })
     }
 
-    fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) {
+    fn add_bindings(&mut self, body: &Body, scope: ScopeId, binding: BindingId) {
+        let Binding { name, .. } = &body.bindings[binding];
+        let entry = ScopeEntry { name: name.clone(), binding };
+        self.scopes[scope].entries.push(entry);
+    }
+
+    fn add_pat_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) {
         let pattern = &body[pat];
-        if let Pat::Bind { name, .. } = pattern {
-            let entry = ScopeEntry { name: name.clone(), pat };
-            self.scopes[scope].entries.push(entry);
+        if let Pat::Bind { id, .. } = pattern {
+            self.add_bindings(body, scope, *id);
         }
 
-        pattern.walk_child_pats(|pat| self.add_bindings(body, scope, pat));
+        pattern.walk_child_pats(|pat| self.add_pat_bindings(body, scope, pat));
     }
 
     fn add_params_bindings(&mut self, body: &Body, scope: ScopeId, params: &[PatId]) {
-        params.iter().for_each(|pat| self.add_bindings(body, scope, *pat));
+        params.iter().for_each(|pat| self.add_pat_bindings(body, scope, *pat));
     }
 
     fn set_scope(&mut self, node: ExprId, scope: ScopeId) {

@@ -169,7 +175,7 @@ fn compute_block_scopes(
             }
 
                 *scope = scopes.new_scope(*scope);
-                scopes.add_bindings(body, *scope, *pat);
+                scopes.add_pat_bindings(body, *scope, *pat);
             }
             Statement::Expr { expr, .. } => {
                 compute_expr_scopes(*expr, body, scopes, scope);

@@ -194,10 +200,20 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
             scopes.set_scope(expr, scope);
             compute_block_scopes(statements, *tail, body, scopes, &mut scope);
         }
+        Expr::Unsafe { id, statements, tail }
+        | Expr::Async { id, statements, tail }
+        | Expr::Const { id, statements, tail }
+        | Expr::TryBlock { id, statements, tail } => {
+            let mut scope = scopes.new_block_scope(*scope, *id, None);
+            // Overwrite the old scope for the block expr, so that every block scope can be found
+            // via the block itself (important for blocks that only contain items, no expressions).
+            scopes.set_scope(expr, scope);
+            compute_block_scopes(statements, *tail, body, scopes, &mut scope);
+        }
         Expr::For { iterable, pat, body: body_expr, label } => {
             compute_expr_scopes(*iterable, body, scopes, scope);
             let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
-            scopes.add_bindings(body, scope, *pat);
+            scopes.add_pat_bindings(body, scope, *pat);
             compute_expr_scopes(*body_expr, body, scopes, &mut scope);
         }
         Expr::While { condition, body: body_expr, label } => {

@@ -218,7 +234,7 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
             compute_expr_scopes(*expr, body, scopes, scope);
             for arm in arms.iter() {
                 let mut scope = scopes.new_scope(*scope);
-                scopes.add_bindings(body, scope, arm.pat);
+                scopes.add_pat_bindings(body, scope, arm.pat);
                 if let Some(guard) = arm.guard {
                     scope = scopes.new_scope(scope);
                     compute_expr_scopes(guard, body, scopes, &mut scope);

@@ -237,7 +253,7 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
         &Expr::Let { pat, expr } => {
             compute_expr_scopes(expr, body, scopes, scope);
             *scope = scopes.new_scope(*scope);
-            scopes.add_bindings(body, *scope, pat);
+            scopes.add_pat_bindings(body, *scope, pat);
         }
         e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)),
     };

@@ -439,7 +455,7 @@ fn foo() {
     let function = find_function(&db, file_id);
 
     let scopes = db.expr_scopes(function.into());
-    let (_body, source_map) = db.body_with_source_map(function.into());
+    let (body, source_map) = db.body_with_source_map(function.into());
 
     let expr_scope = {
         let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();

@@ -449,7 +465,9 @@ fn foo() {
     };
 
     let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
-    let pat_src = source_map.pat_syntax(resolved.pat()).unwrap();
+    let pat_src = source_map
+        .pat_syntax(*body.bindings[resolved.binding()].definitions.first().unwrap())
+        .unwrap();
 
     let local_name = pat_src.value.either(
         |it| it.syntax_node_ptr().to_node(file.syntax()),
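`ScopeEntry` now stores a `BindingId` instead of a `PatId`, but name resolution itself is unchanged: walk the scope chain from the innermost scope outwards (the ascending order documented above) and take the first matching entry. A self-contained toy of that lookup, with `Vec`s in place of the arenas and `String` in place of interned names:

```rust
use std::iter::successors;

// Scopes form a parent-linked tree; each scope owns (name, binding) entries.
type ScopeId = usize;
type BindingId = usize;

#[derive(Default)]
struct Scope {
    parent: Option<ScopeId>,
    entries: Vec<(String, BindingId)>,
}

#[derive(Default)]
struct ExprScopes {
    scopes: Vec<Scope>,
}

impl ExprScopes {
    fn new_scope(&mut self, parent: Option<ScopeId>) -> ScopeId {
        self.scopes.push(Scope { parent, entries: Vec::new() });
        self.scopes.len() - 1
    }

    fn add_binding(&mut self, scope: ScopeId, name: &str, binding: BindingId) {
        self.scopes[scope].entries.push((name.to_owned(), binding));
    }

    // Ascending iterator over a scope and its ancestors, like `scope_chain`.
    fn scope_chain(&self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + '_ {
        successors(scope, move |&s| self.scopes[s].parent)
    }

    fn resolve(&self, scope: ScopeId, name: &str) -> Option<BindingId> {
        self.scope_chain(Some(scope))
            .flat_map(|s| self.scopes[s].entries.iter())
            .find(|(n, _)| n == name)
            .map(|&(_, b)| b)
    }
}

fn main() {
    let mut scopes = ExprScopes::default();
    let outer = scopes.new_scope(None);
    let inner = scopes.new_scope(Some(outer));
    scopes.add_binding(outer, "x", 0);
    scopes.add_binding(inner, "x", 1); // shadows the outer `x`
    assert_eq!(scopes.resolve(inner, "x"), Some(1));
    assert_eq!(scopes.resolve(outer, "x"), Some(0));
    println!("inner x resolves to binding {:?}", scopes.resolve(inner, "x"));
}
```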
@@ -395,3 +395,25 @@ fn foo() {
 "#]],
     )
 }
+
+#[test]
+fn trailing_expr_macro_expands_stmts() {
+    check_at(
+        r#"
+macro_rules! foo {
+    () => { const FOO: u32 = 0;const BAR: u32 = 0; };
+}
+fn f() {$0
+    foo!{}
+};
+"#,
+        expect![[r#"
+            block scope
+            BAR: v
+            FOO: v
+
+            crate
+            f: v
+        "#]],
+    )
+}
@@ -1,7 +1,7 @@
 //! When *constructing* `hir`, we start at some parent syntax node and recursively
 //! lower the children.
 //!
-//! This modules allows one to go in the opposite direction: start with a syntax
+//! This module allows one to go in the opposite direction: start with a syntax
 //! node for a *child*, and get its hir.
 
 use either::Either;

@@ -145,6 +145,7 @@ impl ChildBySource for ItemScope {
             ModuleDefId::StaticId(id) => insert!(map[keys::STATIC].insert(id)),
             ModuleDefId::TypeAliasId(id) => insert!(map[keys::TYPE_ALIAS].insert(id)),
             ModuleDefId::TraitId(id) => insert!(map[keys::TRAIT].insert(id)),
+            ModuleDefId::TraitAliasId(id) => insert!(map[keys::TRAIT_ALIAS].insert(id)),
             ModuleDefId::AdtId(adt) => match adt {
                 AdtId::StructId(id) => insert!(map[keys::STRUCT].insert(id)),
                 AdtId::UnionId(id) => insert!(map[keys::UNION].insert(id)),
@@ -22,7 +22,7 @@ use crate::{
     visibility::RawVisibility,
     AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
     Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, ProcMacroId,
-    StaticId, TraitId, TypeAliasId, TypeAliasLoc,
+    StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -245,19 +245,11 @@ impl TraitData {
             attrs.by_key("rustc_skip_array_during_method_dispatch").exists();
         let rustc_has_incoherent_inherent_impls =
             attrs.by_key("rustc_has_incoherent_inherent_impls").exists();
-        let (items, attribute_calls, diagnostics) = match &tr_def.items {
-            Some(items) => {
-                let mut collector = AssocItemCollector::new(
-                    db,
-                    module_id,
-                    tree_id.file_id(),
-                    ItemContainerId::TraitId(tr),
-                );
-                collector.collect(&item_tree, tree_id.tree_id(), items);
-                collector.finish()
-            }
-            None => Default::default(),
-        };
+        let mut collector =
+            AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr));
+        collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items);
+        let (items, attribute_calls, diagnostics) = collector.finish();
+
         (
             Arc::new(TraitData {
                 name,
@@ -299,6 +291,23 @@ impl TraitData {
     }
 }
 
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TraitAliasData {
+    pub name: Name,
+    pub visibility: RawVisibility,
+}
+
+impl TraitAliasData {
+    pub(crate) fn trait_alias_query(db: &dyn DefDatabase, id: TraitAliasId) -> Arc<TraitAliasData> {
+        let loc = id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        let alias = &item_tree[loc.id.value];
+        let visibility = item_tree[alias.visibility].clone();
+
+        Arc::new(TraitAliasData { name: alias.name.clone(), visibility })
+    }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ImplData {
     pub target_trait: Option<Interned<TraitRef>>,
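For context, the new `TraitAliasData`/`TraitAliasId` plumbing above models Rust's trait-alias items. The snippet below is only an illustration of that surface syntax, not part of the diff; trait aliases are an unstable feature, so it compiles only on nightly with the feature gate enabled.

#![feature(trait_alias)]

// `SendSync` is a hypothetical alias name chosen for the example.
trait SendSync = Send + Sync;

fn assert_send_sync<T: SendSync>(_value: T) {}

fn main() {
    // Any `Send + Sync` type satisfies the alias bound.
    assert_send_sync(42u32);
}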
@@ -14,7 +14,7 @@ use crate::{
     body::{scope::ExprScopes, Body, BodySourceMap},
     data::{
         ConstData, FunctionData, ImplData, Macro2Data, MacroRulesData, ProcMacroData, StaticData,
-        TraitData, TypeAliasData,
+        TraitAliasData, TraitData, TypeAliasData,
     },
     generics::GenericParams,
     import_map::ImportMap,
@@ -25,8 +25,8 @@ use crate::{
     AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId,
     ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalEnumVariantId,
     LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc,
-    StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc,
-    UnionId, UnionLoc, VariantId,
+    StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc,
+    TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, VariantId,
 };
 
 #[salsa::query_group(InternDatabaseStorage)]
@@ -46,6 +46,8 @@ pub trait InternDatabase: SourceDatabase {
     #[salsa::interned]
     fn intern_trait(&self, loc: TraitLoc) -> TraitId;
     #[salsa::interned]
+    fn intern_trait_alias(&self, loc: TraitAliasLoc) -> TraitAliasId;
+    #[salsa::interned]
     fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId;
     #[salsa::interned]
     fn intern_impl(&self, loc: ImplLoc) -> ImplId;
@@ -125,6 +127,9 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
     #[salsa::invoke(TraitData::trait_data_with_diagnostics_query)]
     fn trait_data_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitData>, Arc<[DefDiagnostic]>);
 
+    #[salsa::invoke(TraitAliasData::trait_alias_query)]
+    fn trait_alias_data(&self, e: TraitAliasId) -> Arc<TraitAliasData>;
+
     #[salsa::invoke(TypeAliasData::type_alias_data_query)]
     fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;
 
@@ -17,6 +17,7 @@ use std::fmt;
 use hir_expand::name::Name;
 use intern::Interned;
 use la_arena::{Idx, RawIdx};
+use smallvec::SmallVec;
 
 use crate::{
     builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
@@ -29,6 +30,8 @@ pub use syntax::ast::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, Unar
 
 pub type ExprId = Idx<Expr>;
 
+pub type BindingId = Idx<Binding>;
+
 /// FIXME: this is a hacky function which should be removed
 pub(crate) fn dummy_expr_id() -> ExprId {
     ExprId::from_raw(RawIdx::from(u32::MAX))
@@ -52,13 +55,21 @@ pub type LabelId = Idx<Label>;
 // We convert float values into bits and that's how we don't need to deal with f32 and f64.
 // For PartialEq, bits comparison should work, as ordering is not important
 // https://github.com/rust-lang/rust-analyzer/issues/12380#issuecomment-1137284360
-#[derive(Default, Debug, Clone, Eq, PartialEq)]
+#[derive(Default, Debug, Clone, Copy, Eq, PartialEq)]
 pub struct FloatTypeWrapper(u64);
 
 impl FloatTypeWrapper {
     pub fn new(value: f64) -> Self {
         Self(value.to_bits())
     }
+
+    pub fn into_f64(self) -> f64 {
+        f64::from_bits(self.0)
+    }
+
+    pub fn into_f32(self) -> f32 {
+        f64::from_bits(self.0) as f32
+    }
 }
 
 impl fmt::Display for FloatTypeWrapper {
@@ -101,6 +112,26 @@ pub enum Expr {
         tail: Option<ExprId>,
         label: Option<LabelId>,
     },
+    TryBlock {
+        id: BlockId,
+        statements: Box<[Statement]>,
+        tail: Option<ExprId>,
+    },
+    Async {
+        id: BlockId,
+        statements: Box<[Statement]>,
+        tail: Option<ExprId>,
+    },
+    Const {
+        id: BlockId,
+        statements: Box<[Statement]>,
+        tail: Option<ExprId>,
+    },
+    Unsafe {
+        id: BlockId,
+        statements: Box<[Statement]>,
+        tail: Option<ExprId>,
+    },
     Loop {
         body: ExprId,
         label: Option<LabelId>,
@@ -164,15 +195,6 @@ pub enum Expr {
     Try {
         expr: ExprId,
     },
-    TryBlock {
-        body: ExprId,
-    },
-    Async {
-        body: ExprId,
-    },
-    Const {
-        body: ExprId,
-    },
     Cast {
         expr: ExprId,
         type_ref: Interned<TypeRef>,
@@ -214,9 +236,6 @@ pub enum Expr {
         exprs: Box<[ExprId]>,
         is_assignee_expr: bool,
     },
-    Unsafe {
-        body: ExprId,
-    },
     Array(Array),
     Literal(Literal),
     Underscore,
@@ -282,13 +301,20 @@ impl Expr {
             Expr::Let { expr, .. } => {
                 f(*expr);
             }
-            Expr::Block { statements, tail, .. } => {
+            Expr::Block { statements, tail, .. }
+            | Expr::TryBlock { statements, tail, .. }
+            | Expr::Unsafe { statements, tail, .. }
+            | Expr::Async { statements, tail, .. }
+            | Expr::Const { statements, tail, .. } => {
                 for stmt in statements.iter() {
                     match stmt {
-                        Statement::Let { initializer, .. } => {
+                        Statement::Let { initializer, else_branch, .. } => {
                             if let &Some(expr) = initializer {
                                 f(expr);
                             }
+                            if let &Some(expr) = else_branch {
+                                f(expr);
+                            }
                         }
                         Statement::Expr { expr: expression, .. } => f(*expression),
                     }
@@ -297,10 +323,6 @@ impl Expr {
                     f(expr);
                 }
             }
-            Expr::TryBlock { body }
-            | Expr::Unsafe { body }
-            | Expr::Async { body }
-            | Expr::Const { body } => f(*body),
             Expr::Loop { body, .. } => f(*body),
             Expr::While { condition, body, .. } => {
                 f(*condition);
@@ -414,6 +436,13 @@ impl BindingAnnotation {
     }
 }
 
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Binding {
+    pub name: Name,
+    pub mode: BindingAnnotation,
+    pub definitions: SmallVec<[PatId; 1]>,
+}
+
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct RecordFieldPat {
     pub name: Name,
@@ -432,7 +461,7 @@ pub enum Pat {
     Slice { prefix: Box<[PatId]>, slice: Option<PatId>, suffix: Box<[PatId]> },
     Path(Box<Path>),
     Lit(ExprId),
-    Bind { mode: BindingAnnotation, name: Name, subpat: Option<PatId> },
+    Bind { id: BindingId, subpat: Option<PatId> },
     TupleStruct { path: Option<Box<Path>>, args: Box<[PatId]>, ellipsis: Option<usize> },
     Ref { pat: PatId, mutability: Mutability },
     Box { inner: PatId },
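The `FloatTypeWrapper` hunk above relies on storing the `f64` bit pattern in a `u64` so that `Eq` (and now `Copy`) can be derived while the numeric value stays recoverable. The following is a minimal standalone sketch of that idea, not the rust-analyzer type itself; the name `FloatBits` is made up for the example.

// Sketch: a bits-based float wrapper with derivable Eq/Hash and lossless f64 round-trip.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct FloatBits(u64);

impl FloatBits {
    fn new(value: f64) -> Self {
        Self(value.to_bits())
    }
    fn into_f64(self) -> f64 {
        f64::from_bits(self.0)
    }
    fn into_f32(self) -> f32 {
        f64::from_bits(self.0) as f32
    }
}

fn main() {
    let x = FloatBits::new(1.5);
    // Bit-for-bit comparison behaves like value comparison for identical literals.
    assert_eq!(x, FloatBits::new(1.5));
    assert_eq!(x.into_f64(), 1.5);
    assert_eq!(x.into_f32(), 1.5f32);
}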
@@ -187,6 +187,7 @@ impl GenericParams {
             GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics!(id),
             GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics!(id),
             GenericDefId::TraitId(id) => id_to_generics!(id),
+            GenericDefId::TraitAliasId(id) => id_to_generics!(id),
             GenericDefId::TypeAliasId(id) => id_to_generics!(id),
             GenericDefId::ImplId(id) => id_to_generics!(id),
             GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => {
@@ -207,12 +208,10 @@ impl GenericParams {
     pub(crate) fn fill_bounds(
         &mut self,
         lower_ctx: &LowerCtx<'_>,
-        node: &dyn ast::HasTypeBounds,
+        type_bounds: Option<ast::TypeBoundList>,
         target: Either<TypeRef, LifetimeRef>,
     ) {
-        for bound in
-            node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
-        {
+        for bound in type_bounds.iter().flat_map(|type_bound_list| type_bound_list.bounds()) {
             self.add_where_predicate_from_bound(lower_ctx, bound, None, target.clone());
         }
     }
@@ -233,7 +232,11 @@ impl GenericParams {
                 };
                 self.type_or_consts.alloc(param.into());
                 let type_ref = TypeRef::Path(name.into());
-                self.fill_bounds(lower_ctx, &type_param, Either::Left(type_ref));
+                self.fill_bounds(
+                    lower_ctx,
+                    type_param.type_bound_list(),
+                    Either::Left(type_ref),
+                );
             }
             ast::TypeOrConstParam::Const(const_param) => {
                 let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
@@ -255,7 +258,11 @@ impl GenericParams {
                 let param = LifetimeParamData { name: name.clone() };
                 self.lifetimes.alloc(param);
                 let lifetime_ref = LifetimeRef::new_name(name);
-                self.fill_bounds(lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
+                self.fill_bounds(
+                    lower_ctx,
+                    lifetime_param.type_bound_list(),
+                    Either::Right(lifetime_ref),
+                );
             }
         }
 
@@ -421,6 +428,10 @@ fn file_id_and_params_of(
             let src = it.lookup(db).source(db);
             (src.file_id, src.value.generic_param_list())
         }
+        GenericDefId::TraitAliasId(it) => {
+            let src = it.lookup(db).source(db);
+            (src.file_id, src.value.generic_param_list())
+        }
         GenericDefId::TypeAliasId(it) => {
             let src = it.lookup(db).source(db);
             (src.file_id, src.value.generic_param_list())
@@ -435,7 +446,7 @@ fn file_id_and_params_of(
 }
 
 impl HasChildSource<LocalTypeOrConstParamId> for GenericDefId {
-    type Value = Either<ast::TypeOrConstParam, ast::Trait>;
+    type Value = Either<ast::TypeOrConstParam, ast::TraitOrAlias>;
     fn child_source(
         &self,
         db: &dyn DefDatabase,
@@ -447,11 +458,20 @@ impl HasChildSource<LocalTypeOrConstParamId> for GenericDefId {
 
         let mut params = ArenaMap::default();
 
-        // For traits the first type index is `Self`, we need to add it before the other params.
-        if let GenericDefId::TraitId(id) = *self {
-            let trait_ref = id.lookup(db).source(db).value;
-            let idx = idx_iter.next().unwrap();
-            params.insert(idx, Either::Right(trait_ref));
+        // For traits and trait aliases the first type index is `Self`, we need to add it before
+        // the other params.
+        match *self {
+            GenericDefId::TraitId(id) => {
+                let trait_ref = id.lookup(db).source(db).value;
+                let idx = idx_iter.next().unwrap();
+                params.insert(idx, Either::Right(ast::TraitOrAlias::Trait(trait_ref)));
+            }
+            GenericDefId::TraitAliasId(id) => {
+                let alias = id.lookup(db).source(db).value;
+                let idx = idx_iter.next().unwrap();
+                params.insert(idx, Either::Right(ast::TraitOrAlias::TraitAlias(alias)));
+            }
+            _ => {}
        }
 
         if let Some(generic_params_list) = generic_params_list {
@@ -264,6 +264,7 @@ pub enum ImportKind {
     Const,
     Static,
     Trait,
+    TraitAlias,
     TypeAlias,
     BuiltinType,
     AssociatedItem,
@@ -459,6 +460,7 @@ fn item_import_kind(item: ItemInNs) -> Option<ImportKind> {
         ModuleDefId::ConstId(_) => ImportKind::Const,
         ModuleDefId::StaticId(_) => ImportKind::Static,
         ModuleDefId::TraitId(_) => ImportKind::Trait,
+        ModuleDefId::TraitAliasId(_) => ImportKind::TraitAlias,
        ModuleDefId::TypeAliasId(_) => ImportKind::TypeAlias,
         ModuleDefId::BuiltinType(_) => ImportKind::BuiltinType,
         ModuleDefId::MacroId(_) => ImportKind::Macro,
@@ -431,6 +431,7 @@ impl PerNs {
             ModuleDefId::EnumVariantId(_) => PerNs::both(def, def, v),
             ModuleDefId::ConstId(_) | ModuleDefId::StaticId(_) => PerNs::values(def, v),
             ModuleDefId::TraitId(_) => PerNs::types(def, v),
+            ModuleDefId::TraitAliasId(_) => PerNs::types(def, v),
             ModuleDefId::TypeAliasId(_) => PerNs::types(def, v),
             ModuleDefId::BuiltinType(_) => PerNs::types(def, v),
             ModuleDefId::MacroId(mac) => PerNs::macros(mac, v),
@@ -204,6 +204,7 @@ impl ItemTree {
             consts,
             statics,
             traits,
+            trait_aliases,
             impls,
             type_aliases,
             mods,
@@ -226,6 +227,7 @@ impl ItemTree {
         consts.shrink_to_fit();
         statics.shrink_to_fit();
         traits.shrink_to_fit();
+        trait_aliases.shrink_to_fit();
         impls.shrink_to_fit();
         type_aliases.shrink_to_fit();
         mods.shrink_to_fit();
@@ -276,6 +278,7 @@ struct ItemTreeData {
     consts: Arena<Const>,
     statics: Arena<Static>,
     traits: Arena<Trait>,
+    trait_aliases: Arena<TraitAlias>,
     impls: Arena<Impl>,
     type_aliases: Arena<TypeAlias>,
     mods: Arena<Mod>,
@@ -496,6 +499,7 @@ mod_items! {
     Const in consts -> ast::Const,
     Static in statics -> ast::Static,
     Trait in traits -> ast::Trait,
+    TraitAlias in trait_aliases -> ast::TraitAlias,
     Impl in impls -> ast::Impl,
     TypeAlias in type_aliases -> ast::TypeAlias,
     Mod in mods -> ast::Module,
@@ -672,11 +676,18 @@ pub struct Trait {
     pub generic_params: Interned<GenericParams>,
     pub is_auto: bool,
     pub is_unsafe: bool,
-    /// This is [`None`] if this Trait is a trait alias.
-    pub items: Option<Box<[AssocItem]>>,
+    pub items: Box<[AssocItem]>,
     pub ast_id: FileAstId<ast::Trait>,
 }
 
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct TraitAlias {
+    pub name: Name,
+    pub visibility: RawVisibilityId,
+    pub generic_params: Interned<GenericParams>,
+    pub ast_id: FileAstId<ast::TraitAlias>,
+}
+
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct Impl {
     pub generic_params: Interned<GenericParams>,
@@ -872,6 +883,7 @@ impl ModItem {
             | ModItem::Enum(_)
             | ModItem::Static(_)
             | ModItem::Trait(_)
+            | ModItem::TraitAlias(_)
             | ModItem::Impl(_)
             | ModItem::Mod(_)
             | ModItem::MacroRules(_)
@@ -899,6 +911,7 @@ impl ModItem {
             ModItem::Const(it) => tree[it.index].ast_id().upcast(),
             ModItem::Static(it) => tree[it.index].ast_id().upcast(),
             ModItem::Trait(it) => tree[it.index].ast_id().upcast(),
+            ModItem::TraitAlias(it) => tree[it.index].ast_id().upcast(),
             ModItem::Impl(it) => tree[it.index].ast_id().upcast(),
             ModItem::TypeAlias(it) => tree[it.index].ast_id().upcast(),
             ModItem::Mod(it) => tree[it.index].ast_id().upcast(),
@@ -3,7 +3,7 @@
 use std::{collections::hash_map::Entry, sync::Arc};
 
 use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
-use syntax::ast::{self, HasModuleItem};
+use syntax::ast::{self, HasModuleItem, HasTypeBounds};
 
 use crate::{
     generics::{GenericParams, TypeParamData, TypeParamProvenance},
@@ -90,6 +90,13 @@ impl<'a> Ctx<'a> {
                 _ => None,
             })
             .collect();
+        if let Some(ast::Expr::MacroExpr(expr)) = block.tail_expr() {
+            if let Some(call) = expr.macro_call() {
+                if let Some(mod_item) = self.lower_mod_item(&call.into()) {
+                    self.tree.top_level.push(mod_item);
+                }
+            }
+        }
 
         self.tree
     }
@@ -110,6 +117,7 @@ impl<'a> Ctx<'a> {
             ast::Item::Const(ast) => self.lower_const(ast).into(),
             ast::Item::Module(ast) => self.lower_module(ast)?.into(),
             ast::Item::Trait(ast) => self.lower_trait(ast)?.into(),
+            ast::Item::TraitAlias(ast) => self.lower_trait_alias(ast)?.into(),
             ast::Item::Impl(ast) => self.lower_impl(ast)?.into(),
             ast::Item::Use(ast) => self.lower_use(ast)?.into(),
             ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast)?.into(),
@@ -147,7 +155,7 @@ impl<'a> Ctx<'a> {
     fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
         let visibility = self.lower_visibility(strukt);
         let name = strukt.name()?.as_name();
-        let generic_params = self.lower_generic_params(GenericsOwner::Struct, strukt);
+        let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt);
         let fields = self.lower_fields(&strukt.kind());
         let ast_id = self.source_ast_id_map.ast_id(strukt);
         let res = Struct { name, visibility, generic_params, fields, ast_id };
@@ -211,7 +219,7 @@ impl<'a> Ctx<'a> {
     fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
         let visibility = self.lower_visibility(union);
         let name = union.name()?.as_name();
-        let generic_params = self.lower_generic_params(GenericsOwner::Union, union);
+        let generic_params = self.lower_generic_params(HasImplicitSelf::No, union);
         let fields = match union.record_field_list() {
             Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
             None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
@@ -224,7 +232,7 @@ impl<'a> Ctx<'a> {
     fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
         let visibility = self.lower_visibility(enum_);
         let name = enum_.name()?.as_name();
-        let generic_params = self.lower_generic_params(GenericsOwner::Enum, enum_);
+        let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_);
         let variants = match &enum_.variant_list() {
             Some(variant_list) => self.lower_variants(variant_list),
             None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
@@ -372,8 +380,7 @@ impl<'a> Ctx<'a> {
             ast_id,
             flags,
         };
-        res.explicit_generic_params =
-            self.lower_generic_params(GenericsOwner::Function(&res), func);
+        res.explicit_generic_params = self.lower_generic_params(HasImplicitSelf::No, func);
 
         Some(id(self.data().functions.alloc(res)))
     }
@@ -386,7 +393,7 @@ impl<'a> Ctx<'a> {
         let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
         let visibility = self.lower_visibility(type_alias);
         let bounds = self.lower_type_bounds(type_alias);
-        let generic_params = self.lower_generic_params(GenericsOwner::TypeAlias, type_alias);
+        let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
         let ast_id = self.source_ast_id_map.ast_id(type_alias);
         let res = TypeAlias {
             name,
@@ -442,27 +449,49 @@ impl<'a> Ctx<'a> {
     fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
         let name = trait_def.name()?.as_name();
         let visibility = self.lower_visibility(trait_def);
-        let generic_params = self.lower_generic_params(GenericsOwner::Trait(trait_def), trait_def);
+        let generic_params =
+            self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def);
         let is_auto = trait_def.auto_token().is_some();
         let is_unsafe = trait_def.unsafe_token().is_some();
-        let items = trait_def.assoc_item_list().map(|list| {
-            list.assoc_items()
-                .filter_map(|item| {
-                    let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
-                    self.lower_assoc_item(&item).map(|item| {
-                        self.add_attrs(ModItem::from(item).into(), attrs);
-                        item
-                    })
-                })
-                .collect()
-        });
         let ast_id = self.source_ast_id_map.ast_id(trait_def);
-        let res = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
-        Some(id(self.data().traits.alloc(res)))
+
+        let items = trait_def
+            .assoc_item_list()
+            .into_iter()
+            .flat_map(|list| list.assoc_items())
+            .filter_map(|item| {
+                let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
+                self.lower_assoc_item(&item).map(|item| {
+                    self.add_attrs(ModItem::from(item).into(), attrs);
+                    item
+                })
+            })
+            .collect();
+
+        let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
+        Some(id(self.data().traits.alloc(def)))
+    }
+
+    fn lower_trait_alias(
+        &mut self,
+        trait_alias_def: &ast::TraitAlias,
+    ) -> Option<FileItemTreeId<TraitAlias>> {
+        let name = trait_alias_def.name()?.as_name();
+        let visibility = self.lower_visibility(trait_alias_def);
+        let generic_params = self.lower_generic_params(
+            HasImplicitSelf::Yes(trait_alias_def.type_bound_list()),
+            trait_alias_def,
+        );
+        let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
+
+        let alias = TraitAlias { name, visibility, generic_params, ast_id };
+        Some(id(self.data().trait_aliases.alloc(alias)))
     }
 
     fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
-        let generic_params = self.lower_generic_params(GenericsOwner::Impl, impl_def);
+        // Note that trait impls don't get implicit `Self` unlike traits, because here they are a
+        // type alias rather than a type parameter, so this is handled by the resolver.
+        let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def);
         // FIXME: If trait lowering fails, due to a non PathType for example, we treat this impl
         // as if it was an non-trait impl. Ideally we want to create a unique missing ref that only
         // equals itself.
@@ -566,42 +595,29 @@ impl<'a> Ctx<'a> {
 
     fn lower_generic_params(
         &mut self,
-        owner: GenericsOwner<'_>,
+        has_implicit_self: HasImplicitSelf,
         node: &dyn ast::HasGenericParams,
     ) -> Interned<GenericParams> {
         let mut generics = GenericParams::default();
-        match owner {
-            GenericsOwner::Function(_)
-            | GenericsOwner::Struct
-            | GenericsOwner::Enum
-            | GenericsOwner::Union
-            | GenericsOwner::TypeAlias => {
-                generics.fill(&self.body_ctx, node);
-            }
-            GenericsOwner::Trait(trait_def) => {
-                // traits get the Self type as an implicit first type parameter
-                generics.type_or_consts.alloc(
-                    TypeParamData {
-                        name: Some(name![Self]),
-                        default: None,
-                        provenance: TypeParamProvenance::TraitSelf,
-                    }
-                    .into(),
-                );
-                // add super traits as bounds on Self
-                // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
-                let self_param = TypeRef::Path(name![Self].into());
-                generics.fill_bounds(&self.body_ctx, trait_def, Either::Left(self_param));
-                generics.fill(&self.body_ctx, node);
-            }
-            GenericsOwner::Impl => {
-                // Note that we don't add `Self` here: in `impl`s, `Self` is not a
-                // type-parameter, but rather is a type-alias for impl's target
-                // type, so this is handled by the resolver.
-                generics.fill(&self.body_ctx, node);
-            }
+
+        if let HasImplicitSelf::Yes(bounds) = has_implicit_self {
+            // Traits and trait aliases get the Self type as an implicit first type parameter.
+            generics.type_or_consts.alloc(
+                TypeParamData {
+                    name: Some(name![Self]),
+                    default: None,
+                    provenance: TypeParamProvenance::TraitSelf,
+                }
+                .into(),
+            );
+            // add super traits as bounds on Self
+            // i.e., `trait Foo: Bar` is equivalent to `trait Foo where Self: Bar`
+            let self_param = TypeRef::Path(name![Self].into());
+            generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param));
         }
 
+        generics.fill(&self.body_ctx, node);
+
         generics.shrink_to_fit();
         Interned::new(generics)
     }
@@ -673,17 +689,10 @@ fn desugar_future_path(orig: TypeRef) -> Path {
     Path::from_known_path(path, generic_args)
 }
 
-enum GenericsOwner<'a> {
-    /// We need access to the partially-lowered `Function` for lowering `impl Trait` in argument
-    /// position.
-    Function(&'a Function),
-    Struct,
-    Enum,
-    Union,
-    /// The `TraitDef` is needed to fill the source map for the implicit `Self` parameter.
-    Trait(&'a ast::Trait),
-    TypeAlias,
-    Impl,
+enum HasImplicitSelf {
+    /// Inner list is a type bound list for the implicit `Self`.
+    Yes(Option<ast::TypeBoundList>),
+    No,
 }
 
 fn lower_abi(abi: ast::Abi) -> Interned<str> {
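The `HasImplicitSelf::Yes` path above encodes the equivalence mentioned in its comment: a supertrait bound is just a bound on the implicit `Self` parameter. A surface-Rust illustration (example trait names are invented, not from the diff):

trait Base {}

// Declaring a supertrait...
trait WithSupertrait: Base {}
// ...is the same as bounding `Self` in a where clause.
trait WithWhereClause where Self: Base {}

// Both forms require implementors to also implement `Base`.
struct S;
impl Base for S {}
impl WithSupertrait for S {}
impl WithWhereClause for S {}

fn main() {}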
@@ -374,23 +374,24 @@ impl<'a> Printer<'a> {
                 }
                 w!(self, "trait {}", name);
                 self.print_generic_params(generic_params);
-                match items {
-                    Some(items) => {
-                        self.print_where_clause_and_opening_brace(generic_params);
-                        self.indented(|this| {
-                            for item in &**items {
-                                this.print_mod_item((*item).into());
-                            }
-                        });
+                self.print_where_clause_and_opening_brace(generic_params);
+                self.indented(|this| {
+                    for item in &**items {
+                        this.print_mod_item((*item).into());
                     }
-                    None => {
-                        w!(self, " = ");
-                        // FIXME: Print the aliased traits
-                        self.print_where_clause_and_opening_brace(generic_params);
-                    }
-                }
+                });
                 wln!(self, "}}");
             }
+            ModItem::TraitAlias(it) => {
+                let TraitAlias { name, visibility, generic_params, ast_id: _ } = &self.tree[it];
+                self.print_visibility(*visibility);
+                w!(self, "trait {}", name);
+                self.print_generic_params(generic_params);
+                w!(self, " = ");
+                self.print_where_clause(generic_params);
+                w!(self, ";");
+                wln!(self);
+            }
             ModItem::Impl(it) => {
                 let Impl { target_trait, self_ty, is_negative, items, generic_params, ast_id: _ } =
                     &self.tree[it];
@@ -9,8 +9,8 @@ use syntax::{ast, AstNode, AstPtr};
 use crate::{
     dyn_map::{DynMap, Policy},
     ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id,
-    MacroRulesId, ProcMacroId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId,
-    UnionId,
+    MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
+    TypeOrConstParamId, UnionId,
 };
 
 pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
@@ -21,6 +21,7 @@ pub const STATIC: Key<ast::Static, StaticId> = Key::new();
 pub const TYPE_ALIAS: Key<ast::TypeAlias, TypeAliasId> = Key::new();
 pub const IMPL: Key<ast::Impl, ImplId> = Key::new();
 pub const TRAIT: Key<ast::Trait, TraitId> = Key::new();
+pub const TRAIT_ALIAS: Key<ast::TraitAlias, TraitAliasId> = Key::new();
 pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
 pub const UNION: Key<ast::Union, UnionId> = Key::new();
 pub const ENUM: Key<ast::Enum, EnumId> = Key::new();
@@ -181,15 +181,15 @@ impl LangItems {
         T: Into<AttrDefId> + Copy,
     {
         let _p = profile::span("collect_lang_item");
-        if let Some(lang_item) = lang_attr(db, item).and_then(|it| LangItem::from_str(&it)) {
+        if let Some(lang_item) = lang_attr(db, item) {
             self.items.entry(lang_item).or_insert_with(|| constructor(item));
         }
     }
 }
 
-pub fn lang_attr(db: &dyn DefDatabase, item: impl Into<AttrDefId> + Copy) -> Option<SmolStr> {
+pub fn lang_attr(db: &dyn DefDatabase, item: impl Into<AttrDefId> + Copy) -> Option<LangItem> {
     let attrs = db.attrs(item.into());
-    attrs.by_key("lang").string_value().cloned()
+    attrs.by_key("lang").string_value().cloned().and_then(|it| LangItem::from_str(&it))
 }
 
 pub enum GenericRequirement {
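The `lang_attr` hunk above moves the string-to-`LangItem` parsing from the call site into the helper, so callers get a typed value. A hedged, self-contained sketch of that refactor (the `LangItem` enum and helper names below are simplified stand-ins, not the real rust-analyzer types):

// Stand-in for the real LangItem enum.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum LangItem {
    Sized,
}

impl LangItem {
    fn from_str(s: &str) -> Option<LangItem> {
        match s {
            "sized" => Some(LangItem::Sized),
            _ => None,
        }
    }
}

// Before: return the raw attribute string; every caller parses it.
fn lang_attr_string(raw: Option<&str>) -> Option<String> {
    raw.map(str::to_owned)
}

// After: parse once inside the helper, return the typed lang item.
fn lang_attr_typed(raw: Option<&str>) -> Option<LangItem> {
    raw.and_then(LangItem::from_str)
}

fn main() {
    let attr = Some("sized");
    let old = lang_attr_string(attr).and_then(|it| LangItem::from_str(&it));
    let new = lang_attr_typed(attr);
    assert_eq!(old, new);
}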
@@ -86,7 +86,7 @@ use crate::{
     builtin_type::BuiltinType,
     item_tree::{
         Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, ModItem,
-        Static, Struct, Trait, TypeAlias, Union,
+        Static, Struct, Trait, TraitAlias, TypeAlias, Union,
     },
 };
 
@@ -128,7 +128,7 @@ impl ModuleId {
     }
 }
 
-/// An ID of a module, **local** to a specific crate
+/// An ID of a module, **local** to a `DefMap`.
 pub type LocalModuleId = Idx<nameres::ModuleData>;
 
 #[derive(Debug)]
@@ -261,6 +261,11 @@ pub struct TraitId(salsa::InternId);
 pub type TraitLoc = ItemLoc<Trait>;
 impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TraitAliasId(salsa::InternId);
+pub type TraitAliasLoc = ItemLoc<TraitAlias>;
+impl_intern!(TraitAliasId, TraitAliasLoc, intern_trait_alias, lookup_intern_trait_alias);
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct TypeAliasId(salsa::InternId);
 type TypeAliasLoc = AssocItemLoc<TypeAlias>;
@@ -453,6 +458,7 @@ pub enum ModuleDefId {
     ConstId(ConstId),
     StaticId(StaticId),
     TraitId(TraitId),
+    TraitAliasId(TraitAliasId),
     TypeAliasId(TypeAliasId),
     BuiltinType(BuiltinType),
     MacroId(MacroId),
@@ -466,6 +472,7 @@ impl_from!(
     ConstId,
     StaticId,
     TraitId,
+    TraitAliasId,
     TypeAliasId,
     BuiltinType
     for ModuleDefId
@@ -516,6 +523,7 @@ pub enum GenericDefId {
     FunctionId(FunctionId),
     AdtId(AdtId),
     TraitId(TraitId),
+    TraitAliasId(TraitAliasId),
     TypeAliasId(TypeAliasId),
     ImplId(ImplId),
     // enum variants cannot have generics themselves, but their parent enums
@@ -528,6 +536,7 @@ impl_from!(
     FunctionId,
     AdtId(StructId, EnumId, UnionId),
     TraitId,
+    TraitAliasId,
     TypeAliasId,
     ImplId,
     EnumVariantId,
@@ -555,6 +564,7 @@ pub enum AttrDefId {
     StaticId(StaticId),
     ConstId(ConstId),
     TraitId(TraitId),
+    TraitAliasId(TraitAliasId),
     TypeAliasId(TypeAliasId),
     MacroId(MacroId),
     ImplId(ImplId),
@@ -714,6 +724,7 @@ impl HasModule for GenericDefId {
             GenericDefId::FunctionId(it) => it.lookup(db).module(db),
             GenericDefId::AdtId(it) => it.module(db),
             GenericDefId::TraitId(it) => it.lookup(db).container,
+            GenericDefId::TraitAliasId(it) => it.lookup(db).container,
             GenericDefId::TypeAliasId(it) => it.lookup(db).module(db),
             GenericDefId::ImplId(it) => it.lookup(db).container,
             GenericDefId::EnumVariantId(it) => it.parent.lookup(db).container,
@@ -747,6 +758,7 @@ impl ModuleDefId {
             ModuleDefId::ConstId(id) => id.lookup(db).container.module(db),
             ModuleDefId::StaticId(id) => id.lookup(db).module(db),
             ModuleDefId::TraitId(id) => id.lookup(db).container,
+            ModuleDefId::TraitAliasId(id) => id.lookup(db).container,
             ModuleDefId::TypeAliasId(id) => id.lookup(db).module(db),
             ModuleDefId::MacroId(id) => id.module(db),
             ModuleDefId::BuiltinType(_) => return None,
@@ -765,6 +777,7 @@ impl AttrDefId {
             AttrDefId::StaticId(it) => it.lookup(db).module(db).krate,
             AttrDefId::ConstId(it) => it.lookup(db).module(db).krate,
             AttrDefId::TraitId(it) => it.lookup(db).container.krate,
+            AttrDefId::TraitAliasId(it) => it.lookup(db).container.krate,
             AttrDefId::TypeAliasId(it) => it.lookup(db).module(db).krate,
             AttrDefId::ImplId(it) => it.lookup(db).container.krate,
             AttrDefId::ExternBlockId(it) => it.lookup(db).container.krate,
@@ -143,7 +143,7 @@ macro_rules! assert {
 
 fn main() {
     {
-        if !true {
+        if !(true ) {
            $crate::panic!("{} {:?}", arg1(a, b, c), arg2);
         }
     };
@@ -827,6 +827,7 @@ macro_rules! rgb_color {
 /* parse error: expected type */
 /* parse error: expected R_PAREN */
 /* parse error: expected R_ANGLE */
+/* parse error: expected `::` */
 /* parse error: expected COMMA */
 /* parse error: expected R_ANGLE */
 /* parse error: expected SEMICOLON */
@@ -342,7 +342,7 @@ impl DefMap {
     }
 
     pub(crate) fn block_id(&self) -> Option<BlockId> {
-        self.block.as_ref().map(|block| block.block)
+        self.block.map(|block| block.block)
     }
 
     pub(crate) fn prelude(&self) -> Option<ModuleId> {
@@ -354,7 +354,7 @@ impl DefMap {
     }
 
     pub fn module_id(&self, local_id: LocalModuleId) -> ModuleId {
-        let block = self.block.as_ref().map(|b| b.block);
+        let block = self.block.map(|b| b.block);
         ModuleId { krate: self.krate, local_id, block }
     }
 
@@ -428,9 +428,9 @@ impl DefMap {
     /// Returns the module containing `local_mod`, either the parent `mod`, or the module containing
     /// the block, if `self` corresponds to a block expression.
     pub fn containing_module(&self, local_mod: LocalModuleId) -> Option<ModuleId> {
-        match &self[local_mod].parent {
-            Some(parent) => Some(self.module_id(*parent)),
-            None => self.block.as_ref().map(|block| block.parent),
+        match self[local_mod].parent {
+            Some(parent) => Some(self.module_id(parent)),
+            None => self.block.map(|block| block.parent),
         }
     }
 
@@ -440,11 +440,11 @@ impl DefMap {
         let mut buf = String::new();
         let mut arc;
         let mut current_map = self;
-        while let Some(block) = &current_map.block {
+        while let Some(block) = current_map.block {
             go(&mut buf, current_map, "block scope", current_map.root);
             buf.push('\n');
             arc = block.parent.def_map(db);
-            current_map = &*arc;
+            current_map = &arc;
         }
         go(&mut buf, current_map, "crate", current_map.root);
         return buf;
@@ -468,10 +468,10 @@ impl DefMap {
         let mut buf = String::new();
         let mut arc;
         let mut current_map = self;
-        while let Some(block) = &current_map.block {
+        while let Some(block) = current_map.block {
             format_to!(buf, "{:?} in {:?}\n", block.block, block.parent);
             arc = block.parent.def_map(db);
-            current_map = &*arc;
+            current_map = &arc;
         }
 
         format_to!(buf, "crate scope\n");
@@ -51,7 +51,8 @@ use crate::{
     AdtId, AstId, AstIdWithPath, ConstLoc, EnumLoc, EnumVariantId, ExternBlockLoc, FunctionId,
     FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId, Macro2Id, Macro2Loc,
     MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId,
-    ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro,
+    ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc,
+    UnresolvedMacro,
 };
 
 static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
@@ -666,8 +667,10 @@ impl DefCollector<'_> {
         macro_: Macro2Id,
         vis: &RawVisibility,
     ) {
-        let vis =
-            self.def_map.resolve_visibility(self.db, module_id, vis).unwrap_or(Visibility::Public);
+        let vis = self
+            .def_map
+            .resolve_visibility(self.db, module_id, vis, false)
+            .unwrap_or(Visibility::Public);
         self.def_map.modules[module_id].scope.declare(macro_.into());
         self.update(
             module_id,
@@ -831,7 +834,7 @@ impl DefCollector<'_> {
         let mut def = directive.status.namespaces();
         let vis = self
             .def_map
-            .resolve_visibility(self.db, module_id, &directive.import.visibility)
+            .resolve_visibility(self.db, module_id, &directive.import.visibility, false)
             .unwrap_or(Visibility::Public);
 
         match import.kind {
@@ -1547,7 +1550,7 @@ impl ModCollector<'_, '_> {
         };
         let resolve_vis = |def_map: &DefMap, visibility| {
             def_map
-                .resolve_visibility(db, self.module_id, visibility)
+                .resolve_visibility(db, self.module_id, visibility, false)
                 .unwrap_or(Visibility::Public)
         };
 
@@ -1707,6 +1710,20 @@ impl ModCollector<'_, '_> {
                         false,
                     );
                 }
+                ModItem::TraitAlias(id) => {
+                    let it = &self.item_tree[id];
+
+                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+                    update_def(
+                        self.def_collector,
+                        TraitAliasLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+                            .intern(db)
+                            .into(),
+                        &it.name,
+                        vis,
+                        false,
+                    );
+                }
                 ModItem::TypeAlias(id) => {
                     let it = &self.item_tree[id];
 
@@ -1823,7 +1840,7 @@ impl ModCollector<'_, '_> {
     ) -> LocalModuleId {
         let def_map = &mut self.def_collector.def_map;
         let vis = def_map
-            .resolve_visibility(self.def_collector.db, self.module_id, visibility)
+            .resolve_visibility(self.def_collector.db, self.module_id, visibility, false)
             .unwrap_or(Visibility::Public);
         let modules = &mut def_map.modules;
         let origin = match definition {
@@ -78,6 +78,7 @@ impl DefMap {
         // pub(path)
         //     ^^^^ this
         visibility: &RawVisibility,
+        within_impl: bool,
     ) -> Option<Visibility> {
         let mut vis = match visibility {
             RawVisibility::Module(path) => {
@@ -102,7 +103,8 @@ impl DefMap {
         // `super` to its parent (etc.). However, visibilities must only refer to a module in the
         // DefMap they're written in, so we restrict them when that happens.
         if let Visibility::Module(m) = vis {
-            if self.block_id() != m.block {
+            // ...unless we're resolving visibility for an associated item in an impl.
+            if self.block_id() != m.block && !within_impl {
                 cov_mark::hit!(adjust_vis_in_block_def_map);
                 vis = Visibility::Module(self.module_id(self.root()));
                 tracing::debug!("visibility {:?} points outside DefMap, adjusting to {:?}", m, vis);
@@ -223,6 +223,7 @@ pub type Ty = ();
             ModuleDefId::ConstId(it) => drop(db.const_data(it)),
             ModuleDefId::StaticId(it) => drop(db.static_data(it)),
             ModuleDefId::TraitId(it) => drop(db.trait_data(it)),
+            ModuleDefId::TraitAliasId(it) => drop(db.trait_alias_data(it)),
             ModuleDefId::TypeAliasId(it) => drop(db.type_alias_data(it)),
             ModuleDefId::EnumVariantId(_)
             | ModuleDefId::ModuleId(_)
@@ -8,7 +8,7 @@ use std::{
 
 use crate::{
     body::LowerCtx,
-    type_ref::{ConstScalarOrPath, LifetimeRef},
+    type_ref::{ConstRefOrPath, LifetimeRef},
 };
 use hir_expand::name::Name;
 use intern::Interned;
@@ -85,7 +85,7 @@ pub struct AssociatedTypeBinding {
 pub enum GenericArg {
     Type(TypeRef),
     Lifetime(LifetimeRef),
-    Const(ConstScalarOrPath),
+    Const(ConstRefOrPath),
 }
 
 impl Path {
@@ -2,7 +2,7 @@
 
 use std::iter;
 
-use crate::type_ref::ConstScalarOrPath;
+use crate::type_ref::ConstRefOrPath;
 
 use either::Either;
 use hir_expand::name::{name, AsName};
@@ -212,7 +212,7 @@ pub(super) fn lower_generic_args(
                 }
             }
             ast::GenericArg::ConstArg(arg) => {
-                let arg = ConstScalarOrPath::from_expr_opt(arg.expr());
+                let arg = ConstRefOrPath::from_expr_opt(arg.expr());
                 args.push(GenericArg::Const(arg))
             }
         }
@@ -1,5 +1,5 @@
 //! Name resolution façade.
-use std::{hash::BuildHasherDefault, sync::Arc};
+use std::{fmt, hash::BuildHasherDefault, sync::Arc};
 
 use base_db::CrateId;
 use hir_expand::name::{name, Name};
@@ -12,7 +12,7 @@ use crate::{
     body::scope::{ExprScopes, ScopeId},
     builtin_type::BuiltinType,
     db::DefDatabase,
-    expr::{ExprId, LabelId, PatId},
+    expr::{BindingId, ExprId, LabelId},
     generics::{GenericParams, TypeOrConstParamData},
     item_scope::{BuiltinShadowMode, BUILTIN_SCOPE},
     nameres::DefMap,
@@ -22,7 +22,8 @@ use crate::{
     AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,
     FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
     LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId,
-    StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, VariantId,
+    StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId,
+    VariantId,
 };
 
 #[derive(Debug, Clone)]
@@ -35,19 +36,34 @@ pub struct Resolver {
     module_scope: ModuleItemMap,
 }
 
-#[derive(Debug, Clone)]
+#[derive(Clone)]
 struct ModuleItemMap {
     def_map: Arc<DefMap>,
     module_id: LocalModuleId,
 }
 
-#[derive(Debug, Clone)]
+impl fmt::Debug for ModuleItemMap {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ModuleItemMap").field("module_id", &self.module_id).finish()
+    }
+}
+
+#[derive(Clone)]
 struct ExprScope {
     owner: DefWithBodyId,
     expr_scopes: Arc<ExprScopes>,
     scope_id: ScopeId,
 }
 
+impl fmt::Debug for ExprScope {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ExprScope")
+            .field("owner", &self.owner)
+            .field("scope_id", &self.scope_id)
+            .finish()
+    }
+}
+
 #[derive(Debug, Clone)]
 enum Scope {
     /// All the items and imported names of a module
@@ -74,6 +90,7 @@ pub enum TypeNs {
     TypeAliasId(TypeAliasId),
     BuiltinType(BuiltinType),
     TraitId(TraitId),
+    TraitAliasId(TraitAliasId),
     // Module belong to type ns, but the resolver is used when all module paths
     // are fully resolved.
     // ModuleId(ModuleId)
@@ -85,10 +102,10 @@ pub enum ResolveValueResult {
     Partial(TypeNs, usize),
 }
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 pub enum ValueNs {
     ImplSelf(ImplId),
-    LocalBinding(PatId),
+    LocalBinding(BindingId),
     FunctionId(FunctionId),
     ConstId(ConstId),
     StaticId(StaticId),
@@ -214,10 +231,12 @@ impl Resolver {
         db: &dyn DefDatabase,
         visibility: &RawVisibility,
     ) -> Option<Visibility> {
+        let within_impl =
+            self.scopes().find(|scope| matches!(scope, Scope::ImplDefScope(_))).is_some();
         match visibility {
             RawVisibility::Module(_) => {
                 let (item_map, module) = self.item_scope();
-                item_map.resolve_visibility(db, module, visibility)
+                item_map.resolve_visibility(db, module, visibility, within_impl)
             }
             RawVisibility::Public => Some(Visibility::Public),
         }
@ -236,69 +255,81 @@ impl Resolver {
|
||||||
return self.module_scope.resolve_path_in_value_ns(db, path);
|
return self.module_scope.resolve_path_in_value_ns(db, path);
|
||||||
}
|
}
|
||||||
|
|
||||||
for scope in self.scopes() {
|
if n_segments <= 1 {
|
||||||
match scope {
|
for scope in self.scopes() {
|
||||||
Scope::ExprScope(_) if n_segments > 1 => continue,
|
match scope {
|
||||||
Scope::ExprScope(scope) => {
|
Scope::ExprScope(scope) => {
|
||||||
let entry = scope
|
let entry = scope
|
||||||
.expr_scopes
|
.expr_scopes
|
||||||
.entries(scope.scope_id)
|
.entries(scope.scope_id)
|
||||||
.iter()
|
.iter()
|
||||||
.find(|entry| entry.name() == first_name);
|
.find(|entry| entry.name() == first_name);
|
||||||
|
|
||||||
if let Some(e) = entry {
|
if let Some(e) = entry {
|
||||||
return Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(e.pat())));
|
return Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(
|
||||||
|
e.binding(),
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Scope::GenericParams { params, def } => {
|
||||||
|
if let Some(id) = params.find_const_by_name(first_name, *def) {
|
||||||
|
let val = ValueNs::GenericParam(id);
|
||||||
|
return Some(ResolveValueResult::ValueNs(val));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
&Scope::ImplDefScope(impl_) => {
|
||||||
|
if first_name == &name![Self] {
|
||||||
|
return Some(ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// bare `Self` doesn't work in the value namespace in a struct/enum definition
|
||||||
|
Scope::AdtScope(_) => continue,
|
||||||
|
Scope::BlockScope(m) => {
|
||||||
|
if let Some(def) = m.resolve_path_in_value_ns(db, path) {
|
||||||
|
return Some(def);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Scope::GenericParams { params, def } if n_segments > 1 => {
|
}
|
||||||
if let Some(id) = params.find_type_by_name(first_name, *def) {
|
} else {
|
||||||
let ty = TypeNs::GenericParam(id);
|
for scope in self.scopes() {
|
||||||
return Some(ResolveValueResult::Partial(ty, 1));
|
match scope {
|
||||||
|
Scope::ExprScope(_) => continue,
|
||||||
|
Scope::GenericParams { params, def } => {
|
||||||
|
if let Some(id) = params.find_type_by_name(first_name, *def) {
|
||||||
|
let ty = TypeNs::GenericParam(id);
|
||||||
|
return Some(ResolveValueResult::Partial(ty, 1));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
&Scope::ImplDefScope(impl_) => {
|
||||||
Scope::GenericParams { .. } if n_segments != 1 => continue,
|
if first_name == &name![Self] {
|
||||||
Scope::GenericParams { params, def } => {
|
return Some(ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1));
|
||||||
if let Some(id) = params.find_const_by_name(first_name, *def) {
|
}
|
||||||
let val = ValueNs::GenericParam(id);
|
|
||||||
return Some(ResolveValueResult::ValueNs(val));
|
|
||||||
}
|
}
|
||||||
}
|
Scope::AdtScope(adt) => {
|
||||||
|
if first_name == &name![Self] {
|
||||||
&Scope::ImplDefScope(impl_) => {
|
let ty = TypeNs::AdtSelfType(*adt);
|
||||||
if first_name == &name![Self] {
|
return Some(ResolveValueResult::Partial(ty, 1));
|
||||||
return Some(if n_segments > 1 {
|
}
|
||||||
ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1)
|
|
||||||
} else {
|
|
||||||
ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_))
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
}
|
Scope::BlockScope(m) => {
|
||||||
// bare `Self` doesn't work in the value namespace in a struct/enum definition
|
if let Some(def) = m.resolve_path_in_value_ns(db, path) {
|
||||||
Scope::AdtScope(_) if n_segments == 1 => continue,
|
return Some(def);
|
||||||
Scope::AdtScope(adt) => {
|
}
|
||||||
if first_name == &name![Self] {
|
|
||||||
let ty = TypeNs::AdtSelfType(*adt);
|
|
||||||
return Some(ResolveValueResult::Partial(ty, 1));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Scope::BlockScope(m) => {
|
|
||||||
if let Some(def) = m.resolve_path_in_value_ns(db, path) {
|
|
||||||
return Some(def);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if let res @ Some(_) = self.module_scope.resolve_path_in_value_ns(db, path) {
|
if let Some(res) = self.module_scope.resolve_path_in_value_ns(db, path) {
|
||||||
return res;
|
return Some(res);
|
||||||
}
|
}
|
||||||
|
|
||||||
// If a path of the shape `u16::from_le_bytes` failed to resolve at all, then we fall back
|
// If a path of the shape `u16::from_le_bytes` failed to resolve at all, then we fall back
|
||||||
// to resolving to the primitive type, to allow this to still work in the presence of
|
// to resolving to the primitive type, to allow this to still work in the presence of
|
||||||
// `use core::u16;`.
|
// `use core::u16;`.
|
||||||
if path.kind == PathKind::Plain && path.segments().len() > 1 {
|
if path.kind == PathKind::Plain && n_segments > 1 {
|
||||||
if let Some(builtin) = BuiltinType::by_name(&path.segments()[0]) {
|
if let Some(builtin) = BuiltinType::by_name(first_name) {
|
||||||
return Some(ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1));
|
return Some(ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -400,6 +431,8 @@ impl Resolver {
     }
 
     pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet<TraitId> {
+        // FIXME(trait_alias): Trait alias brings aliased traits in scope! Note that supertraits of
+        // aliased traits are NOT brought in scope (unless also aliased).
         let mut traits = FxHashSet::default();
 
         for scope in self.scopes() {
@@ -428,6 +461,15 @@ impl Resolver {
         traits
     }
 
+    pub fn traits_in_scope_from_block_scopes(&self) -> impl Iterator<Item = TraitId> + '_ {
+        self.scopes()
+            .filter_map(|scope| match scope {
+                Scope::BlockScope(m) => Some(m.def_map[m.module_id].scope.traits()),
+                _ => None,
+            })
+            .flatten()
+    }
+
     pub fn module(&self) -> ModuleId {
         let (def_map, local_id) = self.item_scope();
         def_map.module_id(local_id)
@ -459,14 +501,85 @@ impl Resolver {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn generic_params(&self) -> Option<&Interned<GenericParams>> {
|
||||||
|
self.scopes().find_map(|scope| match scope {
|
||||||
|
Scope::GenericParams { params, .. } => Some(params),
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
pub fn body_owner(&self) -> Option<DefWithBodyId> {
|
pub fn body_owner(&self) -> Option<DefWithBodyId> {
|
||||||
self.scopes().find_map(|scope| match scope {
|
self.scopes().find_map(|scope| match scope {
|
||||||
Scope::ExprScope(it) => Some(it.owner),
|
Scope::ExprScope(it) => Some(it.owner),
|
||||||
_ => None,
|
_ => None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
/// `expr_id` is required to be an expression id that comes after the top level expression scope in the given resolver
|
||||||
|
#[must_use]
|
||||||
|
pub fn update_to_inner_scope(
|
||||||
|
&mut self,
|
||||||
|
db: &dyn DefDatabase,
|
||||||
|
owner: DefWithBodyId,
|
||||||
|
expr_id: ExprId,
|
||||||
|
) -> UpdateGuard {
|
||||||
|
#[inline(always)]
|
||||||
|
fn append_expr_scope(
|
||||||
|
db: &dyn DefDatabase,
|
||||||
|
resolver: &mut Resolver,
|
||||||
|
owner: DefWithBodyId,
|
||||||
|
expr_scopes: &Arc<ExprScopes>,
|
||||||
|
scope_id: ScopeId,
|
||||||
|
) {
|
||||||
|
resolver.scopes.push(Scope::ExprScope(ExprScope {
|
||||||
|
owner,
|
||||||
|
expr_scopes: expr_scopes.clone(),
|
||||||
|
scope_id,
|
||||||
|
}));
|
||||||
|
if let Some(block) = expr_scopes.block(scope_id) {
|
||||||
|
if let Some(def_map) = db.block_def_map(block) {
|
||||||
|
let root = def_map.root();
|
||||||
|
resolver
|
||||||
|
.scopes
|
||||||
|
.push(Scope::BlockScope(ModuleItemMap { def_map, module_id: root }));
|
||||||
|
// FIXME: This adds as many module scopes as there are blocks, but resolving in each
|
||||||
|
// already traverses all parents, so this is O(n²). I think we could only store the
|
||||||
|
// innermost module scope instead?
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let start = self.scopes.len();
|
||||||
|
let innermost_scope = self.scopes().next();
|
||||||
|
match innermost_scope {
|
||||||
|
Some(&Scope::ExprScope(ExprScope { scope_id, ref expr_scopes, owner })) => {
|
||||||
|
let expr_scopes = expr_scopes.clone();
|
||||||
|
let scope_chain = expr_scopes
|
||||||
|
.scope_chain(expr_scopes.scope_for(expr_id))
|
||||||
|
.take_while(|&it| it != scope_id);
|
||||||
|
for scope_id in scope_chain {
|
||||||
|
append_expr_scope(db, self, owner, &expr_scopes, scope_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
let expr_scopes = db.expr_scopes(owner);
|
||||||
|
let scope_chain = expr_scopes.scope_chain(expr_scopes.scope_for(expr_id));
|
||||||
|
|
||||||
|
for scope_id in scope_chain {
|
||||||
|
append_expr_scope(db, self, owner, &expr_scopes, scope_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.scopes[start..].reverse();
|
||||||
|
UpdateGuard(start)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn reset_to_guard(&mut self, UpdateGuard(start): UpdateGuard) {
|
||||||
|
self.scopes.truncate(start);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct UpdateGuard(usize);
|
||||||
|
|
||||||
impl Resolver {
|
impl Resolver {
|
||||||
fn scopes(&self) -> impl Iterator<Item = &Scope> {
|
fn scopes(&self) -> impl Iterator<Item = &Scope> {
|
||||||
self.scopes.iter().rev()
|
self.scopes.iter().rev()
|
||||||
|
@@ -504,7 +617,7 @@ pub enum ScopeDef {
     ImplSelfType(ImplId),
     AdtSelfType(AdtId),
     GenericParam(GenericParamId),
-    Local(PatId),
+    Local(BindingId),
     Label(LabelId),
 }
 
@@ -556,17 +669,18 @@ impl Scope {
                     acc.add(&name, ScopeDef::Label(label))
                 }
                 scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| {
-                    acc.add_local(e.name(), e.pat());
+                    acc.add_local(e.name(), e.binding());
                 });
             }
         }
     }
 }
 
-// needs arbitrary_self_types to be a method... or maybe move to the def?
 pub fn resolver_for_expr(db: &dyn DefDatabase, owner: DefWithBodyId, expr_id: ExprId) -> Resolver {
+    let r = owner.resolver(db);
     let scopes = db.expr_scopes(owner);
-    resolver_for_scope(db, owner, scopes.scope_for(expr_id))
+    let scope_id = scopes.scope_for(expr_id);
+    resolver_for_scope_(db, scopes, scope_id, r, owner)
 }
 
 pub fn resolver_for_scope(
@@ -574,8 +688,18 @@ pub fn resolver_for_scope(
     owner: DefWithBodyId,
     scope_id: Option<ScopeId>,
 ) -> Resolver {
-    let mut r = owner.resolver(db);
+    let r = owner.resolver(db);
     let scopes = db.expr_scopes(owner);
+    resolver_for_scope_(db, scopes, scope_id, r, owner)
+}
+
+fn resolver_for_scope_(
+    db: &dyn DefDatabase,
+    scopes: Arc<ExprScopes>,
+    scope_id: Option<ScopeId>,
+    mut r: Resolver,
+    owner: DefWithBodyId,
+) -> Resolver {
     let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
     r.scopes.reserve(scope_chain.len());
 
@@ -641,6 +765,7 @@ impl ModuleItemMap {
         let ty = match module_def.take_types()? {
             ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
             ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
+            ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it),
             ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it),
             ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it),
 
@@ -678,6 +803,7 @@ fn to_value_ns(per_ns: PerNs) -> Option<ValueNs> {
 
         ModuleDefId::AdtId(AdtId::EnumId(_) | AdtId::UnionId(_))
         | ModuleDefId::TraitId(_)
+        | ModuleDefId::TraitAliasId(_)
        | ModuleDefId::TypeAliasId(_)
        | ModuleDefId::BuiltinType(_)
        | ModuleDefId::MacroId(_)
@@ -695,6 +821,7 @@ fn to_type_ns(per_ns: PerNs) -> Option<TypeNs> {
         ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it),
 
         ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
+        ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it),
 
         ModuleDefId::FunctionId(_)
         | ModuleDefId::ConstId(_)
@@ -732,7 +859,7 @@ impl ScopeNames {
             self.add(name, ScopeDef::Unknown)
         }
     }
-    fn add_local(&mut self, name: &Name, pat: PatId) {
+    fn add_local(&mut self, name: &Name, binding: BindingId) {
         let set = self.map.entry(name.clone()).or_default();
         // XXX: hack, account for local (and only local) shadowing.
         //
@@ -743,7 +870,7 @@ impl ScopeNames {
             cov_mark::hit!(shadowing_shows_single_completion);
             return;
         }
-        set.push(ScopeDef::Local(pat))
+        set.push(ScopeDef::Local(binding))
     }
 }
 
@@ -779,6 +906,12 @@ impl HasResolver for TraitId {
     }
 }
 
+impl HasResolver for TraitAliasId {
+    fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+        self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
+    }
+}
+
 impl<T: Into<AdtId> + Copy> HasResolver for T {
     fn resolver(self, db: &dyn DefDatabase) -> Resolver {
         let def = self.into();
@@ -858,6 +991,7 @@ impl HasResolver for GenericDefId {
             GenericDefId::FunctionId(inner) => inner.resolver(db),
             GenericDefId::AdtId(adt) => adt.resolver(db),
             GenericDefId::TraitId(inner) => inner.resolver(db),
+            GenericDefId::TraitAliasId(inner) => inner.resolver(db),
             GenericDefId::TypeAliasId(inner) => inner.resolver(db),
             GenericDefId::ImplId(inner) => inner.resolver(db),
             GenericDefId::EnumVariantId(inner) => inner.parent.resolver(db),
@@ -116,7 +116,7 @@ pub enum TypeRef {
     Reference(Box<TypeRef>, Option<LifetimeRef>, Mutability),
     // FIXME: for full const generics, the latter element (length) here is going to have to be an
     // expression that is further lowered later in hir_ty.
-    Array(Box<TypeRef>, ConstScalarOrPath),
+    Array(Box<TypeRef>, ConstRefOrPath),
     Slice(Box<TypeRef>),
     /// A fn pointer. Last element of the vector is the return type.
     Fn(Vec<(Option<Name>, TypeRef)>, bool /*varargs*/, bool /*is_unsafe*/),
@@ -188,7 +188,7 @@ impl TypeRef {
                 // `hir_def::body::lower` to lower this into an `Expr` and then evaluate it at the
                 // `hir_ty` level, which would allow knowing the type of:
                 // let v: [u8; 2 + 2] = [0u8; 4];
-                let len = ConstScalarOrPath::from_expr_opt(inner.expr());
+                let len = ConstRefOrPath::from_expr_opt(inner.expr());
                 TypeRef::Array(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())), len)
             }
             ast::Type::SliceType(inner) => {
@@ -378,25 +378,25 @@ impl TypeBound {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum ConstScalarOrPath {
-    Scalar(ConstScalar),
+pub enum ConstRefOrPath {
+    Scalar(ConstRef),
     Path(Name),
 }
 
-impl std::fmt::Display for ConstScalarOrPath {
+impl std::fmt::Display for ConstRefOrPath {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            ConstScalarOrPath::Scalar(s) => s.fmt(f),
-            ConstScalarOrPath::Path(n) => n.fmt(f),
+            ConstRefOrPath::Scalar(s) => s.fmt(f),
+            ConstRefOrPath::Path(n) => n.fmt(f),
         }
     }
 }
 
-impl ConstScalarOrPath {
+impl ConstRefOrPath {
     pub(crate) fn from_expr_opt(expr: Option<ast::Expr>) -> Self {
         match expr {
             Some(x) => Self::from_expr(x),
-            None => Self::Scalar(ConstScalar::Unknown),
+            None => Self::Scalar(ConstRef::Unknown),
         }
     }
 
@@ -407,7 +407,7 @@ impl ConstScalarOrPath {
             ast::Expr::PathExpr(p) => {
                 match p.path().and_then(|x| x.segment()).and_then(|x| x.name_ref()) {
                     Some(x) => Self::Path(x.as_name()),
-                    None => Self::Scalar(ConstScalar::Unknown),
+                    None => Self::Scalar(ConstRef::Unknown),
                 }
             }
             ast::Expr::PrefixExpr(prefix_expr) => match prefix_expr.op_kind() {
@@ -415,8 +415,8 @@
                     let unsigned = Self::from_expr_opt(prefix_expr.expr());
                     // Add sign
                     match unsigned {
-                        Self::Scalar(ConstScalar::UInt(num)) => {
-                            Self::Scalar(ConstScalar::Int(-(num as i128)))
+                        Self::Scalar(ConstRef::UInt(num)) => {
+                            Self::Scalar(ConstRef::Int(-(num as i128)))
                         }
                         other => other,
                     }
@@ -425,22 +425,22 @@ impl ConstScalarOrPath {
             },
             ast::Expr::Literal(literal) => Self::Scalar(match literal.kind() {
                 ast::LiteralKind::IntNumber(num) => {
-                    num.value().map(ConstScalar::UInt).unwrap_or(ConstScalar::Unknown)
+                    num.value().map(ConstRef::UInt).unwrap_or(ConstRef::Unknown)
                 }
                 ast::LiteralKind::Char(c) => {
-                    c.value().map(ConstScalar::Char).unwrap_or(ConstScalar::Unknown)
+                    c.value().map(ConstRef::Char).unwrap_or(ConstRef::Unknown)
                 }
-                ast::LiteralKind::Bool(f) => ConstScalar::Bool(f),
-                _ => ConstScalar::Unknown,
+                ast::LiteralKind::Bool(f) => ConstRef::Bool(f),
+                _ => ConstRef::Unknown,
             }),
-            _ => Self::Scalar(ConstScalar::Unknown),
+            _ => Self::Scalar(ConstRef::Unknown),
         }
     }
 }
 
 /// A concrete constant value
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub enum ConstScalar {
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ConstRef {
     Int(i128),
     UInt(u128),
     Bool(bool),
@@ -454,18 +454,18 @@ pub enum ConstScalar {
     Unknown,
 }
 
-impl ConstScalar {
+impl ConstRef {
     pub fn builtin_type(&self) -> BuiltinType {
         match self {
-            ConstScalar::UInt(_) | ConstScalar::Unknown => BuiltinType::Uint(BuiltinUint::U128),
-            ConstScalar::Int(_) => BuiltinType::Int(BuiltinInt::I128),
-            ConstScalar::Char(_) => BuiltinType::Char,
-            ConstScalar::Bool(_) => BuiltinType::Bool,
+            ConstRef::UInt(_) | ConstRef::Unknown => BuiltinType::Uint(BuiltinUint::U128),
+            ConstRef::Int(_) => BuiltinType::Int(BuiltinInt::I128),
+            ConstRef::Char(_) => BuiltinType::Char,
+            ConstRef::Bool(_) => BuiltinType::Bool,
         }
     }
 }
 
-impl From<Literal> for ConstScalar {
+impl From<Literal> for ConstRef {
     fn from(literal: Literal) -> Self {
         match literal {
             Literal::Char(c) => Self::Char(c),
@@ -477,14 +477,14 @@ impl From<Literal> for ConstScalar {
         }
     }
 }
 
-impl std::fmt::Display for ConstScalar {
+impl std::fmt::Display for ConstRef {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
         match self {
-            ConstScalar::Int(num) => num.fmt(f),
-            ConstScalar::UInt(num) => num.fmt(f),
-            ConstScalar::Bool(flag) => flag.fmt(f),
-            ConstScalar::Char(c) => write!(f, "'{c}'"),
-            ConstScalar::Unknown => f.write_char('_'),
+            ConstRef::Int(num) => num.fmt(f),
+            ConstRef::UInt(num) => num.fmt(f),
+            ConstRef::Bool(flag) => flag.fmt(f),
+            ConstRef::Char(c) => write!(f, "'{c}'"),
+            ConstRef::Unknown => f.write_char('_'),
         }
     }
 }
@@ -11,7 +11,7 @@ use crate::{
     nameres::DefMap,
     path::{ModPath, PathKind},
     resolver::HasResolver,
-    ConstId, FunctionId, HasModule, LocalFieldId, ModuleId, VariantId,
+    ConstId, FunctionId, HasModule, LocalFieldId, LocalModuleId, ModuleId, VariantId,
 };
 
 /// Visibility of an item, not yet resolved.
@@ -120,7 +120,7 @@ impl Visibility {
         self,
         db: &dyn DefDatabase,
         def_map: &DefMap,
-        mut from_module: crate::LocalModuleId,
+        mut from_module: LocalModuleId,
     ) -> bool {
         let mut to_module = match self {
             Visibility::Module(m) => m,
@@ -142,7 +142,8 @@ impl Visibility {
             arc = to_module.def_map(db);
             &arc
         };
-        let is_block_root = matches!(to_module.block, Some(_) if to_module_def_map[to_module.local_id].parent.is_none());
+        let is_block_root =
+            to_module.block.is_some() && to_module_def_map[to_module.local_id].parent.is_none();
         if is_block_root {
             to_module = to_module_def_map.containing_module(to_module.local_id).unwrap();
         }
@@ -206,7 +206,7 @@ fn assert_expand(
     let cond = cond.clone();
     let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
     quote! {{
-        if !#cond {
+        if !(#cond) {
             #DOLLAR_CRATE::panic!(##panic_args);
         }
     }}
@@ -771,6 +771,15 @@ impl<T> InFile<Option<T>> {
     }
 }
 
+impl<L, R> InFile<Either<L, R>> {
+    pub fn transpose(self) -> Either<InFile<L>, InFile<R>> {
+        match self.value {
+            Either::Left(l) => Either::Left(InFile::new(self.file_id, l)),
+            Either::Right(r) => Either::Right(InFile::new(self.file_id, r)),
+        }
+    }
+}
+
 impl<'a> InFile<&'a SyntaxNode> {
     pub fn ancestors_with_macros(
         self,
@@ -806,7 +815,7 @@ impl<'a> InFile<&'a SyntaxNode> {
     /// Falls back to the macro call range if the node cannot be mapped up fully.
     ///
     /// For attributes and derives, this will point back to the attribute only.
-    /// For the entire item `InFile::use original_file_range_full`.
+    /// For the entire item use [`InFile::original_file_range_full`].
     pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
         match self.file_id.repr() {
             HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
@@ -821,6 +830,21 @@ impl<'a> InFile<&'a SyntaxNode> {
         }
     }
 
+    /// Falls back to the macro call range if the node cannot be mapped up fully.
+    pub fn original_file_range_full(self, db: &dyn db::AstDatabase) -> FileRange {
+        match self.file_id.repr() {
+            HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+            HirFileIdRepr::MacroFile(mac_file) => {
+                if let Some(res) = self.original_file_range_opt(db) {
+                    return res;
+                }
+                // Fall back to whole macro call.
+                let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+                loc.kind.original_call_range_with_body(db)
+            }
+        }
+    }
+
     /// Attempts to map the syntax node back up its macro calls.
     pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option<FileRange> {
         match ascend_node_border_tokens(db, self) {
@@ -18,6 +18,7 @@ arrayvec = "0.7.2"
 bitflags = "1.3.2"
 smallvec.workspace = true
 ena = "0.14.0"
+either = "1.7.0"
 tracing = "0.1.35"
 rustc-hash = "1.1.0"
 scoped-tls = "1.0.0"
@@ -152,6 +152,15 @@ impl TyBuilder<()> {
         TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner)
     }
 
+    // FIXME: rustc's ty is dependent on the adt type, maybe we need to do that as well
+    pub fn discr_ty() -> Ty {
+        TyKind::Scalar(chalk_ir::Scalar::Int(chalk_ir::IntTy::I128)).intern(Interner)
+    }
+
+    pub fn bool() -> Ty {
+        TyKind::Scalar(chalk_ir::Scalar::Bool).intern(Interner)
+    }
+
     pub fn usize() -> Ty {
         TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(Interner)
     }
@@ -540,8 +540,7 @@ pub(crate) fn trait_datum_query(
     let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
     let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
     let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
-    let well_known = lang_attr(db.upcast(), trait_)
-        .and_then(|name| well_known_trait_from_lang_item(LangItem::from_str(&name)?));
+    let well_known = lang_attr(db.upcast(), trait_).and_then(well_known_trait_from_lang_item);
     let trait_datum = TraitDatum {
         id: trait_id,
         binders: make_binders(db, &generic_params, trait_datum_bound),
@@ -13,7 +13,7 @@ use crate::{
     db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
     from_placeholder_idx, to_chalk_trait_id, utils::generics, AdtId, AliasEq, AliasTy, Binders,
     CallableDefId, CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy,
-    QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, WhereClause,
+    QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause,
 };
 
 pub trait TyExt {
@@ -22,6 +22,7 @@ pub trait TyExt {
     fn is_floating_point(&self) -> bool;
     fn is_never(&self) -> bool;
     fn is_unknown(&self) -> bool;
+    fn contains_unknown(&self) -> bool;
     fn is_ty_var(&self) -> bool;
 
     fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>;
@@ -76,6 +77,10 @@ impl TyExt for Ty {
         matches!(self.kind(Interner), TyKind::Error)
     }
 
+    fn contains_unknown(&self) -> bool {
+        self.data(Interner).flags.contains(TypeFlags::HAS_ERROR)
+    }
+
     fn is_ty_var(&self) -> bool {
         matches!(self.kind(Interner), TyKind::InferenceVar(_, _))
     }
@ -1,30 +1,25 @@
|
||||||
//! Constant evaluation details
|
//! Constant evaluation details
|
||||||
|
|
||||||
use std::{
|
use base_db::CrateId;
|
||||||
collections::HashMap,
|
use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData};
|
||||||
fmt::{Display, Write},
|
|
||||||
};
|
|
||||||
|
|
||||||
use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData, IntTy, Scalar};
|
|
||||||
use hir_def::{
|
use hir_def::{
|
||||||
builtin_type::BuiltinInt,
|
expr::Expr,
|
||||||
expr::{ArithOp, BinaryOp, Expr, ExprId, Literal, Pat, PatId},
|
|
||||||
path::ModPath,
|
path::ModPath,
|
||||||
resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs},
|
resolver::{Resolver, ValueNs},
|
||||||
src::HasChildSource,
|
type_ref::ConstRef,
|
||||||
type_ref::ConstScalar,
|
ConstId, EnumVariantId,
|
||||||
ConstId, DefWithBodyId, EnumVariantId, Lookup,
|
|
||||||
};
|
};
|
||||||
use la_arena::{Arena, Idx, RawIdx};
|
use la_arena::{Idx, RawIdx};
|
||||||
use stdx::never;
|
use stdx::never;
|
||||||
use syntax::ast::HasName;
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
|
db::HirDatabase, infer::InferenceContext, layout::layout_of_ty, lower::ParamLoweringMode,
|
||||||
utils::Generics, Const, ConstData, ConstValue, GenericArg, InferenceResult, Interner, Ty,
|
to_placeholder_idx, utils::Generics, Const, ConstData, ConstScalar, ConstValue, GenericArg,
|
||||||
TyBuilder, TyKind,
|
Interner, MemoryMap, Ty, TyBuilder,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
|
||||||
|
|
||||||
/// Extension trait for [`Const`]
|
/// Extension trait for [`Const`]
|
||||||
pub trait ConstExt {
|
pub trait ConstExt {
|
||||||
/// Is a [`Const`] unknown?
|
/// Is a [`Const`] unknown?
|
||||||
|
@ -53,346 +48,24 @@ impl ConstExt for Const {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct ConstEvalCtx<'a> {
|
|
||||||
pub db: &'a dyn HirDatabase,
|
|
||||||
pub owner: DefWithBodyId,
|
|
||||||
pub exprs: &'a Arena<Expr>,
|
|
||||||
pub pats: &'a Arena<Pat>,
|
|
||||||
pub local_data: HashMap<PatId, ComputedExpr>,
|
|
||||||
infer: &'a InferenceResult,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ConstEvalCtx<'_> {
|
|
||||||
fn expr_ty(&mut self, expr: ExprId) -> Ty {
|
|
||||||
self.infer[expr].clone()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
pub enum ConstEvalError {
|
pub enum ConstEvalError {
|
||||||
NotSupported(&'static str),
|
MirLowerError(MirLowerError),
|
||||||
SemanticError(&'static str),
|
MirEvalError(MirEvalError),
|
||||||
Loop,
|
|
||||||
IncompleteExpr,
|
|
||||||
Panic(String),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
impl From<MirLowerError> for ConstEvalError {
|
||||||
pub enum ComputedExpr {
|
fn from(value: MirLowerError) -> Self {
|
||||||
Literal(Literal),
|
match value {
|
||||||
Enum(String, EnumVariantId, Literal),
|
MirLowerError::ConstEvalError(e) => *e,
|
||||||
Tuple(Box<[ComputedExpr]>),
|
_ => ConstEvalError::MirLowerError(value),
|
||||||
}
|
|
||||||
|
|
||||||
impl Display for ComputedExpr {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
ComputedExpr::Literal(l) => match l {
|
|
||||||
Literal::Int(x, _) => {
|
|
||||||
if *x >= 10 {
|
|
||||||
write!(f, "{x} ({x:#X})")
|
|
||||||
} else {
|
|
||||||
x.fmt(f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Literal::Uint(x, _) => {
|
|
||||||
if *x >= 10 {
|
|
||||||
write!(f, "{x} ({x:#X})")
|
|
||||||
} else {
|
|
||||||
x.fmt(f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Literal::Float(x, _) => x.fmt(f),
|
|
||||||
Literal::Bool(x) => x.fmt(f),
|
|
||||||
Literal::Char(x) => std::fmt::Debug::fmt(x, f),
|
|
||||||
Literal::String(x) => std::fmt::Debug::fmt(x, f),
|
|
||||||
Literal::ByteString(x) => std::fmt::Debug::fmt(x, f),
|
|
||||||
},
|
|
||||||
ComputedExpr::Enum(name, _, _) => name.fmt(f),
|
|
||||||
ComputedExpr::Tuple(t) => {
|
|
||||||
f.write_char('(')?;
|
|
||||||
for x in &**t {
|
|
||||||
x.fmt(f)?;
|
|
||||||
f.write_str(", ")?;
|
|
||||||
}
|
|
||||||
f.write_char(')')
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn scalar_max(scalar: &Scalar) -> i128 {
|
impl From<MirEvalError> for ConstEvalError {
|
||||||
match scalar {
|
fn from(value: MirEvalError) -> Self {
|
||||||
Scalar::Bool => 1,
|
ConstEvalError::MirEvalError(value)
|
||||||
Scalar::Char => u32::MAX as i128,
|
|
||||||
Scalar::Int(x) => match x {
|
|
||||||
IntTy::Isize => isize::MAX as i128,
|
|
||||||
IntTy::I8 => i8::MAX as i128,
|
|
||||||
IntTy::I16 => i16::MAX as i128,
|
|
||||||
IntTy::I32 => i32::MAX as i128,
|
|
||||||
IntTy::I64 => i64::MAX as i128,
|
|
||||||
IntTy::I128 => i128::MAX,
|
|
||||||
},
|
|
||||||
Scalar::Uint(x) => match x {
|
|
||||||
chalk_ir::UintTy::Usize => usize::MAX as i128,
|
|
||||||
chalk_ir::UintTy::U8 => u8::MAX as i128,
|
|
||||||
chalk_ir::UintTy::U16 => u16::MAX as i128,
|
|
||||||
chalk_ir::UintTy::U32 => u32::MAX as i128,
|
|
||||||
chalk_ir::UintTy::U64 => u64::MAX as i128,
|
|
||||||
chalk_ir::UintTy::U128 => i128::MAX, // ignore too big u128 for now
|
|
||||||
},
|
|
||||||
Scalar::Float(_) => 0,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn is_valid(scalar: &Scalar, value: i128) -> bool {
|
|
||||||
if value < 0 {
|
|
||||||
!matches!(scalar, Scalar::Uint(_)) && -scalar_max(scalar) - 1 <= value
|
|
||||||
} else {
|
|
||||||
value <= scalar_max(scalar)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_name(ctx: &mut ConstEvalCtx<'_>, variant: EnumVariantId) -> String {
|
|
||||||
let loc = variant.parent.lookup(ctx.db.upcast());
|
|
||||||
let children = variant.parent.child_source(ctx.db.upcast());
|
|
||||||
let item_tree = loc.id.item_tree(ctx.db.upcast());
|
|
||||||
|
|
||||||
let variant_name = children.value[variant.local_id].name();
|
|
||||||
let enum_name = item_tree[loc.id.value].name.to_string();
|
|
||||||
enum_name + "::" + &variant_name.unwrap().to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn eval_const(
|
|
||||||
expr_id: ExprId,
|
|
||||||
ctx: &mut ConstEvalCtx<'_>,
|
|
||||||
) -> Result<ComputedExpr, ConstEvalError> {
|
|
||||||
let u128_to_i128 = |it: u128| -> Result<i128, ConstEvalError> {
|
|
||||||
it.try_into().map_err(|_| ConstEvalError::NotSupported("u128 is too big"))
|
|
||||||
};
|
|
||||||
|
|
||||||
let expr = &ctx.exprs[expr_id];
|
|
||||||
match expr {
|
|
||||||
Expr::Missing => match ctx.owner {
|
|
||||||
// evaluate the implicit variant index of an enum variant without expression
|
|
||||||
// FIXME: This should return the type of the enum representation
|
|
||||||
DefWithBodyId::VariantId(variant) => {
|
|
||||||
let prev_idx: u32 = variant.local_id.into_raw().into();
|
|
||||||
let prev_idx = prev_idx.checked_sub(1).map(RawIdx::from).map(Idx::from_raw);
|
|
||||||
let value = match prev_idx {
|
|
||||||
Some(local_id) => {
|
|
||||||
let prev_variant = EnumVariantId { local_id, parent: variant.parent };
|
|
||||||
1 + match ctx.db.const_eval_variant(prev_variant)? {
|
|
||||||
ComputedExpr::Literal(Literal::Int(v, _)) => v,
|
|
||||||
ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
|
|
||||||
_ => {
|
|
||||||
return Err(ConstEvalError::NotSupported(
|
|
||||||
"Enum can't contain this kind of value",
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => 0,
|
|
||||||
};
|
|
||||||
Ok(ComputedExpr::Literal(Literal::Int(value, Some(BuiltinInt::I128))))
|
|
||||||
}
|
|
||||||
_ => Err(ConstEvalError::IncompleteExpr),
|
|
||||||
},
|
|
||||||
Expr::Literal(l) => Ok(ComputedExpr::Literal(l.clone())),
|
|
||||||
&Expr::UnaryOp { expr, op } => {
|
|
||||||
let ty = &ctx.expr_ty(expr);
|
|
||||||
let ev = eval_const(expr, ctx)?;
|
|
||||||
match op {
|
|
||||||
hir_def::expr::UnaryOp::Deref => Err(ConstEvalError::NotSupported("deref")),
|
|
||||||
hir_def::expr::UnaryOp::Not => {
|
|
||||||
let v = match ev {
|
|
||||||
ComputedExpr::Literal(Literal::Bool(b)) => {
|
|
||||||
return Ok(ComputedExpr::Literal(Literal::Bool(!b)))
|
|
||||||
}
|
|
||||||
ComputedExpr::Literal(Literal::Int(v, _)) => v,
|
|
||||||
ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
|
|
||||||
_ => return Err(ConstEvalError::NotSupported("this kind of operator")),
|
|
||||||
};
|
|
||||||
let r = match ty.kind(Interner) {
|
|
||||||
TyKind::Scalar(Scalar::Uint(x)) => match x {
|
|
||||||
chalk_ir::UintTy::U8 => !(v as u8) as i128,
|
|
||||||
chalk_ir::UintTy::U16 => !(v as u16) as i128,
|
|
||||||
chalk_ir::UintTy::U32 => !(v as u32) as i128,
|
|
||||||
chalk_ir::UintTy::U64 => !(v as u64) as i128,
|
|
||||||
chalk_ir::UintTy::U128 => {
|
|
||||||
return Err(ConstEvalError::NotSupported("negation of u128"))
|
|
||||||
}
|
|
||||||
chalk_ir::UintTy::Usize => !(v as usize) as i128,
|
|
||||||
},
|
|
||||||
TyKind::Scalar(Scalar::Int(x)) => match x {
|
|
||||||
chalk_ir::IntTy::I8 => !(v as i8) as i128,
|
|
||||||
chalk_ir::IntTy::I16 => !(v as i16) as i128,
|
|
||||||
chalk_ir::IntTy::I32 => !(v as i32) as i128,
|
|
||||||
chalk_ir::IntTy::I64 => !(v as i64) as i128,
|
|
||||||
chalk_ir::IntTy::I128 => !v,
|
|
||||||
chalk_ir::IntTy::Isize => !(v as isize) as i128,
|
|
||||||
},
|
|
||||||
_ => return Err(ConstEvalError::NotSupported("unreachable?")),
|
|
||||||
};
|
|
||||||
Ok(ComputedExpr::Literal(Literal::Int(r, None)))
|
|
||||||
}
|
|
||||||
hir_def::expr::UnaryOp::Neg => {
|
|
||||||
let v = match ev {
|
|
||||||
ComputedExpr::Literal(Literal::Int(v, _)) => v,
|
|
||||||
ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
|
|
||||||
_ => return Err(ConstEvalError::NotSupported("this kind of operator")),
|
|
||||||
};
|
|
||||||
Ok(ComputedExpr::Literal(Literal::Int(
|
|
||||||
v.checked_neg().ok_or_else(|| {
|
|
||||||
ConstEvalError::Panic("overflow in negation".to_string())
|
|
||||||
})?,
|
|
||||||
None,
|
|
||||||
)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
&Expr::BinaryOp { lhs, rhs, op } => {
|
|
||||||
let ty = &ctx.expr_ty(lhs);
|
|
||||||
let lhs = eval_const(lhs, ctx)?;
|
|
||||||
let rhs = eval_const(rhs, ctx)?;
|
|
||||||
let op = op.ok_or(ConstEvalError::IncompleteExpr)?;
|
|
||||||
let v1 = match lhs {
|
|
||||||
ComputedExpr::Literal(Literal::Int(v, _)) => v,
|
|
||||||
ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
|
|
||||||
_ => return Err(ConstEvalError::NotSupported("this kind of operator")),
|
|
||||||
};
|
|
||||||
let v2 = match rhs {
|
|
||||||
ComputedExpr::Literal(Literal::Int(v, _)) => v,
|
|
||||||
ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
|
|
||||||
_ => return Err(ConstEvalError::NotSupported("this kind of operator")),
|
|
||||||
};
|
|
||||||
match op {
|
|
||||||
BinaryOp::ArithOp(b) => {
|
|
||||||
let panic_arith = ConstEvalError::Panic(
|
|
||||||
"attempt to run invalid arithmetic operation".to_string(),
|
|
||||||
);
|
|
||||||
let r = match b {
|
|
||||||
ArithOp::Add => v1.checked_add(v2).ok_or_else(|| panic_arith.clone())?,
|
|
||||||
ArithOp::Mul => v1.checked_mul(v2).ok_or_else(|| panic_arith.clone())?,
|
|
||||||
ArithOp::Sub => v1.checked_sub(v2).ok_or_else(|| panic_arith.clone())?,
|
|
||||||
ArithOp::Div => v1.checked_div(v2).ok_or_else(|| panic_arith.clone())?,
|
|
||||||
ArithOp::Rem => v1.checked_rem(v2).ok_or_else(|| panic_arith.clone())?,
|
|
||||||
ArithOp::Shl => v1
|
|
||||||
.checked_shl(v2.try_into().map_err(|_| panic_arith.clone())?)
|
|
||||||
.ok_or_else(|| panic_arith.clone())?,
|
|
||||||
ArithOp::Shr => v1
|
|
||||||
.checked_shr(v2.try_into().map_err(|_| panic_arith.clone())?)
|
|
||||||
.ok_or_else(|| panic_arith.clone())?,
|
|
||||||
ArithOp::BitXor => v1 ^ v2,
|
|
||||||
ArithOp::BitOr => v1 | v2,
|
|
||||||
ArithOp::BitAnd => v1 & v2,
|
|
||||||
};
|
|
||||||
if let TyKind::Scalar(s) = ty.kind(Interner) {
|
|
||||||
if !is_valid(s, r) {
|
|
||||||
return Err(panic_arith);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(ComputedExpr::Literal(Literal::Int(r, None)))
|
|
||||||
}
|
|
||||||
BinaryOp::LogicOp(_) => Err(ConstEvalError::SemanticError("logic op on numbers")),
|
|
||||||
_ => Err(ConstEvalError::NotSupported("bin op on this operators")),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Expr::Block { statements, tail, .. } => {
|
|
||||||
let mut prev_values = HashMap::<PatId, Option<ComputedExpr>>::default();
|
|
||||||
for statement in &**statements {
|
|
||||||
match *statement {
|
|
||||||
hir_def::expr::Statement::Let { pat: pat_id, initializer, .. } => {
|
|
||||||
let pat = &ctx.pats[pat_id];
|
|
||||||
match pat {
|
|
||||||
Pat::Bind { subpat, .. } if subpat.is_none() => (),
|
|
||||||
_ => {
|
|
||||||
return Err(ConstEvalError::NotSupported("complex patterns in let"))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let value = match initializer {
|
|
||||||
Some(x) => eval_const(x, ctx)?,
|
|
||||||
None => continue,
|
|
||||||
};
|
|
||||||
if !prev_values.contains_key(&pat_id) {
|
|
||||||
let prev = ctx.local_data.insert(pat_id, value);
|
|
||||||
prev_values.insert(pat_id, prev);
|
|
||||||
} else {
|
|
||||||
ctx.local_data.insert(pat_id, value);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
hir_def::expr::Statement::Expr { .. } => {
|
|
||||||
return Err(ConstEvalError::NotSupported("this kind of statement"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let r = match tail {
|
|
||||||
&Some(x) => eval_const(x, ctx),
|
|
||||||
None => Ok(ComputedExpr::Tuple(Box::new([]))),
|
|
||||||
};
|
|
||||||
// clean up local data, so caller will receive the exact map that passed to us
|
|
||||||
for (name, val) in prev_values {
|
|
||||||
match val {
|
|
||||||
Some(x) => ctx.local_data.insert(name, x),
|
|
||||||
None => ctx.local_data.remove(&name),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
r
|
|
||||||
}
|
|
||||||
Expr::Path(p) => {
|
|
||||||
let resolver = resolver_for_expr(ctx.db.upcast(), ctx.owner, expr_id);
|
|
||||||
let pr = resolver
|
|
||||||
.resolve_path_in_value_ns(ctx.db.upcast(), p.mod_path())
|
|
||||||
.ok_or(ConstEvalError::SemanticError("unresolved path"))?;
|
|
||||||
let pr = match pr {
|
|
||||||
ResolveValueResult::ValueNs(v) => v,
|
|
||||||
ResolveValueResult::Partial(..) => {
|
|
||||||
return match ctx
|
|
||||||
.infer
|
|
||||||
.assoc_resolutions_for_expr(expr_id)
|
|
||||||
.ok_or(ConstEvalError::SemanticError("unresolved assoc item"))?
|
|
||||||
.0
|
|
||||||
{
|
|
||||||
hir_def::AssocItemId::FunctionId(_) => {
|
|
||||||
Err(ConstEvalError::NotSupported("assoc function"))
|
|
||||||
}
|
|
||||||
// FIXME use actual impl for trait assoc const
|
|
||||||
hir_def::AssocItemId::ConstId(c) => ctx.db.const_eval(c),
|
|
||||||
hir_def::AssocItemId::TypeAliasId(_) => {
|
|
||||||
Err(ConstEvalError::NotSupported("assoc type alias"))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
match pr {
|
|
||||||
ValueNs::LocalBinding(pat_id) => {
|
|
||||||
let r = ctx
|
|
||||||
.local_data
|
|
||||||
.get(&pat_id)
|
|
||||||
.ok_or(ConstEvalError::NotSupported("Unexpected missing local"))?;
|
|
||||||
Ok(r.clone())
|
|
||||||
}
|
|
||||||
ValueNs::ConstId(id) => ctx.db.const_eval(id),
|
|
||||||
ValueNs::GenericParam(_) => {
|
|
||||||
Err(ConstEvalError::NotSupported("const generic without substitution"))
|
|
||||||
}
|
|
||||||
ValueNs::EnumVariantId(id) => match ctx.db.const_eval_variant(id)? {
|
|
||||||
ComputedExpr::Literal(lit) => {
|
|
||||||
Ok(ComputedExpr::Enum(get_name(ctx, id), id, lit))
|
|
||||||
}
|
|
||||||
_ => Err(ConstEvalError::NotSupported(
|
|
||||||
"Enums can't evalute to anything but numbers",
|
|
||||||
)),
|
|
||||||
},
|
|
||||||
_ => Err(ConstEvalError::NotSupported("path that are not const or local")),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// FIXME: Handle the cast target
|
|
||||||
&Expr::Cast { expr, .. } => match eval_const(expr, ctx)? {
|
|
||||||
ComputedExpr::Enum(_, _, lit) => Ok(ComputedExpr::Literal(lit)),
|
|
||||||
_ => Err(ConstEvalError::NotSupported("Can't cast these types")),
|
|
||||||
},
|
|
||||||
_ => Err(ConstEvalError::NotSupported("This kind of expression")),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -449,68 +122,102 @@ pub fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const {
 .intern(Interner)
 }

+/// Interns a constant scalar with the given type
+pub fn intern_const_ref(db: &dyn HirDatabase, value: &ConstRef, ty: Ty, krate: CrateId) -> Const {
+let bytes = match value {
+ConstRef::Int(i) => {
+// FIXME: We should handle failure of layout better.
+let size = layout_of_ty(db, &ty, krate).map(|x| x.size.bytes_usize()).unwrap_or(16);
+ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
+}
+ConstRef::UInt(i) => {
+let size = layout_of_ty(db, &ty, krate).map(|x| x.size.bytes_usize()).unwrap_or(16);
+ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
+}
+ConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
+ConstRef::Char(c) => {
+ConstScalar::Bytes((*c as u32).to_le_bytes().to_vec(), MemoryMap::default())
+}
+ConstRef::Unknown => ConstScalar::Unknown,
+};
+intern_const_scalar(bytes, ty)
+}

 /// Interns a possibly-unknown target usize
-pub fn usize_const(value: Option<u128>) -> Const {
+pub fn usize_const(db: &dyn HirDatabase, value: Option<u128>, krate: CrateId) -> Const {
-intern_const_scalar(value.map_or(ConstScalar::Unknown, ConstScalar::UInt), TyBuilder::usize())
+intern_const_ref(
+db,
+&value.map_or(ConstRef::Unknown, ConstRef::UInt),
+TyBuilder::usize(),
+krate,
+)
+}

+pub fn try_const_usize(c: &Const) -> Option<u128> {
+match &c.data(Interner).value {
+chalk_ir::ConstValue::BoundVar(_) => None,
+chalk_ir::ConstValue::InferenceVar(_) => None,
+chalk_ir::ConstValue::Placeholder(_) => None,
+chalk_ir::ConstValue::Concrete(c) => match &c.interned {
+ConstScalar::Bytes(x, _) => Some(u128::from_le_bytes(pad16(&x, false))),
+_ => None,
+},
+}
 }

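The interning above stores an integer constant as little-endian bytes truncated to the type's layout size, and `try_const_usize` reads the value back by padding the byte slice to 16 bytes (128 bits). A minimal standalone sketch of that round-trip follows; the helper names here are illustrative, not rust-analyzer's real API.

```rust
// Standalone sketch of the byte round-trip assumed by the diff above:
// write an integer as `size` little-endian bytes, read it back by zero- or
// sign-extending to 16 bytes before converting.

/// Pad a little-endian byte slice to 16 bytes, sign-extending if requested.
fn pad16(b: &[u8], is_signed: bool) -> [u8; 16] {
    let fill = if is_signed && b.last().map_or(false, |&x| x & 0x80 != 0) { 0xFF } else { 0x00 };
    let mut res = [fill; 16];
    res[..b.len()].copy_from_slice(b);
    res
}

/// Truncate `value` to `size` little-endian bytes, as the interner does for a sized int type.
fn to_le_bytes_sized(value: u128, size: usize) -> Vec<u8> {
    value.to_le_bytes()[..size].to_vec()
}

fn main() {
    // A `u8` constant 200 stored in 1 byte, read back as u128.
    let bytes = to_le_bytes_sized(200, 1);
    assert_eq!(u128::from_le_bytes(pad16(&bytes, false)), 200);

    // An `i16` constant -2 stored in 2 bytes; sign extension recovers the value.
    let bytes = (-2i16).to_le_bytes().to_vec();
    assert_eq!(i128::from_le_bytes(pad16(&bytes, true)), -2);
}
```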
 pub(crate) fn const_eval_recover(
 _: &dyn HirDatabase,
 _: &[String],
 _: &ConstId,
-) -> Result<ComputedExpr, ConstEvalError> {
+) -> Result<Const, ConstEvalError> {
-Err(ConstEvalError::Loop)
+Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
 }

-pub(crate) fn const_eval_variant_recover(
+pub(crate) fn const_eval_discriminant_recover(
 _: &dyn HirDatabase,
 _: &[String],
 _: &EnumVariantId,
-) -> Result<ComputedExpr, ConstEvalError> {
+) -> Result<i128, ConstEvalError> {
-Err(ConstEvalError::Loop)
+Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
 }

-pub(crate) fn const_eval_variant_query(
+pub(crate) fn const_eval_query(
 db: &dyn HirDatabase,
 const_id: ConstId,
-) -> Result<ComputedExpr, ConstEvalError> {
+) -> Result<Const, ConstEvalError> {
 let def = const_id.into();
-let body = db.body(def);
+let body = db.mir_body(def)?;
-let infer = &db.infer(def);
+let c = interpret_mir(db, &body, false)?;
-let result = eval_const(
+Ok(c)
-body.body_expr,
-&mut ConstEvalCtx {
-db,
-owner: const_id.into(),
-exprs: &body.exprs,
-pats: &body.pats,
-local_data: HashMap::default(),
-infer,
-},
-);
-result
 }

-pub(crate) fn const_eval_query_variant(
+pub(crate) fn const_eval_discriminant_variant(
 db: &dyn HirDatabase,
 variant_id: EnumVariantId,
-) -> Result<ComputedExpr, ConstEvalError> {
+) -> Result<i128, ConstEvalError> {
 let def = variant_id.into();
 let body = db.body(def);
-let infer = &db.infer(def);
+if body.exprs[body.body_expr] == Expr::Missing {
-eval_const(
+let prev_idx: u32 = variant_id.local_id.into_raw().into();
-body.body_expr,
+let prev_idx = prev_idx.checked_sub(1).map(RawIdx::from).map(Idx::from_raw);
-&mut ConstEvalCtx {
+let value = match prev_idx {
-db,
+Some(local_id) => {
-owner: def,
+let prev_variant = EnumVariantId { local_id, parent: variant_id.parent };
-exprs: &body.exprs,
+1 + db.const_eval_discriminant(prev_variant)?
-pats: &body.pats,
+}
-local_data: HashMap::default(),
+_ => 0,
-infer,
+};
-},
+return Ok(value);
-)
+}
+let mir_body = db.mir_body(def)?;
+let c = interpret_mir(db, &mir_body, false)?;
+let c = try_const_usize(&c).unwrap() as i128;
+Ok(c)
 }

+// FIXME: Ideally constants in const eval should have separate body (issue #7434), and this function should
+// get an `InferenceResult` instead of an `InferenceContext`. And we should remove `ctx.clone().resolve_all()` here
+// and make this function private. See the fixme comment on `InferenceContext::resolve_all`.
 pub(crate) fn eval_to_const(
 expr: Idx<Expr>,
 mode: ParamLoweringMode,
@@ -518,28 +225,20 @@ pub(crate) fn eval_to_const(
 args: impl FnOnce() -> Generics,
 debruijn: DebruijnIndex,
 ) -> Const {
+let db = ctx.db;
 if let Expr::Path(p) = &ctx.body.exprs[expr] {
-let db = ctx.db;
 let resolver = &ctx.resolver;
 if let Some(c) = path_to_const(db, resolver, p.mod_path(), mode, args, debruijn) {
 return c;
 }
 }
-let body = ctx.body.clone();
+let infer = ctx.clone().resolve_all();
-let mut ctx = ConstEvalCtx {
+if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
-db: ctx.db,
+if let Ok(result) = interpret_mir(db, &mir_body, true) {
-owner: ctx.owner,
+return result;
-exprs: &body.exprs,
+}
-pats: &body.pats,
+}
-local_data: HashMap::default(),
+unknown_const(infer[expr].clone())
-infer: &ctx.result,
-};
-let computed_expr = eval_const(expr, &mut ctx);
-let const_scalar = match computed_expr {
-Ok(ComputedExpr::Literal(literal)) => literal.into(),
-_ => ConstScalar::Unknown,
-};
-intern_const_scalar(const_scalar, TyBuilder::usize())
 }

 #[cfg(test)]

File diff suppressed because it is too large
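The discriminant logic in `const_eval_discriminant_variant` above implements the usual rule: a variant with no explicit body takes the previous variant's discriminant plus one, and the first variant defaults to zero. A tiny standalone model of that rule (toy types, not rust-analyzer's):

```rust
// Minimal model of the discriminant assignment rule used in the diff above:
// explicit value wins, otherwise previous + 1, and the first variant is 0.

fn discriminants(explicit: &[Option<i128>]) -> Vec<i128> {
    let mut out = Vec::with_capacity(explicit.len());
    for (i, e) in explicit.iter().enumerate() {
        let value = match e {
            Some(v) => *v,       // explicit `Variant = v`
            None if i == 0 => 0, // first variant defaults to 0
            None => out[i - 1] + 1,
        };
        out.push(value);
    }
    out
}

fn main() {
    // enum E { A, B = 10, C } -> [0, 10, 11]
    assert_eq!(discriminants(&[None, Some(10), None]), vec![0, 10, 11]);
}
```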
@@ -16,10 +16,12 @@ use smallvec::SmallVec;

 use crate::{
 chalk_db,
-consteval::{ComputedExpr, ConstEvalError},
+consteval::ConstEvalError,
 method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
-Binders, CallableDefId, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner, PolyFnSig,
+mir::{BorrowckResult, MirBody, MirLowerError},
-QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty, TyDefId, ValueTyDefId,
+Binders, CallableDefId, Const, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner,
+PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty, TyDefId,
+ValueTyDefId,
 };
 use hir_expand::name::Name;

@@ -32,6 +34,13 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
 #[salsa::invoke(crate::infer::infer_query)]
 fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;

+#[salsa::invoke(crate::mir::mir_body_query)]
+#[salsa::cycle(crate::mir::mir_body_recover)]
+fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
+
+#[salsa::invoke(crate::mir::borrowck_query)]
+fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<BorrowckResult>, MirLowerError>;
+
 #[salsa::invoke(crate::lower::ty_query)]
 #[salsa::cycle(crate::lower::ty_recover)]
 fn ty(&self, def: TyDefId) -> Binders<Ty>;
@@ -46,13 +55,13 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
 #[salsa::invoke(crate::lower::const_param_ty_query)]
 fn const_param_ty(&self, def: ConstParamId) -> Ty;

-#[salsa::invoke(crate::consteval::const_eval_variant_query)]
+#[salsa::invoke(crate::consteval::const_eval_query)]
 #[salsa::cycle(crate::consteval::const_eval_recover)]
-fn const_eval(&self, def: ConstId) -> Result<ComputedExpr, ConstEvalError>;
+fn const_eval(&self, def: ConstId) -> Result<Const, ConstEvalError>;

-#[salsa::invoke(crate::consteval::const_eval_query_variant)]
+#[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
-#[salsa::cycle(crate::consteval::const_eval_variant_recover)]
+#[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
-fn const_eval_variant(&self, def: EnumVariantId) -> Result<ComputedExpr, ConstEvalError>;
+fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;

 #[salsa::invoke(crate::lower::impl_trait_query)]
 fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;

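The `#[salsa::cycle(...)]` attributes above route re-entrant evaluation (e.g. `const A: _ = B; const B: _ = A;`) to the `*_recover` functions instead of looping forever. Below is a deliberately simplified, self-contained model of that idea (a memoized query with cycle detection); it is not salsa's real API and the names are made up for illustration.

```rust
// Simplified model of a memoized query with cycle recovery, as assumed by the
// `#[salsa::cycle(...)]` annotations in the diff above. Not salsa's actual API.

use std::collections::HashMap;

#[derive(Debug, Clone, PartialEq)]
enum EvalError {
    Cycle,
}

struct Db {
    cache: HashMap<u32, Result<i128, EvalError>>,
    in_progress: Vec<u32>,
    // Toy "definitions": const K evaluates to (value of deps[K]) + 1, or 0 if absent.
    deps: HashMap<u32, u32>,
}

impl Db {
    fn const_eval(&mut self, id: u32) -> Result<i128, EvalError> {
        if let Some(r) = self.cache.get(&id) {
            return r.clone();
        }
        if self.in_progress.contains(&id) {
            // This is where a recover function would produce the cycle error.
            return Err(EvalError::Cycle);
        }
        self.in_progress.push(id);
        let dep = self.deps.get(&id).copied();
        let res = match dep {
            Some(dep) => self.const_eval(dep).map(|v| v + 1),
            None => Ok(0),
        };
        self.in_progress.pop();
        self.cache.insert(id, res.clone());
        res
    }
}

fn main() {
    // Two constants defined in terms of each other form a cycle.
    let mut db = Db {
        cache: HashMap::new(),
        in_progress: Vec::new(),
        deps: HashMap::from([(1, 2), (2, 1)]),
    };
    assert_eq!(db.const_eval(1), Err(EvalError::Cycle));
}
```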
@@ -178,6 +178,7 @@ impl<'a> DeclValidator<'a> {
 AttrDefId::StaticId(sid) => Some(sid.lookup(self.db.upcast()).container.into()),
 AttrDefId::ConstId(cid) => Some(cid.lookup(self.db.upcast()).container.into()),
 AttrDefId::TraitId(tid) => Some(tid.lookup(self.db.upcast()).container.into()),
+AttrDefId::TraitAliasId(taid) => Some(taid.lookup(self.db.upcast()).container.into()),
 AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()),
 AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()),
 // These warnings should not explore macro definitions at all
@@ -234,8 +235,8 @@ impl<'a> DeclValidator<'a> {
 let pats_replacements = body
 .pats
 .iter()
-.filter_map(|(id, pat)| match pat {
+.filter_map(|(pat_id, pat)| match pat {
-Pat::Bind { name, .. } => Some((id, name)),
+Pat::Bind { id, .. } => Some((pat_id, &body.bindings[*id].name)),
 _ => None,
 })
 .filter_map(|(id, bind_name)| {

@@ -5,11 +5,11 @@
 use std::fmt;
 use std::sync::Arc;

+use either::Either;
 use hir_def::lang_item::LangItem;
 use hir_def::{resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule};
 use hir_def::{ItemContainerId, Lookup};
 use hir_expand::name;
-use itertools::Either;
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
 use typed_arena::Arena;
@@ -84,7 +84,7 @@ impl ExprValidator {

 match expr {
 Expr::Match { expr, arms } => {
-self.validate_match(id, *expr, arms, db, self.infer.clone());
+self.validate_match(id, *expr, arms, db);
 }
 Expr::Call { .. } | Expr::MethodCall { .. } => {
 self.validate_call(db, id, expr, &mut filter_map_next_checker);
@@ -147,16 +147,15 @@ impl ExprValidator {

 fn validate_match(
 &mut self,
-id: ExprId,
 match_expr: ExprId,
+scrutinee_expr: ExprId,
 arms: &[MatchArm],
 db: &dyn HirDatabase,
-infer: Arc<InferenceResult>,
 ) {
 let body = db.body(self.owner);

-let match_expr_ty = &infer[match_expr];
+let scrut_ty = &self.infer[scrutinee_expr];
-if match_expr_ty.is_unknown() {
+if scrut_ty.is_unknown() {
 return;
 }

@@ -166,23 +165,23 @@ impl ExprValidator {
 let mut m_arms = Vec::with_capacity(arms.len());
 let mut has_lowering_errors = false;
 for arm in arms {
-if let Some(pat_ty) = infer.type_of_pat.get(arm.pat) {
+if let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) {
 // We only include patterns whose type matches the type
-// of the match expression. If we had an InvalidMatchArmPattern
+// of the scrutinee expression. If we had an InvalidMatchArmPattern
 // diagnostic or similar we could raise that in an else
 // block here.
 //
 // When comparing the types, we also have to consider that rustc
-// will automatically de-reference the match expression type if
+// will automatically de-reference the scrutinee expression type if
 // necessary.
 //
 // FIXME we should use the type checker for this.
-if (pat_ty == match_expr_ty
+if (pat_ty == scrut_ty
-|| match_expr_ty
+|| scrut_ty
 .as_reference()
 .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
 .unwrap_or(false))
-&& types_of_subpatterns_do_match(arm.pat, &body, &infer)
+&& types_of_subpatterns_do_match(arm.pat, &body, &self.infer)
 {
 // If we had a NotUsefulMatchArm diagnostic, we could
 // check the usefulness of each pattern as we added it
@@ -206,7 +205,7 @@ impl ExprValidator {
 return;
 }

-let report = compute_match_usefulness(&cx, &m_arms, match_expr_ty);
+let report = compute_match_usefulness(&cx, &m_arms, scrut_ty);

 // FIXME Report unreacheble arms
 // https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200
@@ -214,8 +213,8 @@ impl ExprValidator {
 let witnesses = report.non_exhaustiveness_witnesses;
 if !witnesses.is_empty() {
 self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms {
-match_expr: id,
+match_expr,
-uncovered_patterns: missing_match_arms(&cx, match_expr_ty, witnesses, arms),
+uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, arms),
 });
 }
 }
@@ -379,7 +378,7 @@ fn missing_match_arms<'p>(
 arms: &[MatchArm],
 ) -> String {
 struct DisplayWitness<'a, 'p>(&'a DeconstructedPat<'p>, &'a MatchCheckCtx<'a, 'p>);
-impl<'a, 'p> fmt::Display for DisplayWitness<'a, 'p> {
+impl fmt::Display for DisplayWitness<'_, '_> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 let DisplayWitness(witness, cx) = *self;
 let pat = witness.to_pat(cx);

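The arm filter above accepts a pattern if its type equals the scrutinee type, or if the scrutinee is a reference whose pointee equals the pattern type, mirroring rustc's automatic deref of the scrutinee. A toy model of that comparison (a made-up `Ty`, not rust-analyzer's type):

```rust
// Toy model of the "pattern type matches scrutinee type, allowing one deref" filter.

#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Bool,
    Ref(Box<Ty>),
}

fn arm_pattern_matches_scrutinee(pat_ty: &Ty, scrut_ty: &Ty) -> bool {
    if pat_ty == scrut_ty {
        return true;
    }
    // Allow `match &x { pat_of_x => .. }`: compare against the scrutinee's pointee.
    match scrut_ty {
        Ty::Ref(inner) => &**inner == pat_ty,
        _ => false,
    }
}

fn main() {
    // `match &true { true => .. }`: pattern `bool` against scrutinee `&bool` is accepted.
    assert!(arm_pattern_matches_scrutinee(&Ty::Bool, &Ty::Ref(Box::new(Ty::Bool))));
    // The reverse direction is not: pattern `&bool` against scrutinee `bool`.
    assert!(!arm_pattern_matches_scrutinee(&Ty::Ref(Box::new(Ty::Bool)), &Ty::Bool));
}
```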
@@ -146,8 +146,9 @@ impl<'a> PatCtxt<'a> {
 PatKind::Leaf { subpatterns }
 }

-hir_def::expr::Pat::Bind { ref name, subpat, .. } => {
+hir_def::expr::Pat::Bind { id, subpat, .. } => {
 let bm = self.infer.pat_binding_modes[&pat];
+let name = &self.body.bindings[id].name;
 match (bm, ty.kind(Interner)) {
 (BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
 (BindingMode::Ref(_), _) => {

@@ -94,8 +94,10 @@ fn walk_unsafe(
 unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
 }
 }
-Expr::Unsafe { body: child } => {
+Expr::Unsafe { .. } => {
-return walk_unsafe(db, infer, def, body, *child, true, unsafe_expr_cb);
+return expr.walk_child_exprs(|child| {
+walk_unsafe(db, infer, def, body, child, true, unsafe_expr_cb);
+});
 }
 _ => {}
 }

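The change above makes entering an `unsafe { .. }` block walk every child expression with the inside-unsafe flag set, rather than recursing into a single stored body expression. A small self-contained sketch of that traversal pattern on a toy expression tree (none of these types are rust-analyzer's):

```rust
// Toy sketch of walking an expression tree and tracking `inside_unsafe_block`.

enum Expr {
    UnsafeBlock(Vec<Expr>),
    Deref(Box<Expr>),
    RawPtrUse, // stands in for an operation that requires `unsafe`
}

fn walk(expr: &Expr, inside_unsafe: bool, missing_unsafe: &mut Vec<&'static str>) {
    match expr {
        Expr::RawPtrUse => {
            if !inside_unsafe {
                missing_unsafe.push("raw pointer operation outside unsafe block");
            }
        }
        Expr::Deref(inner) => walk(inner, inside_unsafe, missing_unsafe),
        // Every child of an unsafe block is walked with the flag set.
        Expr::UnsafeBlock(children) => {
            for child in children {
                walk(child, true, missing_unsafe);
            }
        }
    }
}

fn main() {
    let mut diags = Vec::new();
    walk(&Expr::UnsafeBlock(vec![Expr::RawPtrUse]), false, &mut diags);
    assert!(diags.is_empty());
    walk(&Expr::Deref(Box::new(Expr::RawPtrUse)), false, &mut diags);
    assert_eq!(diags.len(), 1);
}
```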
@ -5,8 +5,9 @@
|
||||||
use std::fmt::{self, Debug};
|
use std::fmt::{self, Debug};
|
||||||
|
|
||||||
use base_db::CrateId;
|
use base_db::CrateId;
|
||||||
use chalk_ir::BoundVar;
|
use chalk_ir::{BoundVar, TyKind};
|
||||||
use hir_def::{
|
use hir_def::{
|
||||||
|
adt::VariantData,
|
||||||
body,
|
body,
|
||||||
db::DefDatabase,
|
db::DefDatabase,
|
||||||
find_path,
|
find_path,
|
||||||
|
@ -14,9 +15,9 @@ use hir_def::{
|
||||||
item_scope::ItemInNs,
|
item_scope::ItemInNs,
|
||||||
lang_item::{LangItem, LangItemTarget},
|
lang_item::{LangItem, LangItemTarget},
|
||||||
path::{Path, PathKind},
|
path::{Path, PathKind},
|
||||||
type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef},
|
type_ref::{TraitBoundModifier, TypeBound, TypeRef},
|
||||||
visibility::Visibility,
|
visibility::Visibility,
|
||||||
HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, TraitId,
|
HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId,
|
||||||
};
|
};
|
||||||
use hir_expand::{hygiene::Hygiene, name::Name};
|
use hir_expand::{hygiene::Hygiene, name::Name};
|
||||||
use intern::{Internable, Interned};
|
use intern::{Internable, Interned};
|
||||||
|
@ -25,14 +26,17 @@ use smallvec::SmallVec;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
db::HirDatabase,
|
db::HirDatabase,
|
||||||
from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, lt_from_placeholder_idx,
|
from_assoc_type_id, from_foreign_def_id, from_placeholder_idx,
|
||||||
|
layout::layout_of_ty,
|
||||||
|
lt_from_placeholder_idx,
|
||||||
mapping::from_chalk,
|
mapping::from_chalk,
|
||||||
|
mir::pad16,
|
||||||
primitive, to_assoc_type_id,
|
primitive, to_assoc_type_id,
|
||||||
utils::{self, generics},
|
utils::{self, generics},
|
||||||
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstValue, DomainGoal,
|
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstScalar, ConstValue,
|
||||||
GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives, Mutability,
|
DomainGoal, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives,
|
||||||
OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar, Substitution, TraitRef,
|
MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar,
|
||||||
TraitRefExt, Ty, TyExt, TyKind, WhereClause,
|
Substitution, TraitRef, TraitRefExt, Ty, TyExt, WhereClause,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub trait HirWrite: fmt::Write {
|
pub trait HirWrite: fmt::Write {
|
||||||
|
@ -362,20 +366,176 @@ impl HirDisplay for GenericArg {
|
||||||
impl HirDisplay for Const {
|
impl HirDisplay for Const {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
let data = self.interned();
|
let data = self.interned();
|
||||||
match data.value {
|
match &data.value {
|
||||||
ConstValue::BoundVar(idx) => idx.hir_fmt(f),
|
ConstValue::BoundVar(idx) => idx.hir_fmt(f),
|
||||||
ConstValue::InferenceVar(..) => write!(f, "#c#"),
|
ConstValue::InferenceVar(..) => write!(f, "#c#"),
|
||||||
ConstValue::Placeholder(idx) => {
|
ConstValue::Placeholder(idx) => {
|
||||||
let id = from_placeholder_idx(f.db, idx);
|
let id = from_placeholder_idx(f.db, *idx);
|
||||||
let generics = generics(f.db.upcast(), id.parent);
|
let generics = generics(f.db.upcast(), id.parent);
|
||||||
let param_data = &generics.params.type_or_consts[id.local_id];
|
let param_data = &generics.params.type_or_consts[id.local_id];
|
||||||
write!(f, "{}", param_data.name().unwrap())
|
write!(f, "{}", param_data.name().unwrap())
|
||||||
}
|
}
|
||||||
ConstValue::Concrete(c) => write!(f, "{}", c.interned),
|
ConstValue::Concrete(c) => match &c.interned {
|
||||||
|
ConstScalar::Bytes(b, m) => render_const_scalar(f, &b, m, &data.ty),
|
||||||
|
ConstScalar::Unknown => f.write_char('_'),
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct HexifiedConst(pub Const);
|
||||||
|
|
||||||
|
impl HirDisplay for HexifiedConst {
|
||||||
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
|
let data = &self.0.data(Interner);
|
||||||
|
if let TyKind::Scalar(s) = data.ty.kind(Interner) {
|
||||||
|
if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) {
|
||||||
|
if let ConstValue::Concrete(c) = &data.value {
|
||||||
|
if let ConstScalar::Bytes(b, m) = &c.interned {
|
||||||
|
let value = u128::from_le_bytes(pad16(b, false));
|
||||||
|
if value >= 10 {
|
||||||
|
render_const_scalar(f, &b, m, &data.ty)?;
|
||||||
|
return write!(f, " ({:#X})", value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.0.hir_fmt(f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn render_const_scalar(
|
||||||
|
f: &mut HirFormatter<'_>,
|
||||||
|
b: &[u8],
|
||||||
|
memory_map: &MemoryMap,
|
||||||
|
ty: &Ty,
|
||||||
|
) -> Result<(), HirDisplayError> {
|
||||||
|
match ty.kind(Interner) {
|
||||||
|
chalk_ir::TyKind::Scalar(s) => match s {
|
||||||
|
Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
|
||||||
|
Scalar::Char => {
|
||||||
|
let x = u128::from_le_bytes(pad16(b, false)) as u32;
|
||||||
|
let Ok(c) = char::try_from(x) else {
|
||||||
|
return f.write_str("<unicode-error>");
|
||||||
|
};
|
||||||
|
write!(f, "{c:?}")
|
||||||
|
}
|
||||||
|
Scalar::Int(_) => {
|
||||||
|
let x = i128::from_le_bytes(pad16(b, true));
|
||||||
|
write!(f, "{x}")
|
||||||
|
}
|
||||||
|
Scalar::Uint(_) => {
|
||||||
|
let x = u128::from_le_bytes(pad16(b, false));
|
||||||
|
write!(f, "{x}")
|
||||||
|
}
|
||||||
|
Scalar::Float(fl) => match fl {
|
||||||
|
chalk_ir::FloatTy::F32 => {
|
||||||
|
let x = f32::from_le_bytes(b.try_into().unwrap());
|
||||||
|
write!(f, "{x:?}")
|
||||||
|
}
|
||||||
|
chalk_ir::FloatTy::F64 => {
|
||||||
|
let x = f64::from_le_bytes(b.try_into().unwrap());
|
||||||
|
write!(f, "{x:?}")
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
chalk_ir::TyKind::Ref(_, _, t) => match t.kind(Interner) {
|
||||||
|
chalk_ir::TyKind::Str => {
|
||||||
|
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
|
||||||
|
let bytes = memory_map.0.get(&addr).map(|x| &**x).unwrap_or(&[]);
|
||||||
|
let s = std::str::from_utf8(bytes).unwrap_or("<utf8-error>");
|
||||||
|
write!(f, "{s:?}")
|
||||||
|
}
|
||||||
|
_ => f.write_str("<ref-not-supported>"),
|
||||||
|
},
|
||||||
|
chalk_ir::TyKind::Tuple(_, subst) => {
|
||||||
|
// FIXME: Remove this line. If the target data layout is independent
|
||||||
|
// of the krate, the `db.target_data_layout` and its callers like `layout_of_ty` don't need
|
||||||
|
// to get krate. Otherwise, we need to get krate from the final callers of the hir display
|
||||||
|
// infrastructure and have it here as a field on `f`.
|
||||||
|
let krate = *f.db.crate_graph().crates_in_topological_order().last().unwrap();
|
||||||
|
let Ok(layout) = layout_of_ty(f.db, ty, krate) else {
|
||||||
|
return f.write_str("<layout-error>");
|
||||||
|
};
|
||||||
|
f.write_str("(")?;
|
||||||
|
let mut first = true;
|
||||||
|
for (id, ty) in subst.iter(Interner).enumerate() {
|
||||||
|
if first {
|
||||||
|
first = false;
|
||||||
|
} else {
|
||||||
|
f.write_str(", ")?;
|
||||||
|
}
|
||||||
|
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
|
||||||
|
let offset = layout.fields.offset(id).bytes_usize();
|
||||||
|
let Ok(layout) = layout_of_ty(f.db, &ty, krate) else {
|
||||||
|
f.write_str("<layout-error>")?;
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
let size = layout.size.bytes_usize();
|
||||||
|
render_const_scalar(f, &b[offset..offset + size], memory_map, &ty)?;
|
||||||
|
}
|
||||||
|
f.write_str(")")
|
||||||
|
}
|
||||||
|
chalk_ir::TyKind::Adt(adt, subst) => match adt.0 {
|
||||||
|
hir_def::AdtId::StructId(s) => {
|
||||||
|
let data = f.db.struct_data(s);
|
||||||
|
let Ok(layout) = f.db.layout_of_adt(adt.0, subst.clone()) else {
|
||||||
|
return f.write_str("<layout-error>");
|
||||||
|
};
|
||||||
|
match data.variant_data.as_ref() {
|
||||||
|
VariantData::Record(fields) | VariantData::Tuple(fields) => {
|
||||||
|
let field_types = f.db.field_types(s.into());
|
||||||
|
let krate = adt.0.module(f.db.upcast()).krate();
|
||||||
|
let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| {
|
||||||
|
let offset = layout
|
||||||
|
.fields
|
||||||
|
.offset(u32::from(id.into_raw()) as usize)
|
||||||
|
.bytes_usize();
|
||||||
|
let ty = field_types[id].clone().substitute(Interner, subst);
|
||||||
|
let Ok(layout) = layout_of_ty(f.db, &ty, krate) else {
|
||||||
|
return f.write_str("<layout-error>");
|
||||||
|
};
|
||||||
|
let size = layout.size.bytes_usize();
|
||||||
|
render_const_scalar(f, &b[offset..offset + size], memory_map, &ty)
|
||||||
|
};
|
||||||
|
let mut it = fields.iter();
|
||||||
|
if matches!(data.variant_data.as_ref(), VariantData::Record(_)) {
|
||||||
|
write!(f, "{} {{", data.name)?;
|
||||||
|
if let Some((id, data)) = it.next() {
|
||||||
|
write!(f, " {}: ", data.name)?;
|
||||||
|
render_field(f, id)?;
|
||||||
|
}
|
||||||
|
for (id, data) in it {
|
||||||
|
write!(f, ", {}: ", data.name)?;
|
||||||
|
render_field(f, id)?;
|
||||||
|
}
|
||||||
|
write!(f, " }}")?;
|
||||||
|
} else {
|
||||||
|
let mut it = it.map(|x| x.0);
|
||||||
|
write!(f, "{}(", data.name)?;
|
||||||
|
if let Some(id) = it.next() {
|
||||||
|
render_field(f, id)?;
|
||||||
|
}
|
||||||
|
for id in it {
|
||||||
|
write!(f, ", ")?;
|
||||||
|
render_field(f, id)?;
|
||||||
|
}
|
||||||
|
write!(f, ")")?;
|
||||||
|
}
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
VariantData::Unit => write!(f, "{}", data.name),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
hir_def::AdtId::UnionId(u) => write!(f, "{}", f.db.union_data(u).name),
|
||||||
|
hir_def::AdtId::EnumId(_) => f.write_str("<enum-not-supported>"),
|
||||||
|
},
|
||||||
|
chalk_ir::TyKind::FnDef(..) => ty.hir_fmt(f),
|
||||||
|
_ => f.write_str("<not-supported>"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl HirDisplay for BoundVar {
|
impl HirDisplay for BoundVar {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write!(f, "?{}.{}", self.debruijn.depth(), self.index)
|
write!(f, "?{}.{}", self.debruijn.depth(), self.index)
|
||||||
|
@ -614,8 +774,9 @@ impl HirDisplay for Ty {
|
||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
if let Some(ConstValue::Concrete(c)) =
|
if let Some(ConstValue::Concrete(c)) = parameter
|
||||||
parameter.constant(Interner).map(|x| x.data(Interner).value)
|
.constant(Interner)
|
||||||
|
.map(|x| &x.data(Interner).value)
|
||||||
{
|
{
|
||||||
if c.interned == ConstScalar::Unknown {
|
if c.interned == ConstScalar::Unknown {
|
||||||
return true;
|
return true;
|
||||||
|
|
|
@ -17,11 +17,12 @@ use std::ops::Index;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags};
|
use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags};
|
||||||
|
use either::Either;
|
||||||
use hir_def::{
|
use hir_def::{
|
||||||
body::Body,
|
body::Body,
|
||||||
builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
|
builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
|
||||||
data::{ConstData, StaticData},
|
data::{ConstData, StaticData},
|
||||||
expr::{BindingAnnotation, ExprId, ExprOrPatId, PatId},
|
expr::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, PatId},
|
||||||
lang_item::{LangItem, LangItemTarget},
|
lang_item::{LangItem, LangItemTarget},
|
||||||
layout::Integer,
|
layout::Integer,
|
||||||
path::Path,
|
path::Path,
|
||||||
|
@ -30,10 +31,9 @@ use hir_def::{
|
||||||
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule,
|
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule,
|
||||||
ItemContainerId, Lookup, TraitId, TypeAliasId, VariantId,
|
ItemContainerId, Lookup, TraitId, TypeAliasId, VariantId,
|
||||||
};
|
};
|
||||||
use hir_expand::name::name;
|
use hir_expand::name::{name, Name};
|
||||||
use itertools::Either;
|
|
||||||
use la_arena::ArenaMap;
|
use la_arena::ArenaMap;
|
||||||
use rustc_hash::FxHashMap;
|
use rustc_hash::{FxHashMap, FxHashSet};
|
||||||
use stdx::always;
|
use stdx::always;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
@ -66,8 +66,10 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
|
||||||
let mut ctx = InferenceContext::new(db, def, &body, resolver);
|
let mut ctx = InferenceContext::new(db, def, &body, resolver);
|
||||||
|
|
||||||
match def {
|
match def {
|
||||||
|
DefWithBodyId::FunctionId(f) => {
|
||||||
|
ctx.collect_fn(f);
|
||||||
|
}
|
||||||
DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
|
DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
|
||||||
DefWithBodyId::FunctionId(f) => ctx.collect_fn(f),
|
|
||||||
DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
|
DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
|
||||||
DefWithBodyId::VariantId(v) => {
|
DefWithBodyId::VariantId(v) => {
|
||||||
ctx.return_ty = TyBuilder::builtin(match db.enum_data(v.parent).variant_body_type() {
|
ctx.return_ty = TyBuilder::builtin(match db.enum_data(v.parent).variant_body_type() {
|
||||||
|
@ -144,44 +146,6 @@ impl Default for BindingMode {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Used to generalize patterns and assignee expressions.
|
|
||||||
trait PatLike: Into<ExprOrPatId> + Copy {
|
|
||||||
type BindingMode: Copy;
|
|
||||||
|
|
||||||
fn infer(
|
|
||||||
this: &mut InferenceContext<'_>,
|
|
||||||
id: Self,
|
|
||||||
expected_ty: &Ty,
|
|
||||||
default_bm: Self::BindingMode,
|
|
||||||
) -> Ty;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PatLike for ExprId {
|
|
||||||
type BindingMode = ();
|
|
||||||
|
|
||||||
fn infer(
|
|
||||||
this: &mut InferenceContext<'_>,
|
|
||||||
id: Self,
|
|
||||||
expected_ty: &Ty,
|
|
||||||
_: Self::BindingMode,
|
|
||||||
) -> Ty {
|
|
||||||
this.infer_assignee_expr(id, expected_ty)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PatLike for PatId {
|
|
||||||
type BindingMode = BindingMode;
|
|
||||||
|
|
||||||
fn infer(
|
|
||||||
this: &mut InferenceContext<'_>,
|
|
||||||
id: Self,
|
|
||||||
expected_ty: &Ty,
|
|
||||||
default_bm: Self::BindingMode,
|
|
||||||
) -> Ty {
|
|
||||||
this.infer_pat(id, expected_ty, default_bm)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub(crate) struct InferOk<T> {
|
pub(crate) struct InferOk<T> {
|
||||||
value: T,
|
value: T,
|
||||||
|
@ -200,11 +164,45 @@ pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
pub enum InferenceDiagnostic {
|
pub enum InferenceDiagnostic {
|
||||||
NoSuchField { expr: ExprId },
|
NoSuchField {
|
||||||
PrivateField { expr: ExprId, field: FieldId },
|
expr: ExprId,
|
||||||
PrivateAssocItem { id: ExprOrPatId, item: AssocItemId },
|
},
|
||||||
BreakOutsideOfLoop { expr: ExprId, is_break: bool },
|
PrivateField {
|
||||||
MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize },
|
expr: ExprId,
|
||||||
|
field: FieldId,
|
||||||
|
},
|
||||||
|
PrivateAssocItem {
|
||||||
|
id: ExprOrPatId,
|
||||||
|
item: AssocItemId,
|
||||||
|
},
|
||||||
|
UnresolvedField {
|
||||||
|
expr: ExprId,
|
||||||
|
receiver: Ty,
|
||||||
|
name: Name,
|
||||||
|
method_with_same_name_exists: bool,
|
||||||
|
},
|
||||||
|
UnresolvedMethodCall {
|
||||||
|
expr: ExprId,
|
||||||
|
receiver: Ty,
|
||||||
|
name: Name,
|
||||||
|
/// Contains the type the field resolves to
|
||||||
|
field_with_same_name: Option<Ty>,
|
||||||
|
},
|
||||||
|
// FIXME: Make this proper
|
||||||
|
BreakOutsideOfLoop {
|
||||||
|
expr: ExprId,
|
||||||
|
is_break: bool,
|
||||||
|
bad_value_break: bool,
|
||||||
|
},
|
||||||
|
MismatchedArgCount {
|
||||||
|
call_expr: ExprId,
|
||||||
|
expected: usize,
|
||||||
|
found: usize,
|
||||||
|
},
|
||||||
|
ExpectedFunction {
|
||||||
|
call_expr: ExprId,
|
||||||
|
found: Ty,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A mismatch between an expected and an inferred type.
|
/// A mismatch between an expected and an inferred type.
|
||||||
|
@ -293,8 +291,10 @@ pub enum Adjust {
|
||||||
/// call, with the signature `&'a T -> &'a U` or `&'a mut T -> &'a mut U`.
|
/// call, with the signature `&'a T -> &'a U` or `&'a mut T -> &'a mut U`.
|
||||||
/// The target type is `U` in both cases, with the region and mutability
|
/// The target type is `U` in both cases, with the region and mutability
|
||||||
/// being those shared by both the receiver and the returned reference.
|
/// being those shared by both the receiver and the returned reference.
|
||||||
|
///
|
||||||
|
/// Mutability is `None` when we are not sure.
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||||
pub struct OverloadedDeref(pub Mutability);
|
pub struct OverloadedDeref(pub Option<Mutability>);
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||||
pub enum AutoBorrow {
|
pub enum AutoBorrow {
|
||||||
|
@ -354,7 +354,10 @@ pub struct InferenceResult {
|
||||||
/// **Note**: When a pattern type is resolved it may still contain
|
/// **Note**: When a pattern type is resolved it may still contain
|
||||||
/// unresolved or missing subpatterns or subpatterns of mismatched types.
|
/// unresolved or missing subpatterns or subpatterns of mismatched types.
|
||||||
pub type_of_pat: ArenaMap<PatId, Ty>,
|
pub type_of_pat: ArenaMap<PatId, Ty>,
|
||||||
|
pub type_of_binding: ArenaMap<BindingId, Ty>,
|
||||||
pub type_of_rpit: ArenaMap<RpitId, Ty>,
|
pub type_of_rpit: ArenaMap<RpitId, Ty>,
|
||||||
|
/// Type of the result of `.into_iter()` on the for. `ExprId` is the one of the whole for loop.
|
||||||
|
pub type_of_for_iterator: FxHashMap<ExprId, Ty>,
|
||||||
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
|
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
|
||||||
/// Interned common types to return references to.
|
/// Interned common types to return references to.
|
||||||
standard_types: InternedStandardTypes,
|
standard_types: InternedStandardTypes,
|
||||||
|
@ -389,18 +392,15 @@ impl InferenceResult {
|
||||||
pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> {
|
pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> {
|
||||||
self.type_mismatches.get(&pat.into())
|
self.type_mismatches.get(&pat.into())
|
||||||
}
|
}
|
||||||
|
pub fn type_mismatches(&self) -> impl Iterator<Item = (ExprOrPatId, &TypeMismatch)> {
|
||||||
|
self.type_mismatches.iter().map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch))
|
||||||
|
}
|
||||||
pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch)> {
|
pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch)> {
|
||||||
self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
|
self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
|
||||||
ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
|
ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
|
||||||
_ => None,
|
_ => None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
pub fn pat_type_mismatches(&self) -> impl Iterator<Item = (PatId, &TypeMismatch)> {
|
|
||||||
self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
|
|
||||||
ExprOrPatId::PatId(pat) => Some((pat, mismatch)),
|
|
||||||
_ => None,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Index<ExprId> for InferenceResult {
|
impl Index<ExprId> for InferenceResult {
|
||||||
|
@ -419,6 +419,14 @@ impl Index<PatId> for InferenceResult {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Index<BindingId> for InferenceResult {
|
||||||
|
type Output = Ty;
|
||||||
|
|
||||||
|
fn index(&self, b: BindingId) -> &Ty {
|
||||||
|
self.type_of_binding.get(b).unwrap_or(&self.standard_types.unknown)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// The inference context contains all information needed during type inference.
|
/// The inference context contains all information needed during type inference.
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub(crate) struct InferenceContext<'a> {
|
pub(crate) struct InferenceContext<'a> {
|
||||||
|
@ -428,14 +436,19 @@ pub(crate) struct InferenceContext<'a> {
|
||||||
pub(crate) resolver: Resolver,
|
pub(crate) resolver: Resolver,
|
||||||
table: unify::InferenceTable<'a>,
|
table: unify::InferenceTable<'a>,
|
||||||
trait_env: Arc<TraitEnvironment>,
|
trait_env: Arc<TraitEnvironment>,
|
||||||
|
/// The traits in scope, disregarding block modules. This is used for caching purposes.
|
||||||
|
traits_in_scope: FxHashSet<TraitId>,
|
||||||
pub(crate) result: InferenceResult,
|
pub(crate) result: InferenceResult,
|
||||||
/// The return type of the function being inferred, the closure or async block if we're
|
/// The return type of the function being inferred, the closure or async block if we're
|
||||||
/// currently within one.
|
/// currently within one.
|
||||||
///
|
///
|
||||||
/// We might consider using a nested inference context for checking
|
/// We might consider using a nested inference context for checking
|
||||||
/// closures, but currently this is the only field that will change there,
|
/// closures so we can swap all shared things out at once.
|
||||||
/// so it doesn't make sense.
|
|
||||||
return_ty: Ty,
|
return_ty: Ty,
|
||||||
|
/// If `Some`, this stores coercion information for returned
|
||||||
|
/// expressions. If `None`, this is in a context where return is
|
||||||
|
/// inappropriate, such as a const expression.
|
||||||
|
return_coercion: Option<CoerceMany>,
|
||||||
/// The resume type and the yield type, respectively, of the generator being inferred.
|
/// The resume type and the yield type, respectively, of the generator being inferred.
|
||||||
resume_yield_tys: Option<(Ty, Ty)>,
|
resume_yield_tys: Option<(Ty, Ty)>,
|
||||||
diverges: Diverges,
|
diverges: Diverges,
|
||||||
|
@ -447,7 +460,7 @@ struct BreakableContext {
|
||||||
/// Whether this context contains at least one break expression.
|
/// Whether this context contains at least one break expression.
|
||||||
may_break: bool,
|
may_break: bool,
|
||||||
/// The coercion target of the context.
|
/// The coercion target of the context.
|
||||||
coerce: CoerceMany,
|
coerce: Option<CoerceMany>,
|
||||||
/// The optional label of the context.
|
/// The optional label of the context.
|
||||||
label: Option<name::Name>,
|
label: Option<name::Name>,
|
||||||
kind: BreakableKind,
|
kind: BreakableKind,
|
||||||
|
@ -503,16 +516,22 @@ impl<'a> InferenceContext<'a> {
|
||||||
trait_env,
|
trait_env,
|
||||||
return_ty: TyKind::Error.intern(Interner), // set in collect_* calls
|
return_ty: TyKind::Error.intern(Interner), // set in collect_* calls
|
||||||
resume_yield_tys: None,
|
resume_yield_tys: None,
|
||||||
|
return_coercion: None,
|
||||||
db,
|
db,
|
||||||
owner,
|
owner,
|
||||||
body,
|
body,
|
||||||
|
traits_in_scope: resolver.traits_in_scope(db.upcast()),
|
||||||
resolver,
|
resolver,
|
||||||
diverges: Diverges::Maybe,
|
diverges: Diverges::Maybe,
|
||||||
breakables: Vec::new(),
|
breakables: Vec::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn resolve_all(self) -> InferenceResult {
|
// FIXME: This function should be private in module. It is currently only used in the consteval, since we need
|
||||||
|
// `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you
|
||||||
|
// used this function for another workaround, mention it here. If you really need this function and believe that
|
||||||
|
// there is no problem in it being `pub(crate)`, remove this comment.
|
||||||
|
pub(crate) fn resolve_all(self) -> InferenceResult {
|
||||||
let InferenceContext { mut table, mut result, .. } = self;
|
let InferenceContext { mut table, mut result, .. } = self;
|
||||||
|
|
||||||
table.fallback_if_possible();
|
table.fallback_if_possible();
|
||||||
|
@ -528,13 +547,46 @@ impl<'a> InferenceContext<'a> {
|
||||||
for ty in result.type_of_pat.values_mut() {
|
for ty in result.type_of_pat.values_mut() {
|
||||||
*ty = table.resolve_completely(ty.clone());
|
*ty = table.resolve_completely(ty.clone());
|
||||||
}
|
}
|
||||||
for ty in result.type_of_rpit.iter_mut().map(|x| x.1) {
|
for ty in result.type_of_binding.values_mut() {
|
||||||
|
*ty = table.resolve_completely(ty.clone());
|
||||||
|
}
|
||||||
|
for ty in result.type_of_rpit.values_mut() {
|
||||||
|
*ty = table.resolve_completely(ty.clone());
|
||||||
|
}
|
||||||
|
for ty in result.type_of_for_iterator.values_mut() {
|
||||||
*ty = table.resolve_completely(ty.clone());
|
*ty = table.resolve_completely(ty.clone());
|
||||||
}
|
}
|
||||||
for mismatch in result.type_mismatches.values_mut() {
|
for mismatch in result.type_mismatches.values_mut() {
|
||||||
mismatch.expected = table.resolve_completely(mismatch.expected.clone());
|
mismatch.expected = table.resolve_completely(mismatch.expected.clone());
|
||||||
mismatch.actual = table.resolve_completely(mismatch.actual.clone());
|
mismatch.actual = table.resolve_completely(mismatch.actual.clone());
|
||||||
}
|
}
|
||||||
|
result.diagnostics.retain_mut(|diagnostic| {
|
||||||
|
if let InferenceDiagnostic::ExpectedFunction { found: ty, .. }
|
||||||
|
| InferenceDiagnostic::UnresolvedField { receiver: ty, .. }
|
||||||
|
| InferenceDiagnostic::UnresolvedMethodCall { receiver: ty, .. } = diagnostic
|
||||||
|
{
|
||||||
|
*ty = table.resolve_completely(ty.clone());
|
||||||
|
// FIXME: Remove this when we are on par with rustc in terms of inference
|
||||||
|
if ty.contains_unknown() {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let InferenceDiagnostic::UnresolvedMethodCall { field_with_same_name, .. } =
|
||||||
|
diagnostic
|
||||||
|
{
|
||||||
|
let clear = if let Some(ty) = field_with_same_name {
|
||||||
|
*ty = table.resolve_completely(ty.clone());
|
||||||
|
ty.contains_unknown()
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
};
|
||||||
|
if clear {
|
||||||
|
*field_with_same_name = None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
true
|
||||||
|
});
|
||||||
for (_, subst) in result.method_resolutions.values_mut() {
|
for (_, subst) in result.method_resolutions.values_mut() {
|
||||||
*subst = table.resolve_completely(subst.clone());
|
*subst = table.resolve_completely(subst.clone());
|
||||||
}
|
}
|
||||||
|
@ -580,7 +632,7 @@ impl<'a> InferenceContext<'a> {
|
||||||
let ty = self.insert_type_vars(ty);
|
let ty = self.insert_type_vars(ty);
|
||||||
let ty = self.normalize_associated_types_in(ty);
|
let ty = self.normalize_associated_types_in(ty);
|
||||||
|
|
||||||
self.infer_pat(*pat, &ty, BindingMode::default());
|
self.infer_top_pat(*pat, &ty);
|
||||||
}
|
}
|
||||||
let error_ty = &TypeRef::Error;
|
let error_ty = &TypeRef::Error;
|
||||||
let return_ty = if data.has_async_kw() {
|
let return_ty = if data.has_async_kw() {
|
||||||
|
@ -632,10 +684,19 @@ impl<'a> InferenceContext<'a> {
|
||||||
};
|
};
|
||||||
|
|
||||||
self.return_ty = self.normalize_associated_types_in(return_ty);
|
self.return_ty = self.normalize_associated_types_in(return_ty);
|
||||||
|
self.return_coercion = Some(CoerceMany::new(self.return_ty.clone()));
|
||||||
}
|
}
|
||||||
|
|
||||||
fn infer_body(&mut self) {
|
fn infer_body(&mut self) {
|
||||||
self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
|
match self.return_coercion {
|
||||||
|
Some(_) => self.infer_return(self.body.body_expr),
|
||||||
|
None => {
|
||||||
|
_ = self.infer_expr_coerce(
|
||||||
|
self.body.body_expr,
|
||||||
|
&Expectation::has_type(self.return_ty.clone()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) {
|
fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) {
|
||||||
|
@ -662,12 +723,15 @@ impl<'a> InferenceContext<'a> {
|
||||||
self.result.type_of_pat.insert(pat, ty);
|
self.result.type_of_pat.insert(pat, ty);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn write_binding_ty(&mut self, id: BindingId, ty: Ty) {
|
||||||
|
self.result.type_of_binding.insert(id, ty);
|
||||||
|
}
|
||||||
|
|
||||||
fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) {
|
fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) {
|
||||||
self.result.diagnostics.push(diagnostic);
|
self.result.diagnostics.push(diagnostic);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
|
fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
|
||||||
// FIXME use right resolver for block
|
|
||||||
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
|
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
|
||||||
let ty = ctx.lower_ty(type_ref);
|
let ty = ctx.lower_ty(type_ref);
|
||||||
let ty = self.insert_type_vars(ty);
|
let ty = self.insert_type_vars(ty);
|
||||||
|
@ -681,11 +745,9 @@ impl<'a> InferenceContext<'a> {
|
||||||
/// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it.
|
/// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it.
|
||||||
fn insert_const_vars_shallow(&mut self, c: Const) -> Const {
|
fn insert_const_vars_shallow(&mut self, c: Const) -> Const {
|
||||||
let data = c.data(Interner);
|
let data = c.data(Interner);
|
||||||
match data.value {
|
match &data.value {
|
||||||
ConstValue::Concrete(cc) => match cc.interned {
|
ConstValue::Concrete(cc) => match cc.interned {
|
||||||
hir_def::type_ref::ConstScalar::Unknown => {
|
crate::ConstScalar::Unknown => self.table.new_const_var(data.ty.clone()),
|
||||||
self.table.new_const_var(data.ty.clone())
|
|
||||||
}
|
|
||||||
_ => c,
|
_ => c,
|
||||||
},
|
},
|
||||||
_ => c,
|
_ => c,
|
||||||
|
@ -785,12 +847,11 @@ impl<'a> InferenceContext<'a> {
|
||||||
Some(path) => path,
|
Some(path) => path,
|
||||||
None => return (self.err_ty(), None),
|
None => return (self.err_ty(), None),
|
||||||
};
|
};
|
||||||
let resolver = &self.resolver;
|
|
||||||
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
|
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
|
||||||
// FIXME: this should resolve assoc items as well, see this example:
|
// FIXME: this should resolve assoc items as well, see this example:
|
||||||
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
|
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
|
||||||
let (resolution, unresolved) = if value_ns {
|
let (resolution, unresolved) = if value_ns {
|
||||||
match resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) {
|
match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) {
|
||||||
Some(ResolveValueResult::ValueNs(value)) => match value {
|
Some(ResolveValueResult::ValueNs(value)) => match value {
|
||||||
ValueNs::EnumVariantId(var) => {
|
ValueNs::EnumVariantId(var) => {
|
||||||
let substs = ctx.substs_from_path(path, var.into(), true);
|
let substs = ctx.substs_from_path(path, var.into(), true);
|
||||||
|
@ -811,7 +872,7 @@ impl<'a> InferenceContext<'a> {
|
||||||
None => return (self.err_ty(), None),
|
None => return (self.err_ty(), None),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
|
match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
|
||||||
Some(it) => it,
|
Some(it) => it,
|
||||||
None => return (self.err_ty(), None),
|
None => return (self.err_ty(), None),
|
||||||
}
|
}
|
||||||
|
@ -866,7 +927,10 @@ impl<'a> InferenceContext<'a> {
|
||||||
// FIXME potentially resolve assoc type
|
// FIXME potentially resolve assoc type
|
||||||
(self.err_ty(), None)
|
(self.err_ty(), None)
|
||||||
}
|
}
|
||||||
TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => {
|
TypeNs::AdtId(AdtId::EnumId(_))
|
||||||
|
| TypeNs::BuiltinType(_)
|
||||||
|
| TypeNs::TraitId(_)
|
||||||
|
| TypeNs::TraitAliasId(_) => {
|
||||||
// FIXME diagnostic
|
// FIXME diagnostic
|
||||||
(self.err_ty(), None)
|
(self.err_ty(), None)
|
||||||
}
|
}
|
||||||
|
@ -1018,6 +1082,15 @@ impl<'a> InferenceContext<'a> {
|
||||||
let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?;
|
let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?;
|
||||||
Some(struct_.into())
|
Some(struct_.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn get_traits_in_scope(&self) -> Either<FxHashSet<TraitId>, &FxHashSet<TraitId>> {
|
||||||
|
let mut b_traits = self.resolver.traits_in_scope_from_block_scopes().peekable();
|
||||||
|
if b_traits.peek().is_some() {
|
||||||
|
Either::Left(self.traits_in_scope.iter().copied().chain(b_traits).collect())
|
||||||
|
} else {
|
||||||
|
Either::Right(&self.traits_in_scope)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// When inferring an expression, we propagate downward whatever type hint we
|
/// When inferring an expression, we propagate downward whatever type hint we
|
||||||
|
|
|
@ -50,11 +50,44 @@ fn success(
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub(super) struct CoerceMany {
|
pub(super) struct CoerceMany {
|
||||||
expected_ty: Ty,
|
expected_ty: Ty,
|
||||||
|
final_ty: Option<Ty>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl CoerceMany {
|
impl CoerceMany {
|
||||||
pub(super) fn new(expected: Ty) -> Self {
|
pub(super) fn new(expected: Ty) -> Self {
|
||||||
CoerceMany { expected_ty: expected }
|
CoerceMany { expected_ty: expected, final_ty: None }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the "expected type" with which this coercion was
|
||||||
|
/// constructed. This represents the "downward propagated" type
|
||||||
|
/// that was given to us at the start of typing whatever construct
|
||||||
|
/// we are typing (e.g., the match expression).
|
||||||
|
///
|
||||||
|
/// Typically, this is used as the expected type when
|
||||||
|
/// type-checking each of the alternative expressions whose types
|
||||||
|
/// we are trying to merge.
|
||||||
|
pub(super) fn expected_ty(&self) -> Ty {
|
||||||
|
self.expected_ty.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the current "merged type", representing our best-guess
|
||||||
|
/// at the LUB of the expressions we've seen so far (if any). This
|
||||||
|
/// isn't *final* until you call `self.complete()`, which will return
|
||||||
|
/// the merged type.
|
||||||
|
pub(super) fn merged_ty(&self) -> Ty {
|
||||||
|
self.final_ty.clone().unwrap_or_else(|| self.expected_ty.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(super) fn complete(self, ctx: &mut InferenceContext<'_>) -> Ty {
|
||||||
|
if let Some(final_ty) = self.final_ty {
|
||||||
|
final_ty
|
||||||
|
} else {
|
||||||
|
ctx.result.standard_types.never.clone()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(super) fn coerce_forced_unit(&mut self, ctx: &mut InferenceContext<'_>) {
|
||||||
|
self.coerce(ctx, None, &ctx.result.standard_types.unit.clone())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Merge two types from different branches, with possible coercion.
|
/// Merge two types from different branches, with possible coercion.
|
||||||
|
@ -76,25 +109,25 @@ impl CoerceMany {
|
||||||
// Special case: two function types. Try to coerce both to
|
// Special case: two function types. Try to coerce both to
|
||||||
// pointers to have a chance at getting a match. See
|
// pointers to have a chance at getting a match. See
|
||||||
// https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
|
// https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
|
||||||
let sig = match (self.expected_ty.kind(Interner), expr_ty.kind(Interner)) {
|
let sig = match (self.merged_ty().kind(Interner), expr_ty.kind(Interner)) {
|
||||||
(TyKind::FnDef(..) | TyKind::Closure(..), TyKind::FnDef(..) | TyKind::Closure(..)) => {
|
(TyKind::FnDef(..) | TyKind::Closure(..), TyKind::FnDef(..) | TyKind::Closure(..)) => {
|
||||||
// FIXME: we're ignoring safety here. To be more correct, if we have one FnDef and one Closure,
|
// FIXME: we're ignoring safety here. To be more correct, if we have one FnDef and one Closure,
|
||||||
// we should be coercing the closure to a fn pointer of the safety of the FnDef
|
// we should be coercing the closure to a fn pointer of the safety of the FnDef
|
||||||
cov_mark::hit!(coerce_fn_reification);
|
cov_mark::hit!(coerce_fn_reification);
|
||||||
let sig =
|
let sig =
|
||||||
self.expected_ty.callable_sig(ctx.db).expect("FnDef without callable sig");
|
self.merged_ty().callable_sig(ctx.db).expect("FnDef without callable sig");
|
||||||
Some(sig)
|
Some(sig)
|
||||||
}
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
};
|
};
|
||||||
if let Some(sig) = sig {
|
if let Some(sig) = sig {
|
||||||
let target_ty = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
|
let target_ty = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
|
||||||
let result1 = ctx.table.coerce_inner(self.expected_ty.clone(), &target_ty);
|
let result1 = ctx.table.coerce_inner(self.merged_ty(), &target_ty);
|
||||||
let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty);
|
let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty);
|
||||||
if let (Ok(result1), Ok(result2)) = (result1, result2) {
|
if let (Ok(result1), Ok(result2)) = (result1, result2) {
|
||||||
ctx.table.register_infer_ok(result1);
|
ctx.table.register_infer_ok(result1);
|
||||||
ctx.table.register_infer_ok(result2);
|
ctx.table.register_infer_ok(result2);
|
||||||
return self.expected_ty = target_ty;
|
return self.final_ty = Some(target_ty);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
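The fn-reification special case above mirrors what surface Rust does: two distinct fn items (or closures) have distinct zero-sized types, and the branches only get a common type once both sides are reified to a function pointer. A tiny standalone illustration of that behaviour:

    fn one() -> i32 { 1 }
    fn two() -> i32 { 2 }

    fn main() {
        // `one` and `two` have different zero-sized fn-item types; the two branches
        // merge only because both are coerced to the fn pointer type `fn() -> i32`.
        let picked: fn() -> i32 = if true { one } else { two };
        assert_eq!(picked(), 1);
    }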
|
@ -102,25 +135,20 @@ impl CoerceMany {
|
||||||
// type is a type variable and the new one is `!`, trying it the other
|
// type is a type variable and the new one is `!`, trying it the other
|
||||||
// way around first would mean we make the type variable `!`, instead of
|
// way around first would mean we make the type variable `!`, instead of
|
||||||
// just marking it as possibly diverging.
|
// just marking it as possibly diverging.
|
||||||
if ctx.coerce(expr, &expr_ty, &self.expected_ty).is_ok() {
|
if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty()) {
|
||||||
/* self.expected_ty is already correct */
|
self.final_ty = Some(res);
|
||||||
} else if ctx.coerce(expr, &self.expected_ty, &expr_ty).is_ok() {
|
} else if let Ok(res) = ctx.coerce(expr, &self.merged_ty(), &expr_ty) {
|
||||||
self.expected_ty = expr_ty;
|
self.final_ty = Some(res);
|
||||||
} else {
|
} else {
|
||||||
if let Some(id) = expr {
|
if let Some(id) = expr {
|
||||||
ctx.result.type_mismatches.insert(
|
ctx.result.type_mismatches.insert(
|
||||||
id.into(),
|
id.into(),
|
||||||
TypeMismatch { expected: self.expected_ty.clone(), actual: expr_ty },
|
TypeMismatch { expected: self.merged_ty().clone(), actual: expr_ty.clone() },
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
cov_mark::hit!(coerce_merge_fail_fallback);
|
cov_mark::hit!(coerce_merge_fail_fallback);
|
||||||
/* self.expected_ty is already correct */
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn complete(self) -> Ty {
|
|
||||||
self.expected_ty
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn could_coerce(
|
pub fn could_coerce(
|
||||||
|
@ -665,7 +693,7 @@ pub(super) fn auto_deref_adjust_steps(autoderef: &Autoderef<'_, '_>) -> Vec<Adju
|
||||||
.iter()
|
.iter()
|
||||||
.map(|(kind, _source)| match kind {
|
.map(|(kind, _source)| match kind {
|
||||||
// We do not know what kind of deref we require at this point yet
|
// We do not know what kind of deref we require at this point yet
|
||||||
AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
|
AutoderefKind::Overloaded => Some(OverloadedDeref(None)),
|
||||||
AutoderefKind::Builtin => None,
|
AutoderefKind::Builtin => None,
|
||||||
})
|
})
|
||||||
.zip(targets)
|
.zip(targets)
|
||||||
|
|
File diff suppressed because it is too large
|
@ -4,22 +4,60 @@ use std::iter::repeat_with;
|
||||||
|
|
||||||
use chalk_ir::Mutability;
|
use chalk_ir::Mutability;
|
||||||
use hir_def::{
|
use hir_def::{
|
||||||
expr::{BindingAnnotation, Expr, Literal, Pat, PatId},
|
body::Body,
|
||||||
|
expr::{
|
||||||
|
Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId,
|
||||||
|
RecordFieldPat,
|
||||||
|
},
|
||||||
path::Path,
|
path::Path,
|
||||||
type_ref::ConstScalar,
|
|
||||||
};
|
};
|
||||||
use hir_expand::name::Name;
|
use hir_expand::name::Name;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
consteval::intern_const_scalar,
|
consteval::{try_const_usize, usize_const},
|
||||||
infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
|
infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
|
||||||
lower::lower_to_chalk_mutability,
|
lower::lower_to_chalk_mutability,
|
||||||
primitive::UintTy,
|
primitive::UintTy,
|
||||||
static_lifetime, ConcreteConst, ConstValue, Interner, Scalar, Substitution, Ty, TyBuilder,
|
static_lifetime, Interner, Scalar, Substitution, Ty, TyBuilder, TyExt, TyKind,
|
||||||
TyExt, TyKind,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
use super::PatLike;
|
/// Used to generalize patterns and assignee expressions.
|
||||||
|
pub(super) trait PatLike: Into<ExprOrPatId> + Copy {
|
||||||
|
type BindingMode: Copy;
|
||||||
|
|
||||||
|
fn infer(
|
||||||
|
this: &mut InferenceContext<'_>,
|
||||||
|
id: Self,
|
||||||
|
expected_ty: &Ty,
|
||||||
|
default_bm: Self::BindingMode,
|
||||||
|
) -> Ty;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PatLike for ExprId {
|
||||||
|
type BindingMode = ();
|
||||||
|
|
||||||
|
fn infer(
|
||||||
|
this: &mut InferenceContext<'_>,
|
||||||
|
id: Self,
|
||||||
|
expected_ty: &Ty,
|
||||||
|
(): Self::BindingMode,
|
||||||
|
) -> Ty {
|
||||||
|
this.infer_assignee_expr(id, expected_ty)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PatLike for PatId {
|
||||||
|
type BindingMode = BindingMode;
|
||||||
|
|
||||||
|
fn infer(
|
||||||
|
this: &mut InferenceContext<'_>,
|
||||||
|
id: Self,
|
||||||
|
expected_ty: &Ty,
|
||||||
|
default_bm: Self::BindingMode,
|
||||||
|
) -> Ty {
|
||||||
|
this.infer_pat(id, expected_ty, default_bm)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
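The `PatLike` trait above lets one generic inference routine serve both pattern ids and assignee-expression ids by varying the binding-mode type per id kind (`()` for expressions). A self-contained sketch of the same trick; the names and string results are illustrative only, not rust-analyzer API:

    #[derive(Clone, Copy)]
    struct ExprId(u32);
    #[derive(Clone, Copy)]
    struct PatId(u32);

    #[derive(Clone, Copy, Debug, Default)]
    enum BindingMode { #[default] Move, Ref }

    trait PatLikeSketch: Copy {
        type BindingMode: Copy + Default;
        fn infer(self, default_bm: Self::BindingMode) -> String;
    }

    impl PatLikeSketch for ExprId {
        type BindingMode = (); // assignee expressions carry no binding mode
        fn infer(self, _: ()) -> String {
            format!("inferred assignee expr #{}", self.0)
        }
    }

    impl PatLikeSketch for PatId {
        type BindingMode = BindingMode;
        fn infer(self, bm: BindingMode) -> String {
            format!("inferred pat #{} with {:?}", self.0, bm)
        }
    }

    // One generic entry point serves both id kinds.
    fn infer_top<T: PatLikeSketch>(id: T) -> String {
        id.infer(T::BindingMode::default())
    }

    fn main() {
        println!("{}", infer_top(ExprId(1)));
        println!("{}", infer_top(PatId(2)));
    }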
||||||
impl<'a> InferenceContext<'a> {
|
impl<'a> InferenceContext<'a> {
|
||||||
/// Infers type for tuple struct pattern or its corresponding assignee expression.
|
/// Infers type for tuple struct pattern or its corresponding assignee expression.
|
||||||
|
@ -112,6 +150,7 @@ impl<'a> InferenceContext<'a> {
|
||||||
ellipsis: Option<usize>,
|
ellipsis: Option<usize>,
|
||||||
subs: &[T],
|
subs: &[T],
|
||||||
) -> Ty {
|
) -> Ty {
|
||||||
|
let expected = self.resolve_ty_shallow(expected);
|
||||||
let expectations = match expected.as_tuple() {
|
let expectations = match expected.as_tuple() {
|
||||||
Some(parameters) => &*parameters.as_slice(Interner),
|
Some(parameters) => &*parameters.as_slice(Interner),
|
||||||
_ => &[],
|
_ => &[],
|
||||||
|
@ -145,12 +184,11 @@ impl<'a> InferenceContext<'a> {
|
||||||
.intern(Interner)
|
.intern(Interner)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn infer_pat(
|
pub(super) fn infer_top_pat(&mut self, pat: PatId, expected: &Ty) {
|
||||||
&mut self,
|
self.infer_pat(pat, expected, BindingMode::default());
|
||||||
pat: PatId,
|
}
|
||||||
expected: &Ty,
|
|
||||||
mut default_bm: BindingMode,
|
fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty {
|
||||||
) -> Ty {
|
|
||||||
let mut expected = self.resolve_ty_shallow(expected);
|
let mut expected = self.resolve_ty_shallow(expected);
|
||||||
|
|
||||||
if is_non_ref_pat(self.body, pat) {
|
if is_non_ref_pat(self.body, pat) {
|
||||||
|
@ -185,30 +223,17 @@ impl<'a> InferenceContext<'a> {
|
||||||
self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args)
|
self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args)
|
||||||
}
|
}
|
||||||
Pat::Or(pats) => {
|
Pat::Or(pats) => {
|
||||||
if let Some((first_pat, rest)) = pats.split_first() {
|
for pat in pats.iter() {
|
||||||
let ty = self.infer_pat(*first_pat, &expected, default_bm);
|
self.infer_pat(*pat, &expected, default_bm);
|
||||||
for pat in rest {
|
|
||||||
self.infer_pat(*pat, &expected, default_bm);
|
|
||||||
}
|
|
||||||
ty
|
|
||||||
} else {
|
|
||||||
self.err_ty()
|
|
||||||
}
|
}
|
||||||
|
expected.clone()
|
||||||
}
|
}
|
||||||
Pat::Ref { pat, mutability } => {
|
&Pat::Ref { pat, mutability } => self.infer_ref_pat(
|
||||||
let mutability = lower_to_chalk_mutability(*mutability);
|
pat,
|
||||||
let expectation = match expected.as_reference() {
|
lower_to_chalk_mutability(mutability),
|
||||||
Some((inner_ty, _lifetime, exp_mut)) => {
|
&expected,
|
||||||
if mutability != exp_mut {
|
default_bm,
|
||||||
// FIXME: emit type error?
|
),
|
||||||
}
|
|
||||||
inner_ty.clone()
|
|
||||||
}
|
|
||||||
_ => self.result.standard_types.unknown.clone(),
|
|
||||||
};
|
|
||||||
let subty = self.infer_pat(*pat, &expectation, default_bm);
|
|
||||||
TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
|
|
||||||
}
|
|
||||||
Pat::TupleStruct { path: p, args: subpats, ellipsis } => self
|
Pat::TupleStruct { path: p, args: subpats, ellipsis } => self
|
||||||
.infer_tuple_struct_pat_like(
|
.infer_tuple_struct_pat_like(
|
||||||
p.as_deref(),
|
p.as_deref(),
|
||||||
|
@ -223,72 +248,14 @@ impl<'a> InferenceContext<'a> {
|
||||||
self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs)
|
self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs)
|
||||||
}
|
}
|
||||||
Pat::Path(path) => {
|
Pat::Path(path) => {
|
||||||
// FIXME use correct resolver for the surrounding expression
|
// FIXME update resolver for the surrounding expression
|
||||||
let resolver = self.resolver.clone();
|
self.infer_path(path, pat.into()).unwrap_or_else(|| self.err_ty())
|
||||||
self.infer_path(&resolver, path, pat.into()).unwrap_or_else(|| self.err_ty())
|
|
||||||
}
|
}
|
||||||
Pat::Bind { mode, name: _, subpat } => {
|
Pat::Bind { id, subpat } => {
|
||||||
let mode = if mode == &BindingAnnotation::Unannotated {
|
return self.infer_bind_pat(pat, *id, default_bm, *subpat, &expected);
|
||||||
default_bm
|
|
||||||
} else {
|
|
||||||
BindingMode::convert(*mode)
|
|
||||||
};
|
|
||||||
self.result.pat_binding_modes.insert(pat, mode);
|
|
||||||
|
|
||||||
let inner_ty = match subpat {
|
|
||||||
Some(subpat) => self.infer_pat(*subpat, &expected, default_bm),
|
|
||||||
None => expected,
|
|
||||||
};
|
|
||||||
let inner_ty = self.insert_type_vars_shallow(inner_ty);
|
|
||||||
|
|
||||||
let bound_ty = match mode {
|
|
||||||
BindingMode::Ref(mutability) => {
|
|
||||||
TyKind::Ref(mutability, static_lifetime(), inner_ty.clone())
|
|
||||||
.intern(Interner)
|
|
||||||
}
|
|
||||||
BindingMode::Move => inner_ty.clone(),
|
|
||||||
};
|
|
||||||
self.write_pat_ty(pat, bound_ty);
|
|
||||||
return inner_ty;
|
|
||||||
}
|
}
|
||||||
Pat::Slice { prefix, slice, suffix } => {
|
Pat::Slice { prefix, slice, suffix } => {
|
||||||
let elem_ty = match expected.kind(Interner) {
|
self.infer_slice_pat(&expected, prefix, slice, suffix, default_bm)
|
||||||
TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
|
|
||||||
_ => self.err_ty(),
|
|
||||||
};
|
|
||||||
|
|
||||||
for &pat_id in prefix.iter().chain(suffix.iter()) {
|
|
||||||
self.infer_pat(pat_id, &elem_ty, default_bm);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let &Some(slice_pat_id) = slice {
|
|
||||||
let rest_pat_ty = match expected.kind(Interner) {
|
|
||||||
TyKind::Array(_, length) => {
|
|
||||||
let len = match length.data(Interner).value {
|
|
||||||
ConstValue::Concrete(ConcreteConst {
|
|
||||||
interned: ConstScalar::UInt(len),
|
|
||||||
}) => len.checked_sub((prefix.len() + suffix.len()) as u128),
|
|
||||||
_ => None,
|
|
||||||
};
|
|
||||||
TyKind::Array(
|
|
||||||
elem_ty.clone(),
|
|
||||||
intern_const_scalar(
|
|
||||||
len.map_or(ConstScalar::Unknown, |len| ConstScalar::UInt(len)),
|
|
||||||
TyBuilder::usize(),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
_ => TyKind::Slice(elem_ty.clone()),
|
|
||||||
}
|
|
||||||
.intern(Interner);
|
|
||||||
self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
|
|
||||||
}
|
|
||||||
|
|
||||||
match expected.kind(Interner) {
|
|
||||||
TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
|
|
||||||
_ => TyKind::Slice(elem_ty),
|
|
||||||
}
|
|
||||||
.intern(Interner)
|
|
||||||
}
|
}
|
||||||
Pat::Wild => expected.clone(),
|
Pat::Wild => expected.clone(),
|
||||||
Pat::Range { start, end } => {
|
Pat::Range { start, end } => {
|
||||||
|
@ -296,27 +263,10 @@ impl<'a> InferenceContext<'a> {
|
||||||
self.infer_expr(*end, &Expectation::has_type(start_ty))
|
self.infer_expr(*end, &Expectation::has_type(start_ty))
|
||||||
}
|
}
|
||||||
&Pat::Lit(expr) => {
|
&Pat::Lit(expr) => {
|
||||||
// FIXME: using `Option` here is a workaround until we can use if-let chains in stable.
|
// Don't emit type mismatches again, the expression lowering already did that.
|
||||||
let mut pat_ty = None;
|
let ty = self.infer_lit_pat(expr, &expected);
|
||||||
|
self.write_pat_ty(pat, ty.clone());
|
||||||
// Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`.
|
return ty;
|
||||||
if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] {
|
|
||||||
if let Some((inner, ..)) = expected.as_reference() {
|
|
||||||
let inner = self.resolve_ty_shallow(inner);
|
|
||||||
if matches!(inner.kind(Interner), TyKind::Slice(_)) {
|
|
||||||
let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
|
|
||||||
let slice_ty = TyKind::Slice(elem_ty).intern(Interner);
|
|
||||||
let ty = TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty)
|
|
||||||
.intern(Interner);
|
|
||||||
self.write_expr_ty(expr, ty.clone());
|
|
||||||
pat_ty = Some(ty);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pat_ty.unwrap_or_else(|| {
|
|
||||||
self.infer_expr(expr, &Expectation::has_type(expected.clone()))
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
Pat::Box { inner } => match self.resolve_boxed_box() {
|
Pat::Box { inner } => match self.resolve_boxed_box() {
|
||||||
Some(box_adt) => {
|
Some(box_adt) => {
|
||||||
|
@ -345,7 +295,8 @@ impl<'a> InferenceContext<'a> {
|
||||||
};
|
};
|
||||||
// use a new type variable if we got error type here
|
// use a new type variable if we got error type here
|
||||||
let ty = self.insert_type_vars_shallow(ty);
|
let ty = self.insert_type_vars_shallow(ty);
|
||||||
if !self.unify(&ty, &expected) {
|
// FIXME: This never check is odd, but required with how we do inference right now
|
||||||
|
if !expected.is_never() && !self.unify(&ty, &expected) {
|
||||||
self.result
|
self.result
|
||||||
.type_mismatches
|
.type_mismatches
|
||||||
.insert(pat.into(), TypeMismatch { expected, actual: ty.clone() });
|
.insert(pat.into(), TypeMismatch { expected, actual: ty.clone() });
|
||||||
|
@ -353,6 +304,111 @@ impl<'a> InferenceContext<'a> {
|
||||||
self.write_pat_ty(pat, ty.clone());
|
self.write_pat_ty(pat, ty.clone());
|
||||||
ty
|
ty
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn infer_ref_pat(
|
||||||
|
&mut self,
|
||||||
|
pat: PatId,
|
||||||
|
mutability: Mutability,
|
||||||
|
expected: &Ty,
|
||||||
|
default_bm: BindingMode,
|
||||||
|
) -> Ty {
|
||||||
|
let expectation = match expected.as_reference() {
|
||||||
|
Some((inner_ty, _lifetime, _exp_mut)) => inner_ty.clone(),
|
||||||
|
_ => self.result.standard_types.unknown.clone(),
|
||||||
|
};
|
||||||
|
let subty = self.infer_pat(pat, &expectation, default_bm);
|
||||||
|
TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn infer_bind_pat(
|
||||||
|
&mut self,
|
||||||
|
pat: PatId,
|
||||||
|
binding: BindingId,
|
||||||
|
default_bm: BindingMode,
|
||||||
|
subpat: Option<PatId>,
|
||||||
|
expected: &Ty,
|
||||||
|
) -> Ty {
|
||||||
|
let Binding { mode, .. } = self.body.bindings[binding];
|
||||||
|
let mode = if mode == BindingAnnotation::Unannotated {
|
||||||
|
default_bm
|
||||||
|
} else {
|
||||||
|
BindingMode::convert(mode)
|
||||||
|
};
|
||||||
|
self.result.pat_binding_modes.insert(pat, mode);
|
||||||
|
|
||||||
|
let inner_ty = match subpat {
|
||||||
|
Some(subpat) => self.infer_pat(subpat, &expected, default_bm),
|
||||||
|
None => expected.clone(),
|
||||||
|
};
|
||||||
|
let inner_ty = self.insert_type_vars_shallow(inner_ty);
|
||||||
|
|
||||||
|
let bound_ty = match mode {
|
||||||
|
BindingMode::Ref(mutability) => {
|
||||||
|
TyKind::Ref(mutability, static_lifetime(), inner_ty.clone()).intern(Interner)
|
||||||
|
}
|
||||||
|
BindingMode::Move => inner_ty.clone(),
|
||||||
|
};
|
||||||
|
self.write_pat_ty(pat, bound_ty.clone());
|
||||||
|
self.write_binding_ty(binding, bound_ty);
|
||||||
|
return inner_ty;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn infer_slice_pat(
|
||||||
|
&mut self,
|
||||||
|
expected: &Ty,
|
||||||
|
prefix: &[PatId],
|
||||||
|
slice: &Option<PatId>,
|
||||||
|
suffix: &[PatId],
|
||||||
|
default_bm: BindingMode,
|
||||||
|
) -> Ty {
|
||||||
|
let elem_ty = match expected.kind(Interner) {
|
||||||
|
TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
|
||||||
|
_ => self.err_ty(),
|
||||||
|
};
|
||||||
|
|
||||||
|
for &pat_id in prefix.iter().chain(suffix.iter()) {
|
||||||
|
self.infer_pat(pat_id, &elem_ty, default_bm);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let &Some(slice_pat_id) = slice {
|
||||||
|
let rest_pat_ty = match expected.kind(Interner) {
|
||||||
|
TyKind::Array(_, length) => {
|
||||||
|
let len = try_const_usize(length);
|
||||||
|
let len =
|
||||||
|
len.and_then(|len| len.checked_sub((prefix.len() + suffix.len()) as u128));
|
||||||
|
TyKind::Array(elem_ty.clone(), usize_const(self.db, len, self.resolver.krate()))
|
||||||
|
}
|
||||||
|
_ => TyKind::Slice(elem_ty.clone()),
|
||||||
|
}
|
||||||
|
.intern(Interner);
|
||||||
|
self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
|
||||||
|
}
|
||||||
|
|
||||||
|
match expected.kind(Interner) {
|
||||||
|
TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
|
||||||
|
_ => TyKind::Slice(elem_ty),
|
||||||
|
}
|
||||||
|
.intern(Interner)
|
||||||
|
}
|
||||||
|
|
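The array arm above computes the rest pattern's length as the array length minus the prefix and suffix counts, with `checked_sub` turning an impossible pattern into `None` rather than underflowing. A small standalone sketch of just that arithmetic (the helper name is made up for illustration):

    // For `[a, b, rest @ .., z]` against `[T; LEN]`, the `rest` sub-pattern covers
    // LEN - (prefix + suffix) elements, or `None` if the pattern can never match.
    fn rest_len(array_len: u128, prefix: usize, suffix: usize) -> Option<u128> {
        array_len.checked_sub((prefix + suffix) as u128)
    }

    fn main() {
        assert_eq!(rest_len(5, 2, 1), Some(2)); // [a, b, rest @ .., z] on [T; 5]
        assert_eq!(rest_len(2, 2, 1), None);    // pattern needs at least 3 elements
    }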
||||||
|
fn infer_lit_pat(&mut self, expr: ExprId, expected: &Ty) -> Ty {
|
||||||
|
// Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`.
|
||||||
|
if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] {
|
||||||
|
if let Some((inner, ..)) = expected.as_reference() {
|
||||||
|
let inner = self.resolve_ty_shallow(inner);
|
||||||
|
if matches!(inner.kind(Interner), TyKind::Slice(_)) {
|
||||||
|
let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
|
||||||
|
let slice_ty = TyKind::Slice(elem_ty).intern(Interner);
|
||||||
|
let ty =
|
||||||
|
TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty).intern(Interner);
|
||||||
|
self.write_expr_ty(expr, ty.clone());
|
||||||
|
return ty;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.infer_expr(expr, &Expectation::has_type(expected.clone()))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
|
fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
|
||||||
|
@ -369,11 +425,52 @@ fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
|
||||||
Pat::Lit(expr) => {
|
Pat::Lit(expr) => {
|
||||||
!matches!(body[*expr], Expr::Literal(Literal::String(..) | Literal::ByteString(..)))
|
!matches!(body[*expr], Expr::Literal(Literal::String(..) | Literal::ByteString(..)))
|
||||||
}
|
}
|
||||||
Pat::Bind {
|
Pat::Bind { id, subpat: Some(subpat), .. }
|
||||||
mode: BindingAnnotation::Mutable | BindingAnnotation::Unannotated,
|
if matches!(
|
||||||
subpat: Some(subpat),
|
body.bindings[*id].mode,
|
||||||
..
|
BindingAnnotation::Mutable | BindingAnnotation::Unannotated
|
||||||
} => is_non_ref_pat(body, *subpat),
|
) =>
|
||||||
|
{
|
||||||
|
is_non_ref_pat(body, *subpat)
|
||||||
|
}
|
||||||
Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
|
Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool {
|
||||||
|
let mut res = false;
|
||||||
|
walk_pats(body, pat_id, &mut |pat| {
|
||||||
|
res |= matches!(pat, Pat::Bind { id, .. } if body.bindings[*id].mode == BindingAnnotation::Ref);
|
||||||
|
});
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
fn walk_pats(body: &Body, pat_id: PatId, f: &mut impl FnMut(&Pat)) {
|
||||||
|
let pat = &body[pat_id];
|
||||||
|
f(pat);
|
||||||
|
match pat {
|
||||||
|
Pat::Range { .. }
|
||||||
|
| Pat::Lit(..)
|
||||||
|
| Pat::Path(..)
|
||||||
|
| Pat::ConstBlock(..)
|
||||||
|
| Pat::Wild
|
||||||
|
| Pat::Missing => {}
|
||||||
|
&Pat::Bind { subpat, .. } => {
|
||||||
|
if let Some(subpat) = subpat {
|
||||||
|
walk_pats(body, subpat, f);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => {
|
||||||
|
args.iter().copied().for_each(|p| walk_pats(body, p, f));
|
||||||
|
}
|
||||||
|
Pat::Ref { pat, .. } => walk_pats(body, *pat, f),
|
||||||
|
Pat::Slice { prefix, slice, suffix } => {
|
||||||
|
let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter());
|
||||||
|
total_iter.copied().for_each(|p| walk_pats(body, p, f));
|
||||||
|
}
|
||||||
|
Pat::Record { args, .. } => {
|
||||||
|
args.iter().for_each(|RecordFieldPat { pat, .. }| walk_pats(body, *pat, f));
|
||||||
|
}
|
||||||
|
Pat::Box { inner } => walk_pats(body, *inner, f),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
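`contains_explicit_ref_binding` above shows the intended use of `walk_pats`: thread a mutable flag through the recursive walk and OR it with a per-pattern predicate. A simplified, self-contained version over a toy pattern tree (boxed children instead of the arena-backed `Body`):

    enum Pat {
        Wild,
        Bind { by_ref: bool, subpat: Option<Box<Pat>> },
        Tuple(Vec<Pat>),
        Ref(Box<Pat>),
    }

    fn walk_pats(pat: &Pat, f: &mut impl FnMut(&Pat)) {
        f(pat);
        match pat {
            Pat::Wild => {}
            Pat::Bind { subpat, .. } => {
                if let Some(sub) = subpat {
                    walk_pats(sub, f);
                }
            }
            Pat::Tuple(args) => {
                for p in args {
                    walk_pats(p, f);
                }
            }
            Pat::Ref(inner) => walk_pats(inner, f),
        }
    }

    fn contains_explicit_ref_binding(pat: &Pat) -> bool {
        let mut res = false;
        walk_pats(pat, &mut |p| res |= matches!(p, Pat::Bind { by_ref: true, .. }));
        res
    }

    fn main() {
        let pat = Pat::Tuple(vec![
            Pat::Wild,
            Pat::Bind { by_ref: true, subpat: None }, // `ref x`
        ]);
        assert!(contains_explicit_ref_binding(&pat));
    }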
@ -3,7 +3,7 @@
|
||||||
use chalk_ir::cast::Cast;
|
use chalk_ir::cast::Cast;
|
||||||
use hir_def::{
|
use hir_def::{
|
||||||
path::{Path, PathSegment},
|
path::{Path, PathSegment},
|
||||||
resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
|
resolver::{ResolveValueResult, TypeNs, ValueNs},
|
||||||
AdtId, AssocItemId, EnumVariantId, ItemContainerId, Lookup,
|
AdtId, AssocItemId, EnumVariantId, ItemContainerId, Lookup,
|
||||||
};
|
};
|
||||||
use hir_expand::name::Name;
|
use hir_expand::name::Name;
|
||||||
|
@ -21,55 +21,42 @@ use crate::{
|
||||||
use super::{ExprOrPatId, InferenceContext, TraitRef};
|
use super::{ExprOrPatId, InferenceContext, TraitRef};
|
||||||
|
|
||||||
impl<'a> InferenceContext<'a> {
|
impl<'a> InferenceContext<'a> {
|
||||||
pub(super) fn infer_path(
|
pub(super) fn infer_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty> {
|
||||||
&mut self,
|
let ty = self.resolve_value_path(path, id)?;
|
||||||
resolver: &Resolver,
|
|
||||||
path: &Path,
|
|
||||||
id: ExprOrPatId,
|
|
||||||
) -> Option<Ty> {
|
|
||||||
let ty = self.resolve_value_path(resolver, path, id)?;
|
|
||||||
let ty = self.insert_type_vars(ty);
|
let ty = self.insert_type_vars(ty);
|
||||||
let ty = self.normalize_associated_types_in(ty);
|
let ty = self.normalize_associated_types_in(ty);
|
||||||
Some(ty)
|
Some(ty)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn resolve_value_path(
|
fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty> {
|
||||||
&mut self,
|
|
||||||
resolver: &Resolver,
|
|
||||||
path: &Path,
|
|
||||||
id: ExprOrPatId,
|
|
||||||
) -> Option<Ty> {
|
|
||||||
let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
|
let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
|
||||||
if path.segments().is_empty() {
|
let Some(last) = path.segments().last() else { return None };
|
||||||
// This can't actually happen syntax-wise
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
let ty = self.make_ty(type_ref);
|
let ty = self.make_ty(type_ref);
|
||||||
let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
|
let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
|
||||||
let ctx = crate::lower::TyLoweringContext::new(self.db, resolver);
|
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
|
||||||
let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
|
let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
|
||||||
self.resolve_ty_assoc_item(
|
self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
|
||||||
ty,
|
|
||||||
path.segments().last().expect("path had at least one segment").name,
|
|
||||||
id,
|
|
||||||
)?
|
|
||||||
} else {
|
} else {
|
||||||
|
// FIXME: report error, unresolved first path segment
|
||||||
let value_or_partial =
|
let value_or_partial =
|
||||||
resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
|
self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
|
||||||
|
|
||||||
match value_or_partial {
|
match value_or_partial {
|
||||||
ResolveValueResult::ValueNs(it) => (it, None),
|
ResolveValueResult::ValueNs(it) => (it, None),
|
||||||
ResolveValueResult::Partial(def, remaining_index) => {
|
ResolveValueResult::Partial(def, remaining_index) => self
|
||||||
self.resolve_assoc_item(def, path, remaining_index, id)?
|
.resolve_assoc_item(def, path, remaining_index, id)
|
||||||
}
|
.map(|(it, substs)| (it, Some(substs)))?,
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let typable: ValueTyDefId = match value {
|
let typable: ValueTyDefId = match value {
|
||||||
ValueNs::LocalBinding(pat) => {
|
ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) {
|
||||||
let ty = self.result.type_of_pat.get(pat)?.clone();
|
Some(ty) => return Some(ty.clone()),
|
||||||
return Some(ty);
|
None => {
|
||||||
}
|
never!("uninferred pattern?");
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
},
|
||||||
ValueNs::FunctionId(it) => it.into(),
|
ValueNs::FunctionId(it) => it.into(),
|
||||||
ValueNs::ConstId(it) => it.into(),
|
ValueNs::ConstId(it) => it.into(),
|
||||||
ValueNs::StaticId(it) => it.into(),
|
ValueNs::StaticId(it) => it.into(),
|
||||||
|
@ -91,7 +78,7 @@ impl<'a> InferenceContext<'a> {
|
||||||
let ty = self.db.value_ty(struct_id.into()).substitute(Interner, &substs);
|
let ty = self.db.value_ty(struct_id.into()).substitute(Interner, &substs);
|
||||||
return Some(ty);
|
return Some(ty);
|
||||||
} else {
|
} else {
|
||||||
// FIXME: diagnostic, invalid Self reference
|
// FIXME: report error, invalid Self reference
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -126,7 +113,7 @@ impl<'a> InferenceContext<'a> {
|
||||||
path: &Path,
|
path: &Path,
|
||||||
remaining_index: usize,
|
remaining_index: usize,
|
||||||
id: ExprOrPatId,
|
id: ExprOrPatId,
|
||||||
) -> Option<(ValueNs, Option<Substitution>)> {
|
) -> Option<(ValueNs, Substitution)> {
|
||||||
assert!(remaining_index < path.segments().len());
|
assert!(remaining_index < path.segments().len());
|
||||||
// there may be more intermediate segments between the resolved one and
|
// there may be more intermediate segments between the resolved one and
|
||||||
// the end. Only the last segment needs to be resolved to a value; from
|
// the end. Only the last segment needs to be resolved to a value; from
|
||||||
|
@ -179,7 +166,7 @@ impl<'a> InferenceContext<'a> {
|
||||||
trait_ref: TraitRef,
|
trait_ref: TraitRef,
|
||||||
segment: PathSegment<'_>,
|
segment: PathSegment<'_>,
|
||||||
id: ExprOrPatId,
|
id: ExprOrPatId,
|
||||||
) -> Option<(ValueNs, Option<Substitution>)> {
|
) -> Option<(ValueNs, Substitution)> {
|
||||||
let trait_ = trait_ref.hir_trait_id();
|
let trait_ = trait_ref.hir_trait_id();
|
||||||
let item =
|
let item =
|
||||||
self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
|
self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
|
||||||
|
@ -215,7 +202,7 @@ impl<'a> InferenceContext<'a> {
|
||||||
};
|
};
|
||||||
|
|
||||||
self.write_assoc_resolution(id, item, trait_ref.substitution.clone());
|
self.write_assoc_resolution(id, item, trait_ref.substitution.clone());
|
||||||
Some((def, Some(trait_ref.substitution)))
|
Some((def, trait_ref.substitution))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn resolve_ty_assoc_item(
|
fn resolve_ty_assoc_item(
|
||||||
|
@ -223,7 +210,7 @@ impl<'a> InferenceContext<'a> {
|
||||||
ty: Ty,
|
ty: Ty,
|
||||||
name: &Name,
|
name: &Name,
|
||||||
id: ExprOrPatId,
|
id: ExprOrPatId,
|
||||||
) -> Option<(ValueNs, Option<Substitution>)> {
|
) -> Option<(ValueNs, Substitution)> {
|
||||||
if let TyKind::Error = ty.kind(Interner) {
|
if let TyKind::Error = ty.kind(Interner) {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
@ -233,70 +220,66 @@ impl<'a> InferenceContext<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
let canonical_ty = self.canonicalize(ty.clone());
|
let canonical_ty = self.canonicalize(ty.clone());
|
||||||
let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
|
|
||||||
|
|
||||||
let mut not_visible = None;
|
let mut not_visible = None;
|
||||||
let res = method_resolution::iterate_method_candidates(
|
let res = method_resolution::iterate_method_candidates(
|
||||||
&canonical_ty.value,
|
&canonical_ty.value,
|
||||||
self.db,
|
self.db,
|
||||||
self.table.trait_env.clone(),
|
self.table.trait_env.clone(),
|
||||||
&traits_in_scope,
|
self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
|
||||||
VisibleFromModule::Filter(self.resolver.module()),
|
VisibleFromModule::Filter(self.resolver.module()),
|
||||||
Some(name),
|
Some(name),
|
||||||
method_resolution::LookupMode::Path,
|
method_resolution::LookupMode::Path,
|
||||||
|_ty, item, visible| {
|
|_ty, item, visible| {
|
||||||
let (def, container) = match item {
|
|
||||||
AssocItemId::FunctionId(f) => {
|
|
||||||
(ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
|
|
||||||
}
|
|
||||||
AssocItemId::ConstId(c) => {
|
|
||||||
(ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
|
|
||||||
}
|
|
||||||
AssocItemId::TypeAliasId(_) => unreachable!(),
|
|
||||||
};
|
|
||||||
let substs = match container {
|
|
||||||
ItemContainerId::ImplId(impl_id) => {
|
|
||||||
let impl_substs = TyBuilder::subst_for_def(self.db, impl_id, None)
|
|
||||||
.fill_with_inference_vars(&mut self.table)
|
|
||||||
.build();
|
|
||||||
let impl_self_ty =
|
|
||||||
self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs);
|
|
||||||
self.unify(&impl_self_ty, &ty);
|
|
||||||
impl_substs
|
|
||||||
}
|
|
||||||
ItemContainerId::TraitId(trait_) => {
|
|
||||||
// we're picking this method
|
|
||||||
let trait_ref = TyBuilder::trait_ref(self.db, trait_)
|
|
||||||
.push(ty.clone())
|
|
||||||
.fill_with_inference_vars(&mut self.table)
|
|
||||||
.build();
|
|
||||||
self.push_obligation(trait_ref.clone().cast(Interner));
|
|
||||||
trait_ref.substitution
|
|
||||||
}
|
|
||||||
ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
|
|
||||||
never!("assoc item contained in module/extern block");
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if visible {
|
if visible {
|
||||||
Some((def, item, Some(substs), true))
|
Some((item, true))
|
||||||
} else {
|
} else {
|
||||||
if not_visible.is_none() {
|
if not_visible.is_none() {
|
||||||
not_visible = Some((def, item, Some(substs), false));
|
not_visible = Some((item, false));
|
||||||
}
|
}
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
let res = res.or(not_visible);
|
let res = res.or(not_visible);
|
||||||
if let Some((_, item, Some(ref substs), visible)) = res {
|
let (item, visible) = res?;
|
||||||
self.write_assoc_resolution(id, item, substs.clone());
|
|
||||||
if !visible {
|
let (def, container) = match item {
|
||||||
self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item })
|
AssocItemId::FunctionId(f) => {
|
||||||
|
(ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
|
||||||
}
|
}
|
||||||
|
AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container),
|
||||||
|
AssocItemId::TypeAliasId(_) => unreachable!(),
|
||||||
|
};
|
||||||
|
let substs = match container {
|
||||||
|
ItemContainerId::ImplId(impl_id) => {
|
||||||
|
let impl_substs = TyBuilder::subst_for_def(self.db, impl_id, None)
|
||||||
|
.fill_with_inference_vars(&mut self.table)
|
||||||
|
.build();
|
||||||
|
let impl_self_ty = self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs);
|
||||||
|
self.unify(&impl_self_ty, &ty);
|
||||||
|
impl_substs
|
||||||
|
}
|
||||||
|
ItemContainerId::TraitId(trait_) => {
|
||||||
|
// we're picking this method
|
||||||
|
let trait_ref = TyBuilder::trait_ref(self.db, trait_)
|
||||||
|
.push(ty.clone())
|
||||||
|
.fill_with_inference_vars(&mut self.table)
|
||||||
|
.build();
|
||||||
|
self.push_obligation(trait_ref.clone().cast(Interner));
|
||||||
|
trait_ref.substitution
|
||||||
|
}
|
||||||
|
ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
|
||||||
|
never!("assoc item contained in module/extern block");
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
self.write_assoc_resolution(id, item, substs.clone());
|
||||||
|
if !visible {
|
||||||
|
self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item });
|
||||||
}
|
}
|
||||||
res.map(|(def, _, substs, _)| (def, substs))
|
Some((def, substs))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn resolve_enum_variant_on_ty(
|
fn resolve_enum_variant_on_ty(
|
||||||
|
@ -304,7 +287,7 @@ impl<'a> InferenceContext<'a> {
|
||||||
ty: &Ty,
|
ty: &Ty,
|
||||||
name: &Name,
|
name: &Name,
|
||||||
id: ExprOrPatId,
|
id: ExprOrPatId,
|
||||||
) -> Option<(ValueNs, Option<Substitution>)> {
|
) -> Option<(ValueNs, Substitution)> {
|
||||||
let ty = self.resolve_ty_shallow(ty);
|
let ty = self.resolve_ty_shallow(ty);
|
||||||
let (enum_id, subst) = match ty.as_adt() {
|
let (enum_id, subst) = match ty.as_adt() {
|
||||||
Some((AdtId::EnumId(e), subst)) => (e, subst),
|
Some((AdtId::EnumId(e), subst)) => (e, subst),
|
||||||
|
@ -314,6 +297,6 @@ impl<'a> InferenceContext<'a> {
|
||||||
let local_id = enum_data.variant(name)?;
|
let local_id = enum_data.variant(name)?;
|
||||||
let variant = EnumVariantId { parent: enum_id, local_id };
|
let variant = EnumVariantId { parent: enum_id, local_id };
|
||||||
self.write_variant_resolution(id, variant.into());
|
self.write_variant_resolution(id, variant.into());
|
||||||
Some((ValueNs::EnumVariantId(variant), Some(subst.clone())))
|
Some((ValueNs::EnumVariantId(variant), subst.clone()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -704,14 +704,13 @@ impl<'a> fmt::Debug for InferenceTable<'a> {
|
||||||
mod resolve {
|
mod resolve {
|
||||||
use super::InferenceTable;
|
use super::InferenceTable;
|
||||||
use crate::{
|
use crate::{
|
||||||
ConcreteConst, Const, ConstData, ConstValue, DebruijnIndex, GenericArg, InferenceVar,
|
ConcreteConst, Const, ConstData, ConstScalar, ConstValue, DebruijnIndex, GenericArg,
|
||||||
Interner, Lifetime, Ty, TyVariableKind, VariableKind,
|
InferenceVar, Interner, Lifetime, Ty, TyVariableKind, VariableKind,
|
||||||
};
|
};
|
||||||
use chalk_ir::{
|
use chalk_ir::{
|
||||||
cast::Cast,
|
cast::Cast,
|
||||||
fold::{TypeFoldable, TypeFolder},
|
fold::{TypeFoldable, TypeFolder},
|
||||||
};
|
};
|
||||||
use hir_def::type_ref::ConstScalar;
|
|
||||||
|
|
||||||
#[derive(chalk_derive::FallibleTypeFolder)]
|
#[derive(chalk_derive::FallibleTypeFolder)]
|
||||||
#[has_interner(Interner)]
|
#[has_interner(Interner)]
|
||||||
|
|
|
@ -6,12 +6,12 @@ use chalk_ir::{
|
||||||
DebruijnIndex,
|
DebruijnIndex,
|
||||||
};
|
};
|
||||||
use hir_def::{
|
use hir_def::{
|
||||||
adt::VariantData, attr::Attrs, type_ref::ConstScalar, visibility::Visibility, AdtId,
|
adt::VariantData, attr::Attrs, visibility::Visibility, AdtId, EnumVariantId, HasModule, Lookup,
|
||||||
EnumVariantId, HasModule, Lookup, ModuleId, VariantId,
|
ModuleId, VariantId,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
db::HirDatabase, Binders, ConcreteConst, Const, ConstValue, Interner, Substitution, Ty, TyKind,
|
consteval::try_const_usize, db::HirDatabase, Binders, Interner, Substitution, Ty, TyKind,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Checks whether a type is visibly uninhabited from a particular module.
|
/// Checks whether a type is visibly uninhabited from a particular module.
|
||||||
|
@ -69,7 +69,7 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
|
||||||
TyKind::Adt(adt, subst) => self.visit_adt(adt.0, subst),
|
TyKind::Adt(adt, subst) => self.visit_adt(adt.0, subst),
|
||||||
TyKind::Never => BREAK_VISIBLY_UNINHABITED,
|
TyKind::Never => BREAK_VISIBLY_UNINHABITED,
|
||||||
TyKind::Tuple(..) => ty.super_visit_with(self, outer_binder),
|
TyKind::Tuple(..) => ty.super_visit_with(self, outer_binder),
|
||||||
TyKind::Array(item_ty, len) => match try_usize_const(len) {
|
TyKind::Array(item_ty, len) => match try_const_usize(len) {
|
||||||
Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
|
Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
|
||||||
Some(1..) => item_ty.super_visit_with(self, outer_binder),
|
Some(1..) => item_ty.super_visit_with(self, outer_binder),
|
||||||
},
|
},
|
||||||
|
@ -160,14 +160,3 @@ impl UninhabitedFrom<'_> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn try_usize_const(c: &Const) -> Option<u128> {
|
|
||||||
let data = &c.data(Interner);
|
|
||||||
if data.ty.kind(Interner) != &TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
match data.value {
|
|
||||||
ConstValue::Concrete(ConcreteConst { interned: ConstScalar::UInt(value) }) => Some(value),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
//! Implementation of the Chalk `Interner` trait, which allows customizing the
|
//! Implementation of the Chalk `Interner` trait, which allows customizing the
|
||||||
//! representation of the various objects Chalk deals with (types, goals etc.).
|
//! representation of the various objects Chalk deals with (types, goals etc.).
|
||||||
|
|
||||||
use crate::{chalk_db, tls, GenericArg};
|
use crate::{chalk_db, tls, ConstScalar, GenericArg};
|
||||||
use base_db::salsa::InternId;
|
use base_db::salsa::InternId;
|
||||||
use chalk_ir::{Goal, GoalData};
|
use chalk_ir::{Goal, GoalData};
|
||||||
use hir_def::{type_ref::ConstScalar, TypeAliasId};
|
use hir_def::TypeAliasId;
|
||||||
use intern::{impl_internable, Interned};
|
use intern::{impl_internable, Interned};
|
||||||
use smallvec::SmallVec;
|
use smallvec::SmallVec;
|
||||||
use std::{fmt, sync::Arc};
|
use std::{fmt, sync::Arc};
|
||||||
|
|
|
@ -11,7 +11,7 @@ use hir_def::{
|
||||||
};
|
};
|
||||||
use stdx::never;
|
use stdx::never;
|
||||||
|
|
||||||
use crate::{db::HirDatabase, Interner, Substitution, Ty};
|
use crate::{consteval::try_const_usize, db::HirDatabase, Interner, Substitution, Ty};
|
||||||
|
|
||||||
use self::adt::struct_variant_idx;
|
use self::adt::struct_variant_idx;
|
||||||
pub use self::{
|
pub use self::{
|
||||||
|
@ -122,17 +122,9 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
|
||||||
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
|
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
|
||||||
}
|
}
|
||||||
TyKind::Array(element, count) => {
|
TyKind::Array(element, count) => {
|
||||||
let count = match count.data(Interner).value {
|
let count = try_const_usize(&count).ok_or(LayoutError::UserError(
|
||||||
chalk_ir::ConstValue::Concrete(c) => match c.interned {
|
"mismatched type of const generic parameter".to_string(),
|
||||||
hir_def::type_ref::ConstScalar::Int(x) => x as u64,
|
))? as u64;
|
||||||
hir_def::type_ref::ConstScalar::UInt(x) => x as u64,
|
|
||||||
hir_def::type_ref::ConstScalar::Unknown => {
|
|
||||||
user_error!("unknown const generic parameter")
|
|
||||||
}
|
|
||||||
_ => user_error!("mismatched type of const generic parameter"),
|
|
||||||
},
|
|
||||||
_ => return Err(LayoutError::HasPlaceholder),
|
|
||||||
};
|
|
||||||
let element = layout_of_ty(db, element, krate)?;
|
let element = layout_of_ty(db, element, krate)?;
|
||||||
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
|
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
|
||||||
|
|
||||||
|
|
|
@ -76,17 +76,8 @@ pub fn layout_of_adt_query(
|
||||||
|min, max| Integer::repr_discr(&dl, &repr, min, max).unwrap_or((Integer::I8, false)),
|
|min, max| Integer::repr_discr(&dl, &repr, min, max).unwrap_or((Integer::I8, false)),
|
||||||
variants.iter_enumerated().filter_map(|(id, _)| {
|
variants.iter_enumerated().filter_map(|(id, _)| {
|
||||||
let AdtId::EnumId(e) = def else { return None };
|
let AdtId::EnumId(e) = def else { return None };
|
||||||
let d = match db
|
let d =
|
||||||
.const_eval_variant(EnumVariantId { parent: e, local_id: id.0 })
|
db.const_eval_discriminant(EnumVariantId { parent: e, local_id: id.0 }).ok()?;
|
||||||
.ok()?
|
|
||||||
{
|
|
||||||
crate::consteval::ComputedExpr::Literal(l) => match l {
|
|
||||||
hir_def::expr::Literal::Int(i, _) => i,
|
|
||||||
hir_def::expr::Literal::Uint(i, _) => i as i128,
|
|
||||||
_ => return None,
|
|
||||||
},
|
|
||||||
_ => return None,
|
|
||||||
};
|
|
||||||
Some((id, d))
|
Some((id, d))
|
||||||
}),
|
}),
|
||||||
// FIXME: The current code for niche-filling relies on variant indices
|
// FIXME: The current code for niche-filling relies on variant indices
|
||||||
|
|
|
@ -65,25 +65,17 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
|
||||||
})
|
})
|
||||||
.unwrap();
|
.unwrap();
|
||||||
let hir_body = db.body(adt_id.into());
|
let hir_body = db.body(adt_id.into());
|
||||||
let pat = hir_body
|
let b = hir_body.bindings.iter().find(|x| x.1.name.to_smol_str() == "goal").unwrap().0;
|
||||||
.pats
|
|
||||||
.iter()
|
|
||||||
.find(|x| match x.1 {
|
|
||||||
hir_def::expr::Pat::Bind { name, .. } => name.to_smol_str() == "goal",
|
|
||||||
_ => false,
|
|
||||||
})
|
|
||||||
.unwrap()
|
|
||||||
.0;
|
|
||||||
let infer = db.infer(adt_id.into());
|
let infer = db.infer(adt_id.into());
|
||||||
let goal_ty = infer.type_of_pat[pat].clone();
|
let goal_ty = infer.type_of_binding[b].clone();
|
||||||
layout_of_ty(&db, &goal_ty, module_id.krate())
|
layout_of_ty(&db, &goal_ty, module_id.krate())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[track_caller]
|
#[track_caller]
|
||||||
fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64) {
|
fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64) {
|
||||||
let l = eval_goal(ra_fixture, minicore).unwrap();
|
let l = eval_goal(ra_fixture, minicore).unwrap();
|
||||||
assert_eq!(l.size.bytes(), size);
|
assert_eq!(l.size.bytes(), size, "size mismatch");
|
||||||
assert_eq!(l.align.abi.bytes(), align);
|
assert_eq!(l.align.abi.bytes(), align, "align mismatch");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[track_caller]
|
#[track_caller]
|
||||||
|
@ -300,4 +292,9 @@ fn enums_with_discriminants() {
|
||||||
C, // implicitly becomes 256, so we need two bytes
|
C, // implicitly becomes 256, so we need two bytes
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
size_and_align! {
|
||||||
|
enum Goal {
|
||||||
|
A = 1, // This one is (perhaps surprisingly) zero sized.
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
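The new test case leans on a layout fact worth spelling out: an enum with a single variant needs no tag, so giving that variant an explicit discriminant does not make the type any bigger. A quick standalone check of the same claim:

    enum Goal {
        A = 1, // single variant: no tag is stored, so the enum is zero-sized
    }

    fn main() {
        assert_eq!(std::mem::size_of::<Goal>(), 0);
        assert_eq!(std::mem::align_of::<Goal>(), 1);
        let _ = Goal::A as i32; // the discriminant value 1 is still observable via a cast
    }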
|
@ -13,6 +13,7 @@ mod builder;
|
||||||
mod chalk_db;
|
mod chalk_db;
|
||||||
mod chalk_ext;
|
mod chalk_ext;
|
||||||
pub mod consteval;
|
pub mod consteval;
|
||||||
|
pub mod mir;
|
||||||
mod infer;
|
mod infer;
|
||||||
mod inhabitedness;
|
mod inhabitedness;
|
||||||
mod interner;
|
mod interner;
|
||||||
|
@ -34,7 +35,7 @@ mod tests;
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod test_db;
|
mod test_db;
|
||||||
|
|
||||||
use std::sync::Arc;
|
use std::{collections::HashMap, hash::Hash, sync::Arc};
|
||||||
|
|
||||||
use chalk_ir::{
|
use chalk_ir::{
|
||||||
fold::{Shift, TypeFoldable},
|
fold::{Shift, TypeFoldable},
|
||||||
|
@ -42,10 +43,11 @@ use chalk_ir::{
|
||||||
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
|
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
|
||||||
NoSolution, TyData,
|
NoSolution, TyData,
|
||||||
};
|
};
|
||||||
|
use either::Either;
|
||||||
use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId};
|
use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId};
|
||||||
use hir_expand::name;
|
use hir_expand::name;
|
||||||
use itertools::Either;
|
|
||||||
use la_arena::{Arena, Idx};
|
use la_arena::{Arena, Idx};
|
||||||
|
use mir::MirEvalError;
|
||||||
use rustc_hash::FxHashSet;
|
use rustc_hash::FxHashSet;
|
||||||
use traits::FnTrait;
|
use traits::FnTrait;
|
||||||
use utils::Generics;
|
use utils::Generics;
|
||||||
|
@ -145,6 +147,49 @@ pub type ConstrainedSubst = chalk_ir::ConstrainedSubst<Interner>;
|
||||||
pub type Guidance = chalk_solve::Guidance<Interner>;
|
pub type Guidance = chalk_solve::Guidance<Interner>;
|
||||||
pub type WhereClause = chalk_ir::WhereClause<Interner>;
|
pub type WhereClause = chalk_ir::WhereClause<Interner>;
|
||||||
|
|
||||||
|
/// A constant can have references to other things. The memory map's job is to hold
|
||||||
|
/// the necessary bits of memory of the const eval session to keep the constant
|
||||||
|
/// meaningful.
|
||||||
|
#[derive(Debug, Default, Clone, PartialEq, Eq)]
|
||||||
|
pub struct MemoryMap(pub HashMap<usize, Vec<u8>>);
|
||||||
|
|
||||||
|
impl MemoryMap {
|
||||||
|
fn insert(&mut self, addr: usize, x: Vec<u8>) {
|
||||||
|
self.0.insert(addr, x);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This function converts each address with a function `f`, which gets the byte intervals and assigns an address
|
||||||
|
/// to them. It is useful when you want to load a constant with a memory map into new memory. You can pass an
|
||||||
|
/// allocator function as `f` and it will return a mapping of old addresses to new addresses.
|
||||||
|
fn transform_addresses(
|
||||||
|
&self,
|
||||||
|
mut f: impl FnMut(&[u8]) -> Result<usize, MirEvalError>,
|
||||||
|
) -> Result<HashMap<usize, usize>, MirEvalError> {
|
||||||
|
self.0.iter().map(|x| Ok((*x.0, f(x.1)?))).collect()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
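`transform_addresses` above is designed to be fed an allocator-like closure and to return the old-address to new-address relocation table for a constant's backing memory. A self-contained sketch of that usage with a toy bump allocator; the struct and error type here only mirror the shape of the real ones:

    use std::collections::HashMap;

    #[derive(Debug)]
    struct EvalError;

    // A simplified mirror of the memory map: old address -> bytes stored there.
    struct MemoryMapSketch(HashMap<usize, Vec<u8>>);

    impl MemoryMapSketch {
        // Same shape as `transform_addresses`: hand each byte block to `f`,
        // collect the address translation it produces.
        fn transform_addresses(
            &self,
            mut f: impl FnMut(&[u8]) -> Result<usize, EvalError>,
        ) -> Result<HashMap<usize, usize>, EvalError> {
            self.0.iter().map(|(addr, bytes)| Ok((*addr, f(bytes)?))).collect()
        }
    }

    fn main() -> Result<(), EvalError> {
        let mut map = MemoryMapSketch(HashMap::new());
        map.0.insert(0x1000, vec![1, 2, 3]);
        map.0.insert(0x2000, vec![4]);

        // Toy bump allocator: copy each block into `heap` and report its new offset.
        let mut heap: Vec<u8> = Vec::new();
        let relocation = map.transform_addresses(|bytes| {
            let new_addr = heap.len();
            heap.extend_from_slice(bytes);
            Ok(new_addr)
        })?;

        assert_eq!(relocation.len(), 2);
        Ok(())
    }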
||||||
|
/// A concrete constant value
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
|
pub enum ConstScalar {
|
||||||
|
Bytes(Vec<u8>, MemoryMap),
|
||||||
|
/// Case of an unknown value that rustc might know but we don't
|
||||||
|
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
|
||||||
|
// constants
|
||||||
|
// https://github.com/rust-lang/rust-analyzer/pull/8813#issuecomment-840679177
|
||||||
|
// https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348
|
||||||
|
Unknown,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Hash for ConstScalar {
|
||||||
|
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||||
|
core::mem::discriminant(self).hash(state);
|
||||||
|
if let ConstScalar::Bytes(b, _) = self {
|
||||||
|
b.hash(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
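The manual `Hash` impl above hashes the variant discriminant plus the byte payload while skipping the `MemoryMap`; hashing a subset of the compared fields is sound, since equal values still hash equally. The same pattern on a toy type (nothing here is rust-analyzer API):

    use std::collections::HashSet;
    use std::hash::{Hash, Hasher};
    use std::mem;

    #[derive(Debug, Clone, PartialEq, Eq)]
    enum Scalar {
        Bytes(Vec<u8>, Vec<String>), // second field: expensive metadata we skip when hashing
        Unknown,
    }

    impl Hash for Scalar {
        fn hash<H: Hasher>(&self, state: &mut H) {
            // Distinguish variants first, then hash only the cheap payload.
            mem::discriminant(self).hash(state);
            if let Scalar::Bytes(bytes, _) = self {
                bytes.hash(state);
            }
        }
    }

    fn main() {
        let mut set = HashSet::new();
        set.insert(Scalar::Bytes(vec![1, 2], vec!["meta".into()]));
        set.insert(Scalar::Unknown);
        assert!(set.contains(&Scalar::Unknown));
    }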
||||||
/// Return an index of a parameter in the generic type parameter list by its id.
|
/// Return an index of a parameter in the generic type parameter list by its id.
|
||||||
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
|
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
|
||||||
generics(db.upcast(), id.parent).param_idx(id)
|
generics(db.upcast(), id.parent).param_idx(id)
|
||||||
|
|
|
@ -16,6 +16,7 @@ use chalk_ir::{
|
||||||
cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
|
cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use either::Either;
|
||||||
use hir_def::{
|
use hir_def::{
|
||||||
adt::StructKind,
|
adt::StructKind,
|
||||||
body::{Expander, LowerCtx},
|
body::{Expander, LowerCtx},
|
||||||
|
@ -26,16 +27,13 @@ use hir_def::{
|
||||||
lang_item::{lang_attr, LangItem},
|
lang_item::{lang_attr, LangItem},
|
||||||
path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments},
|
path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments},
|
||||||
resolver::{HasResolver, Resolver, TypeNs},
|
resolver::{HasResolver, Resolver, TypeNs},
|
||||||
type_ref::{
|
type_ref::{ConstRefOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef},
|
||||||
ConstScalarOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef,
|
|
||||||
},
|
|
||||||
AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
|
AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
|
||||||
HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId, StructId,
|
HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId, StructId,
|
||||||
TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
|
TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
|
||||||
};
|
};
|
||||||
use hir_expand::{name::Name, ExpandResult};
|
use hir_expand::{name::Name, ExpandResult};
|
||||||
use intern::Interned;
|
use intern::Interned;
|
||||||
use itertools::Either;
|
|
||||||
use la_arena::{Arena, ArenaMap};
|
use la_arena::{Arena, ArenaMap};
|
||||||
use rustc_hash::FxHashSet;
|
use rustc_hash::FxHashSet;
|
||||||
use smallvec::SmallVec;
|
use smallvec::SmallVec;
|
||||||
|
@ -44,7 +42,7 @@ use syntax::ast;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
all_super_traits,
|
all_super_traits,
|
||||||
consteval::{intern_const_scalar, path_to_const, unknown_const, unknown_const_as_generic},
|
consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic},
|
||||||
db::HirDatabase,
|
db::HirDatabase,
|
||||||
make_binders,
|
make_binders,
|
||||||
mapping::{from_chalk_trait_id, ToChalk},
|
mapping::{from_chalk_trait_id, ToChalk},
|
||||||
|
@ -524,6 +522,10 @@ impl<'a> TyLoweringContext<'a> {
|
||||||
};
|
};
|
||||||
return (ty, None);
|
return (ty, None);
|
||||||
}
|
}
|
||||||
|
TypeNs::TraitAliasId(_) => {
|
||||||
|
// FIXME(trait_alias): Implement trait alias.
|
||||||
|
return (TyKind::Error.intern(Interner), None);
|
||||||
|
}
|
||||||
TypeNs::GenericParam(param_id) => {
|
TypeNs::GenericParam(param_id) => {
|
||||||
let generics = generics(
|
let generics = generics(
|
||||||
self.db.upcast(),
|
self.db.upcast(),
|
||||||
|
@ -879,6 +881,7 @@ impl<'a> TyLoweringContext<'a> {
|
||||||
) -> Option<TraitRef> {
|
) -> Option<TraitRef> {
|
||||||
let resolved =
|
let resolved =
|
||||||
match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? {
|
match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? {
|
||||||
|
// FIXME(trait_alias): We need to handle trait alias here.
|
||||||
TypeNs::TraitId(tr) => tr,
|
TypeNs::TraitId(tr) => tr,
|
||||||
_ => return None,
|
_ => return None,
|
||||||
};
|
};
|
||||||
|
@ -968,7 +971,7 @@ impl<'a> TyLoweringContext<'a> {
|
||||||
// - `Destruct` impls are built-in in 1.62 (current nightlies as of 08-04-2022), so until
|
// - `Destruct` impls are built-in in 1.62 (current nightlies as of 08-04-2022), so until
|
||||||
// the builtin impls are supported by Chalk, we ignore them here.
|
// the builtin impls are supported by Chalk, we ignore them here.
|
||||||
if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
|
if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
|
||||||
if lang == "drop" || lang == "destruct" {
|
if matches!(lang, LangItem::Drop | LangItem::Destruct) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1444,6 +1447,7 @@ pub(crate) fn trait_environment_query(
|
||||||
GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
|
GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
|
||||||
GenericDefId::AdtId(_) => None,
|
GenericDefId::AdtId(_) => None,
|
||||||
GenericDefId::TraitId(_) => None,
|
GenericDefId::TraitId(_) => None,
|
||||||
|
GenericDefId::TraitAliasId(_) => None,
|
||||||
GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
|
GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
|
||||||
GenericDefId::ImplId(_) => None,
|
GenericDefId::ImplId(_) => None,
|
||||||
GenericDefId::EnumVariantId(_) => None,
|
GenericDefId::EnumVariantId(_) => None,
|
||||||
|
@ -1583,10 +1587,10 @@ pub(crate) fn generic_defaults_recover(
|
||||||
.iter_id()
|
.iter_id()
|
||||||
.map(|id| {
|
.map(|id| {
|
||||||
let val = match id {
|
let val = match id {
|
||||||
itertools::Either::Left(_) => {
|
Either::Left(_) => {
|
||||||
GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
|
GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
|
||||||
}
|
}
|
||||||
itertools::Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
|
Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
|
||||||
};
|
};
|
||||||
crate::make_binders(db, &generic_params, val)
|
crate::make_binders(db, &generic_params, val)
|
||||||
})
|
})
|
||||||
|
@ -1919,7 +1923,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
|
||||||
arg: &'a GenericArg,
|
arg: &'a GenericArg,
|
||||||
this: &mut T,
|
this: &mut T,
|
||||||
for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
|
for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
|
||||||
for_const: impl FnOnce(&mut T, &ConstScalarOrPath, Ty) -> Const + 'a,
|
for_const: impl FnOnce(&mut T, &ConstRefOrPath, Ty) -> Const + 'a,
|
||||||
) -> Option<crate::GenericArg> {
|
) -> Option<crate::GenericArg> {
|
||||||
let kind = match kind_id {
|
let kind = match kind_id {
|
||||||
Either::Left(_) => ParamKind::Type,
|
Either::Left(_) => ParamKind::Type,
|
||||||
|
@ -1947,7 +1951,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
|
||||||
let p = p.mod_path();
|
let p = p.mod_path();
|
||||||
if p.kind == PathKind::Plain {
|
if p.kind == PathKind::Plain {
|
||||||
if let [n] = p.segments() {
|
if let [n] = p.segments() {
|
||||||
let c = ConstScalarOrPath::Path(n.clone());
|
let c = ConstRefOrPath::Path(n.clone());
|
||||||
return Some(
|
return Some(
|
||||||
GenericArgData::Const(for_const(this, &c, c_ty)).intern(Interner),
|
GenericArgData::Const(for_const(this, &c, c_ty)).intern(Interner),
|
||||||
);
|
);
|
||||||
|
@ -1964,14 +1968,14 @@ pub(crate) fn const_or_path_to_chalk(
|
||||||
db: &dyn HirDatabase,
|
db: &dyn HirDatabase,
|
||||||
resolver: &Resolver,
|
resolver: &Resolver,
|
||||||
expected_ty: Ty,
|
expected_ty: Ty,
|
||||||
value: &ConstScalarOrPath,
|
value: &ConstRefOrPath,
|
||||||
mode: ParamLoweringMode,
|
mode: ParamLoweringMode,
|
||||||
args: impl FnOnce() -> Generics,
|
args: impl FnOnce() -> Generics,
|
||||||
debruijn: DebruijnIndex,
|
debruijn: DebruijnIndex,
|
||||||
) -> Const {
|
) -> Const {
|
||||||
match value {
|
match value {
|
||||||
ConstScalarOrPath::Scalar(s) => intern_const_scalar(*s, expected_ty),
|
ConstRefOrPath::Scalar(s) => intern_const_ref(db, s, expected_ty, resolver.krate()),
|
||||||
ConstScalarOrPath::Path(n) => {
|
ConstRefOrPath::Path(n) => {
|
||||||
let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
|
let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
|
||||||
path_to_const(db, resolver, &path, mode, args, debruijn)
|
path_to_const(db, resolver, &path, mode, args, debruijn)
|
||||||
.unwrap_or_else(|| unknown_const(expected_ty))
|
.unwrap_or_else(|| unknown_const(expected_ty))
|
||||||
|
|
|
@ -579,8 +579,8 @@ impl ReceiverAdjustments {
|
||||||
ty = new_ty.clone();
|
ty = new_ty.clone();
|
||||||
adjust.push(Adjustment {
|
adjust.push(Adjustment {
|
||||||
kind: Adjust::Deref(match kind {
|
kind: Adjust::Deref(match kind {
|
||||||
// FIXME should we know the mutability here?
|
// FIXME should we know the mutability here, when autoref is `None`?
|
||||||
AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
|
AutoderefKind::Overloaded => Some(OverloadedDeref(self.autoref)),
|
||||||
AutoderefKind::Builtin => None,
|
AutoderefKind::Builtin => None,
|
||||||
}),
|
}),
|
||||||
target: new_ty,
|
target: new_ty,
|
||||||
|
@@ -660,10 +660,10 @@ pub fn lookup_impl_const(
     env: Arc<TraitEnvironment>,
     const_id: ConstId,
     subs: Substitution,
-) -> ConstId {
+) -> (ConstId, Substitution) {
     let trait_id = match const_id.lookup(db.upcast()).container {
         ItemContainerId::TraitId(id) => id,
-        _ => return const_id,
+        _ => return (const_id, subs),
     };
     let substitution = Substitution::from_iter(Interner, subs.iter(Interner));
     let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution };
@@ -671,12 +671,14 @@ pub fn lookup_impl_const(
     let const_data = db.const_data(const_id);
     let name = match const_data.name.as_ref() {
         Some(name) => name,
-        None => return const_id,
+        None => return (const_id, subs),
     };

     lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
-        .and_then(|assoc| if let AssocItemId::ConstId(id) = assoc { Some(id) } else { None })
-        .unwrap_or(const_id)
+        .and_then(
+            |assoc| if let (AssocItemId::ConstId(id), s) = assoc { Some((id, s)) } else { None },
+        )
+        .unwrap_or((const_id, subs))
 }

 /// Looks up the impl method that actually runs for the trait method `func`.

@@ -687,10 +689,10 @@ pub fn lookup_impl_method(
     env: Arc<TraitEnvironment>,
     func: FunctionId,
     fn_subst: Substitution,
-) -> FunctionId {
+) -> (FunctionId, Substitution) {
     let trait_id = match func.lookup(db.upcast()).container {
         ItemContainerId::TraitId(id) => id,
-        _ => return func,
+        _ => return (func, fn_subst),
     };
     let trait_params = db.generic_params(trait_id.into()).type_or_consts.len();
     let fn_params = fn_subst.len(Interner) - trait_params;
@@ -701,8 +703,14 @@ pub fn lookup_impl_method(

     let name = &db.function_data(func).name;
     lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
-        .and_then(|assoc| if let AssocItemId::FunctionId(id) = assoc { Some(id) } else { None })
-        .unwrap_or(func)
+        .and_then(|assoc| {
+            if let (AssocItemId::FunctionId(id), subst) = assoc {
+                Some((id, subst))
+            } else {
+                None
+            }
+        })
+        .unwrap_or((func, fn_subst))
 }

 fn lookup_impl_assoc_item_for_trait_ref(
|
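The two hunks above change `lookup_impl_const` and `lookup_impl_method` to hand back the substitution for the resolved impl item together with the item itself, falling back to the trait item plus the caller's original substitution when no impl matches. A minimal standalone sketch of that return-pair-with-fallback pattern, with toy `FunctionId`/`Subst` types and a hypothetical `find_impl_method` helper rather than the crate's real API:

// Toy types; not rust-analyzer's FunctionId/Substitution.
#[derive(Debug, Clone)]
struct Subst(Vec<String>);

#[derive(Debug, Clone, Copy, PartialEq)]
struct FunctionId(u32);

/// Hypothetical impl lookup: `None` means "no impl found, keep the trait method".
fn find_impl_method(trait_method: FunctionId) -> Option<(FunctionId, Subst)> {
    if trait_method.0 == 1 {
        Some((FunctionId(100), Subst(vec!["i32".to_owned()])))
    } else {
        None
    }
}

fn lookup_impl_method(func: FunctionId, fn_subst: Subst) -> (FunctionId, Subst) {
    // Either the impl item with the substitution inferred for that impl,
    // or the unchanged trait item with the substitution we were given.
    find_impl_method(func).unwrap_or((func, fn_subst))
}

fn main() {
    assert_eq!(lookup_impl_method(FunctionId(1), Subst(vec![])).0, FunctionId(100));
    assert_eq!(lookup_impl_method(FunctionId(2), Subst(vec![])).0, FunctionId(2));
    println!("fallback pattern works");
}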
@@ -710,7 +718,7 @@ fn lookup_impl_assoc_item_for_trait_ref(
     db: &dyn HirDatabase,
     env: Arc<TraitEnvironment>,
     name: &Name,
-) -> Option<AssocItemId> {
+) -> Option<(AssocItemId, Substitution)> {
     let self_ty = trait_ref.self_type_parameter(Interner);
     let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?;
     let impls = db.trait_impls_in_deps(env.krate);
@@ -718,8 +726,8 @@ fn lookup_impl_assoc_item_for_trait_ref(

     let table = InferenceTable::new(db, env);

-    let impl_data = find_matching_impl(impls, table, trait_ref)?;
-    impl_data.items.iter().find_map(|&it| match it {
+    let (impl_data, impl_subst) = find_matching_impl(impls, table, trait_ref)?;
+    let item = impl_data.items.iter().find_map(|&it| match it {
         AssocItemId::FunctionId(f) => {
             (db.function_data(f).name == *name).then_some(AssocItemId::FunctionId(f))
         }
@@ -730,14 +738,15 @@ fn lookup_impl_assoc_item_for_trait_ref(
             .map(|n| n == name)
             .and_then(|result| if result { Some(AssocItemId::ConstId(c)) } else { None }),
         AssocItemId::TypeAliasId(_) => None,
-    })
+    })?;
+    Some((item, impl_subst))
 }

 fn find_matching_impl(
     mut impls: impl Iterator<Item = ImplId>,
     mut table: InferenceTable<'_>,
     actual_trait_ref: TraitRef,
-) -> Option<Arc<ImplData>> {
+) -> Option<(Arc<ImplData>, Substitution)> {
     let db = table.db;
     loop {
         let impl_ = impls.next()?;
@@ -758,7 +767,7 @@ fn find_matching_impl(
                 .into_iter()
                 .map(|b| b.cast(Interner));
             let goal = crate::Goal::all(Interner, wcs);
-            table.try_obligation(goal).map(|_| impl_data)
+            table.try_obligation(goal).map(|_| (impl_data, table.resolve_completely(impl_substs)))
         });
         if r.is_some() {
             break r;

@@ -821,9 +830,9 @@ pub fn iterate_method_candidates_dyn(

     let mut table = InferenceTable::new(db, env.clone());
     let ty = table.instantiate_canonical(ty.clone());
-    let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
+    let deref_chain = autoderef_method_receiver(&mut table, ty);

-    let result = deref_chain.into_iter().zip(adj).try_for_each(|(receiver_ty, adj)| {
+    let result = deref_chain.into_iter().try_for_each(|(receiver_ty, adj)| {
         iterate_method_candidates_with_autoref(
             &receiver_ty,
             adj,
@@ -867,16 +876,20 @@ fn iterate_method_candidates_with_autoref(
         return ControlFlow::Continue(());
     }

-    iterate_method_candidates_by_receiver(
-        receiver_ty,
-        first_adjustment.clone(),
-        db,
-        env.clone(),
-        traits_in_scope,
-        visible_from_module,
-        name,
-        &mut callback,
-    )?;
+    let mut iterate_method_candidates_by_receiver = move |receiver_ty, first_adjustment| {
+        iterate_method_candidates_by_receiver(
+            receiver_ty,
+            first_adjustment,
+            db,
+            env.clone(),
+            traits_in_scope,
+            visible_from_module,
+            name,
+            &mut callback,
+        )
+    };
+
+    iterate_method_candidates_by_receiver(receiver_ty, first_adjustment.clone())?;

     let refed = Canonical {
         value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
@@ -884,16 +897,7 @@ fn iterate_method_candidates_with_autoref(
         binders: receiver_ty.binders.clone(),
     };

-    iterate_method_candidates_by_receiver(
-        &refed,
-        first_adjustment.with_autoref(Mutability::Not),
-        db,
-        env.clone(),
-        traits_in_scope,
-        visible_from_module,
-        name,
-        &mut callback,
-    )?;
+    iterate_method_candidates_by_receiver(&refed, first_adjustment.with_autoref(Mutability::Not))?;

     let ref_muted = Canonical {
         value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
@@ -904,12 +908,6 @@ fn iterate_method_candidates_with_autoref(
     iterate_method_candidates_by_receiver(
         &ref_muted,
         first_adjustment.with_autoref(Mutability::Mut),
-        db,
-        env,
-        traits_in_scope,
-        visible_from_module,
-        name,
-        &mut callback,
     )
 }

|
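The refactor above replaces three near-identical calls, which differ only in the receiver type and adjustment, with a local closure that captures the shared arguments. A self-contained sketch of the same shape, using a toy search function and toy data rather than the real method-resolution API:

// Toy stand-ins; not rust-analyzer's types or functions.
fn search(receiver: &str, adjustment: u32, db: &str, scope: &[&str]) -> usize {
    scope.iter().filter(|t| t.contains(receiver)).count() + adjustment as usize + db.len()
}

fn search_with_autoref(receiver: &str, db: &str, scope: &[&str]) -> usize {
    // One closure, reused for the by-value, `&`, and `&mut` receiver forms,
    // so `db` and `scope` only have to be spelled out once.
    let search_by_receiver =
        |receiver: &str, adjustment: u32| search(receiver, adjustment, db, scope);

    let by_value = search_by_receiver(receiver, 0);
    let by_ref = search_by_receiver(&format!("&{receiver}"), 1);
    let by_mut_ref = search_by_receiver(&format!("&mut {receiver}"), 2);
    by_value + by_ref + by_mut_ref
}

fn main() {
    let scope = ["Foo", "&Foo", "&mut Foo"];
    println!("{}", search_with_autoref("Foo", "db", &scope));
}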
@@ -1210,8 +1208,8 @@ pub fn resolve_indexing_op(
 ) -> Option<ReceiverAdjustments> {
     let mut table = InferenceTable::new(db, env.clone());
     let ty = table.instantiate_canonical(ty);
-    let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
-    for (ty, adj) in deref_chain.into_iter().zip(adj) {
+    let deref_chain = autoderef_method_receiver(&mut table, ty);
+    for (ty, adj) in deref_chain {
         let goal = generic_implements_goal(db, env.clone(), index_trait, &ty);
         if db.trait_solve(env.krate, goal.cast(Interner)).is_some() {
             return Some(adj);
@@ -1421,25 +1419,24 @@ fn generic_implements_goal(
 fn autoderef_method_receiver(
     table: &mut InferenceTable<'_>,
     ty: Ty,
-) -> (Vec<Canonical<Ty>>, Vec<ReceiverAdjustments>) {
-    let (mut deref_chain, mut adjustments): (Vec<_>, Vec<_>) = (Vec::new(), Vec::new());
+) -> Vec<(Canonical<Ty>, ReceiverAdjustments)> {
+    let mut deref_chain: Vec<_> = Vec::new();
     let mut autoderef = autoderef::Autoderef::new(table, ty);
     while let Some((ty, derefs)) = autoderef.next() {
-        deref_chain.push(autoderef.table.canonicalize(ty).value);
-        adjustments.push(ReceiverAdjustments {
-            autoref: None,
-            autoderefs: derefs,
-            unsize_array: false,
-        });
+        deref_chain.push((
+            autoderef.table.canonicalize(ty).value,
+            ReceiverAdjustments { autoref: None, autoderefs: derefs, unsize_array: false },
+        ));
     }
     // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
-    if let (Some((TyKind::Array(parameters, _), binders)), Some(adj)) = (
-        deref_chain.last().map(|ty| (ty.value.kind(Interner), ty.binders.clone())),
-        adjustments.last().cloned(),
-    ) {
+    if let Some((TyKind::Array(parameters, _), binders, adj)) =
+        deref_chain.last().map(|(ty, adj)| (ty.value.kind(Interner), ty.binders.clone(), adj))
+    {
         let unsized_ty = TyKind::Slice(parameters.clone()).intern(Interner);
-        deref_chain.push(Canonical { value: unsized_ty, binders });
-        adjustments.push(ReceiverAdjustments { unsize_array: true, ..adj });
+        deref_chain.push((
+            Canonical { value: unsized_ty, binders },
+            ReceiverAdjustments { unsize_array: true, ..adj.clone() },
+        ));
     }
-    (deref_chain, adjustments)
+    deref_chain
 }
|
|
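The rewrite above merges two parallel vectors (the deref chain and the per-step receiver adjustments) into a single vector of pairs, so callers no longer have to zip them and the two lists can never get out of sync. A standalone toy version of that shape, using strings for types and a simplified adjustment record instead of the crate's real ones:

// Toy model; not rust-analyzer's Ty/ReceiverAdjustments.
#[derive(Debug, Clone)]
struct Adjustments {
    autoderefs: usize,
    unsize_array: bool,
}

fn deref_chain(mut ty: String) -> Vec<(String, Adjustments)> {
    let mut chain = Vec::new();
    let mut steps = 0;
    loop {
        chain.push((ty.clone(), Adjustments { autoderefs: steps, unsize_array: false }));
        // Toy "autoderef": peel one leading `&`.
        match ty.strip_prefix('&') {
            Some(rest) => ty = rest.to_owned(),
            None => break,
        }
        steps += 1;
    }
    // Last step: toy "array unsizing", mirroring the `[T; N]` -> `[T]` case above.
    if let Some((last, adj)) = chain.last().cloned() {
        if last.starts_with('[') && last.contains(';') {
            let slice = format!("[{}]", last[1..].split(';').next().unwrap().trim());
            chain.push((slice, Adjustments { unsize_array: true, ..adj }));
        }
    }
    chain
}

fn main() {
    for (ty, adj) in deref_chain("&&[u8; 4]".to_owned()) {
        println!("{ty} via {adj:?}");
    }
}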
crates/hir-ty/src/mir.rs (new file, 863 lines)
|
@ -0,0 +1,863 @@
|
||||||
|
//! MIR definitions and implementation
|
||||||
|
|
||||||
|
use std::{fmt::Display, iter};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
infer::PointerCast, Const, ConstScalar, InferenceResult, Interner, MemoryMap, Substitution, Ty,
|
||||||
|
};
|
||||||
|
use chalk_ir::Mutability;
|
||||||
|
use hir_def::{
|
||||||
|
expr::{BindingId, Expr, ExprId, Ordering, PatId},
|
||||||
|
DefWithBodyId, FieldId, UnionId, VariantId,
|
||||||
|
};
|
||||||
|
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
|
||||||
|
|
||||||
|
mod eval;
|
||||||
|
mod lower;
|
||||||
|
mod borrowck;
|
||||||
|
mod pretty;
|
||||||
|
|
||||||
|
pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
|
||||||
|
pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError};
|
||||||
|
pub use lower::{lower_to_mir, mir_body_query, mir_body_recover, MirLowerError};
|
||||||
|
use smallvec::{smallvec, SmallVec};
|
||||||
|
use stdx::impl_from;
|
||||||
|
|
||||||
|
use super::consteval::{intern_const_scalar, try_const_usize};
|
||||||
|
|
||||||
|
pub type BasicBlockId = Idx<BasicBlock>;
|
||||||
|
pub type LocalId = Idx<Local>;
|
||||||
|
|
||||||
|
fn return_slot() -> LocalId {
|
||||||
|
LocalId::from_raw(RawIdx::from(0))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq)]
|
||||||
|
pub struct Local {
|
||||||
|
pub ty: Ty,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An operand in MIR represents a "value" in Rust, the definition of which is undecided and part of
|
||||||
|
/// the memory model. One proposal for a definition of values can be found [on UCG][value-def].
|
||||||
|
///
|
||||||
|
/// [value-def]: https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/value-domain.md
|
||||||
|
///
|
||||||
|
/// The most common way to create values is via loading a place. Loading a place is an operation
|
||||||
|
/// which reads the memory of the place and converts it to a value. This is a fundamentally *typed*
|
||||||
|
/// operation. The nature of the value produced depends on the type of the conversion. Furthermore,
|
||||||
|
/// there may be other effects: if the type has a validity constraint loading the place might be UB
|
||||||
|
/// if the validity constraint is not met.
|
||||||
|
///
|
||||||
|
/// **Needs clarification:** Ralf proposes that loading a place not have side-effects.
|
||||||
|
/// This is what is implemented in miri today. Are these the semantics we want for MIR? Is this
|
||||||
|
/// something we can even decide without knowing more about Rust's memory model?
|
||||||
|
///
|
||||||
|
/// **Needs clarification:** Is loading a place that has its variant index set well-formed? Miri
|
||||||
|
/// currently implements it, but it seems like this may be something to check against in the
|
||||||
|
/// validator.
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
|
pub enum Operand {
|
||||||
|
/// Creates a value by loading the given place.
|
||||||
|
///
|
||||||
|
/// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there
|
||||||
|
/// is no such requirement.
|
||||||
|
Copy(Place),
|
||||||
|
|
||||||
|
/// Creates a value by performing a load of the place, just like the `Copy` operand.
|
||||||
|
///
|
||||||
|
/// This *may* additionally overwrite the place with `uninit` bytes, depending on how we decide
|
||||||
|
/// in [UCG#188]. You should not emit MIR that may attempt a subsequent second load of this
|
||||||
|
/// place without first re-initializing it.
|
||||||
|
///
|
||||||
|
/// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188
|
||||||
|
Move(Place),
|
||||||
|
/// Constants are already semantically values, and remain unchanged.
|
||||||
|
Constant(Const),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Operand {
|
||||||
|
fn from_concrete_const(data: Vec<u8>, memory_map: MemoryMap, ty: Ty) -> Self {
|
||||||
|
Operand::Constant(intern_const_scalar(ConstScalar::Bytes(data, memory_map), ty))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_bytes(data: Vec<u8>, ty: Ty) -> Self {
|
||||||
|
Operand::from_concrete_const(data, MemoryMap::default(), ty)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn const_zst(ty: Ty) -> Operand {
|
||||||
|
Self::from_bytes(vec![], ty)
|
||||||
|
}
|
||||||
|
}
|
||||||
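As a rough standalone model of the operand kinds documented above (string-keyed toy locals instead of real places; this is not the crate's `Operand`), `Copy` leaves the source place initialized, `Move` may invalidate it, and `Constant` needs no place at all:

use std::collections::HashMap;

#[derive(Debug, Clone)]
enum Operand {
    Copy(&'static str),
    Move(&'static str),
    Constant(i64),
}

fn eval(op: &Operand, locals: &mut HashMap<&'static str, Option<i64>>) -> i64 {
    match op {
        Operand::Copy(place) => locals[place].expect("read of uninitialized place"),
        Operand::Move(place) => {
            // Model "may overwrite the place with uninit bytes" by clearing it.
            locals.get_mut(place).unwrap().take().expect("read of uninitialized place")
        }
        Operand::Constant(c) => *c,
    }
}

fn main() {
    let mut locals = HashMap::from([("x", Some(3)), ("y", Some(4))]);
    assert_eq!(eval(&Operand::Copy("x"), &mut locals), 3);
    assert_eq!(eval(&Operand::Copy("x"), &mut locals), 3); // still initialized
    assert_eq!(eval(&Operand::Move("y"), &mut locals), 4);
    assert!(locals["y"].is_none()); // moved out
    assert_eq!(eval(&Operand::Constant(7), &mut locals), 7);
}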
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
|
pub enum ProjectionElem<V, T> {
|
||||||
|
Deref,
|
||||||
|
Field(FieldId),
|
||||||
|
TupleField(usize),
|
||||||
|
Index(V),
|
||||||
|
ConstantIndex { offset: u64, min_length: u64, from_end: bool },
|
||||||
|
Subslice { from: u64, to: u64, from_end: bool },
|
||||||
|
//Downcast(Option<Symbol>, VariantIdx),
|
||||||
|
OpaqueCast(T),
|
||||||
|
}
|
||||||
|
|
||||||
|
type PlaceElem = ProjectionElem<LocalId, Ty>;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
|
pub struct Place {
|
||||||
|
pub local: LocalId,
|
||||||
|
pub projection: Vec<PlaceElem>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<LocalId> for Place {
|
||||||
|
fn from(local: LocalId) -> Self {
|
||||||
|
Self { local, projection: vec![] }
|
||||||
|
}
|
||||||
|
}
|
||||||
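A place is a base local plus a projection list applied outside-in, as defined above. The following standalone sketch (string locals and a tiny projection enum, not the real `Place`/`PlaceElem`) renders such a place back into surface-Rust-like syntax:

#[derive(Debug, Clone, PartialEq)]
enum Projection {
    Deref,
    Field(&'static str),
    Index(&'static str),
}

#[derive(Debug, Clone)]
struct Place {
    local: &'static str,
    projection: Vec<Projection>,
}

fn render(place: &Place) -> String {
    let mut s = place.local.to_owned();
    for proj in &place.projection {
        s = match proj {
            Projection::Deref => format!("(*{s})"),
            Projection::Field(f) => format!("{s}.{f}"),
            Projection::Index(i) => format!("{s}[{i}]"),
        };
    }
    s
}

fn main() {
    // Roughly the place written `(*p).values[i]` in surface Rust.
    let place = Place {
        local: "p",
        projection: vec![Projection::Deref, Projection::Field("values"), Projection::Index("i")],
    };
    assert_eq!(render(&place), "(*p).values[i]");
}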
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
|
pub enum AggregateKind {
|
||||||
|
/// The type is of the element
|
||||||
|
Array(Ty),
|
||||||
|
/// The type is of the tuple
|
||||||
|
Tuple(Ty),
|
||||||
|
Adt(VariantId, Substitution),
|
||||||
|
Union(UnionId, FieldId),
|
||||||
|
//Closure(LocalDefId, SubstsRef),
|
||||||
|
//Generator(LocalDefId, SubstsRef, Movability),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
|
||||||
|
pub struct SwitchTargets {
|
||||||
|
/// Possible values. The locations to branch to in each case
|
||||||
|
/// are found in the corresponding indices from the `targets` vector.
|
||||||
|
values: SmallVec<[u128; 1]>,
|
||||||
|
|
||||||
|
/// Possible branch sites. The last element of this vector is used
|
||||||
|
/// for the otherwise branch, so targets.len() == values.len() + 1
|
||||||
|
/// should hold.
|
||||||
|
//
|
||||||
|
// This invariant is quite non-obvious and also could be improved.
|
||||||
|
// One way to make this invariant is to have something like this instead:
|
||||||
|
//
|
||||||
|
// branches: Vec<(ConstInt, BasicBlock)>,
|
||||||
|
// otherwise: Option<BasicBlock> // exhaustive if None
|
||||||
|
//
|
||||||
|
// However we’ve decided to keep this as-is until we figure a case
|
||||||
|
// where some other approach seems to be strictly better than the others.
|
||||||
|
targets: SmallVec<[BasicBlockId; 2]>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SwitchTargets {
|
||||||
|
/// Creates switch targets from an iterator of values and target blocks.
|
||||||
|
///
|
||||||
|
/// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to
|
||||||
|
/// `goto otherwise;`.
|
||||||
|
pub fn new(
|
||||||
|
targets: impl Iterator<Item = (u128, BasicBlockId)>,
|
||||||
|
otherwise: BasicBlockId,
|
||||||
|
) -> Self {
|
||||||
|
let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip();
|
||||||
|
targets.push(otherwise);
|
||||||
|
Self { values, targets }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Builds a switch targets definition that jumps to `then` if the tested value equals `value`,
|
||||||
|
/// and to `else_` if not.
|
||||||
|
pub fn static_if(value: u128, then: BasicBlockId, else_: BasicBlockId) -> Self {
|
||||||
|
Self { values: smallvec![value], targets: smallvec![then, else_] }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the fallback target that is jumped to when none of the values match the operand.
|
||||||
|
pub fn otherwise(&self) -> BasicBlockId {
|
||||||
|
*self.targets.last().unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns an iterator over the switch targets.
|
||||||
|
///
|
||||||
|
/// The iterator will yield tuples containing the value and corresponding target to jump to, not
|
||||||
|
/// including the `otherwise` fallback target.
|
||||||
|
///
|
||||||
|
/// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory.
|
||||||
|
pub fn iter(&self) -> impl Iterator<Item = (u128, BasicBlockId)> + '_ {
|
||||||
|
iter::zip(&self.values, &self.targets).map(|(x, y)| (*x, *y))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a slice with all possible jump targets (including the fallback target).
|
||||||
|
pub fn all_targets(&self) -> &[BasicBlockId] {
|
||||||
|
&self.targets
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Finds the `BasicBlock` to which this `SwitchInt` will branch given the
|
||||||
|
/// specific value. This cannot fail, as it'll return the `otherwise`
|
||||||
|
/// branch if there's not a specific match for the value.
|
||||||
|
pub fn target_for_value(&self, value: u128) -> BasicBlockId {
|
||||||
|
self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise())
|
||||||
|
}
|
||||||
|
}
|
||||||
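A standalone model of the invariant documented above, `targets.len() == values.len() + 1` with the last target acting as the `otherwise` block. Block ids are plain `usize` here instead of arena indices, so the sketch compiles on its own:

struct SwitchTargets {
    values: Vec<u128>,
    targets: Vec<usize>,
}

impl SwitchTargets {
    fn static_if(value: u128, then: usize, else_: usize) -> Self {
        Self { values: vec![value], targets: vec![then, else_] }
    }

    fn otherwise(&self) -> usize {
        *self.targets.last().unwrap()
    }

    fn target_for_value(&self, value: u128) -> usize {
        self.values
            .iter()
            .zip(&self.targets)
            .find_map(|(v, t)| (*v == value).then_some(*t))
            .unwrap_or_else(|| self.otherwise())
    }
}

fn main() {
    // `if discr == 0 { goto bb1 } else { goto bb2 }` lowered as a two-way SwitchInt.
    let switch = SwitchTargets::static_if(0, 1, 2);
    assert_eq!(switch.target_for_value(0), 1);
    assert_eq!(switch.target_for_value(42), 2); // falls through to `otherwise`
}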
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
|
pub enum Terminator {
|
||||||
|
/// Block has one successor; we continue execution there.
|
||||||
|
Goto { target: BasicBlockId },
|
||||||
|
|
||||||
|
/// Switches based on the computed value.
|
||||||
|
///
|
||||||
|
/// First, evaluates the `discr` operand. The type of the operand must be a signed or unsigned
|
||||||
|
/// integer, char, or bool, and must match the given type. Then, if the list of switch targets
|
||||||
|
/// contains the computed value, continues execution at the associated basic block. Otherwise,
|
||||||
|
/// continues execution at the "otherwise" basic block.
|
||||||
|
///
|
||||||
|
/// Target values may not appear more than once.
|
||||||
|
SwitchInt {
|
||||||
|
/// The discriminant value being tested.
|
||||||
|
discr: Operand,
|
||||||
|
|
||||||
|
targets: SwitchTargets,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Indicates that the landing pad is finished and that the process should continue unwinding.
|
||||||
|
///
|
||||||
|
/// Like a return, this marks the end of this invocation of the function.
|
||||||
|
///
|
||||||
|
/// Only permitted in cleanup blocks. `Resume` is not permitted with `-C unwind=abort` after
|
||||||
|
/// deaggregation runs.
|
||||||
|
Resume,
|
||||||
|
|
||||||
|
/// Indicates that the landing pad is finished and that the process should abort.
|
||||||
|
///
|
||||||
|
/// Used to prevent unwinding for foreign items or with `-C unwind=abort`. Only permitted in
|
||||||
|
/// cleanup blocks.
|
||||||
|
Abort,
|
||||||
|
|
||||||
|
/// Returns from the function.
|
||||||
|
///
|
||||||
|
/// Like function calls, the exact semantics of returns in Rust are unclear. Returning very
|
||||||
|
/// likely at least assigns the value currently in the return place (`_0`) to the place
|
||||||
|
/// specified in the associated `Call` terminator in the calling function, as if assigned via
|
||||||
|
/// `dest = move _0`. It might additionally do other things, like have side-effects in the
|
||||||
|
/// aliasing model.
|
||||||
|
///
|
||||||
|
/// If the body is a generator body, this has slightly different semantics; it instead causes a
|
||||||
|
/// `GeneratorState::Returned(_0)` to be created (as if by an `Aggregate` rvalue) and assigned
|
||||||
|
/// to the return place.
|
||||||
|
Return,
|
||||||
|
|
||||||
|
/// Indicates a terminator that can never be reached.
|
||||||
|
///
|
||||||
|
/// Executing this terminator is UB.
|
||||||
|
Unreachable,
|
||||||
|
|
||||||
|
/// The behavior of this statement differs significantly before and after drop elaboration.
|
||||||
|
/// After drop elaboration, `Drop` executes the drop glue for the specified place, after which
|
||||||
|
/// it continues execution/unwinds at the given basic blocks. It is possible that executing drop
|
||||||
|
/// glue is special - this would be part of Rust's memory model. (**FIXME**: do we have an
|
||||||
|
/// issue tracking if drop glue has any interesting semantics in addition to those of a function
|
||||||
|
/// call?)
|
||||||
|
///
|
||||||
|
/// `Drop` before drop elaboration is a *conditional* execution of the drop glue. Specifically, the
|
||||||
|
/// `Drop` will be executed if...
|
||||||
|
///
|
||||||
|
/// **Needs clarification**: End of that sentence. This in effect should document the exact
|
||||||
|
/// behavior of drop elaboration. The following sounds vaguely right, but I'm not quite sure:
|
||||||
|
///
|
||||||
|
/// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to
|
||||||
|
/// > the place or one of its "parents" occurred more recently than a move out of it. This does not
|
||||||
|
/// > consider indirect assignments.
|
||||||
|
Drop { place: Place, target: BasicBlockId, unwind: Option<BasicBlockId> },
|
||||||
|
|
||||||
|
/// Drops the place and assigns a new value to it.
|
||||||
|
///
|
||||||
|
/// This first performs the exact same operation as the pre drop-elaboration `Drop` terminator;
|
||||||
|
/// it then additionally assigns the `value` to the `place` as if by an assignment statement.
|
||||||
|
/// This assignment occurs both in the unwind and the regular code paths. The semantics are best
|
||||||
|
/// explained by the elaboration:
|
||||||
|
///
|
||||||
|
/// ```ignore (MIR)
|
||||||
|
/// BB0 {
|
||||||
|
/// DropAndReplace(P <- V, goto BB1, unwind BB2)
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// becomes
|
||||||
|
///
|
||||||
|
/// ```ignore (MIR)
|
||||||
|
/// BB0 {
|
||||||
|
/// Drop(P, goto BB1, unwind BB2)
|
||||||
|
/// }
|
||||||
|
/// BB1 {
|
||||||
|
/// // P is now uninitialized
|
||||||
|
/// P <- V
|
||||||
|
/// }
|
||||||
|
/// BB2 {
|
||||||
|
/// // P is now uninitialized -- its dtor panicked
|
||||||
|
/// P <- V
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// Disallowed after drop elaboration.
|
||||||
|
DropAndReplace {
|
||||||
|
place: Place,
|
||||||
|
value: Operand,
|
||||||
|
target: BasicBlockId,
|
||||||
|
unwind: Option<BasicBlockId>,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of
|
||||||
|
/// the referred to function. The operand types must match the argument types of the function.
|
||||||
|
/// The return place type must match the return type. The type of the `func` operand must be
|
||||||
|
/// callable, meaning either a function pointer, a function type, or a closure type.
|
||||||
|
///
|
||||||
|
/// **Needs clarification**: The exact semantics of this. Current backends rely on `move`
|
||||||
|
/// operands not aliasing the return place. It is unclear how this is justified in MIR, see
|
||||||
|
/// [#71117].
|
||||||
|
///
|
||||||
|
/// [#71117]: https://github.com/rust-lang/rust/issues/71117
|
||||||
|
Call {
|
||||||
|
/// The function that’s being called.
|
||||||
|
func: Operand,
|
||||||
|
/// Arguments the function is called with.
|
||||||
|
/// These are owned by the callee, which is free to modify them.
|
||||||
|
/// This allows the memory occupied by "by-value" arguments to be
|
||||||
|
/// reused across function calls without duplicating the contents.
|
||||||
|
args: Vec<Operand>,
|
||||||
|
/// Where the returned value will be written
|
||||||
|
destination: Place,
|
||||||
|
/// Where to go after this call returns. If none, the call necessarily diverges.
|
||||||
|
target: Option<BasicBlockId>,
|
||||||
|
/// Cleanups to be done if the call unwinds.
|
||||||
|
cleanup: Option<BasicBlockId>,
|
||||||
|
/// `true` if this is from a call in HIR rather than from an overloaded
|
||||||
|
/// operator. True for overloaded function call.
|
||||||
|
from_hir_call: bool,
|
||||||
|
// This `Span` is the span of the function, without the dot and receiver
|
||||||
|
// (e.g. `foo(a, b)` in `x.foo(a, b)`
|
||||||
|
//fn_span: Span,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Evaluates the operand, which must have type `bool`. If it is not equal to `expected`,
|
||||||
|
/// initiates a panic. Initiating a panic corresponds to a `Call` terminator with some
|
||||||
|
/// unspecified constant as the function to call, all the operands stored in the `AssertMessage`
|
||||||
|
/// as parameters, and `None` for the destination. Keep in mind that the `cleanup` path is not
|
||||||
|
/// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the
|
||||||
|
/// assertion does not fail, execution continues at the specified basic block.
|
||||||
|
Assert {
|
||||||
|
cond: Operand,
|
||||||
|
expected: bool,
|
||||||
|
//msg: AssertMessage,
|
||||||
|
target: BasicBlockId,
|
||||||
|
cleanup: Option<BasicBlockId>,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Marks a suspend point.
|
||||||
|
///
|
||||||
|
/// Like `Return` terminators in generator bodies, this computes `value` and then a
|
||||||
|
/// `GeneratorState::Yielded(value)` as if by `Aggregate` rvalue. That value is then assigned to
|
||||||
|
/// the return place of the function calling this one, and execution continues in the calling
|
||||||
|
/// function. When next invoked with the same first argument, execution of this function
|
||||||
|
/// continues at the `resume` basic block, with the second argument written to the `resume_arg`
|
||||||
|
/// place. If the generator is dropped before then, the `drop` basic block is invoked.
|
||||||
|
///
|
||||||
|
/// Not permitted in bodies that are not generator bodies, or after generator lowering.
|
||||||
|
///
|
||||||
|
/// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`?
|
||||||
|
Yield {
|
||||||
|
/// The value to return.
|
||||||
|
value: Operand,
|
||||||
|
/// Where to resume to.
|
||||||
|
resume: BasicBlockId,
|
||||||
|
/// The place to store the resume argument in.
|
||||||
|
resume_arg: Place,
|
||||||
|
/// Cleanup to be done if the generator is dropped at this suspend point.
|
||||||
|
drop: Option<BasicBlockId>,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Indicates the end of dropping a generator.
|
||||||
|
///
|
||||||
|
/// Semantically just a `return` (from the generator's drop glue). Only permitted in the same situations
|
||||||
|
/// as `yield`.
|
||||||
|
///
|
||||||
|
/// **Needs clarification**: Is that even correct? The generator drop code is always confusing
|
||||||
|
/// to me, because it's not even really in the current body.
|
||||||
|
///
|
||||||
|
/// **Needs clarification**: Are there type system constraints on these terminators? Should
|
||||||
|
/// there be a "block type" like `cleanup` blocks for them?
|
||||||
|
GeneratorDrop,
|
||||||
|
|
||||||
|
/// A block where control flow only ever takes one real path, but borrowck needs to be more
|
||||||
|
/// conservative.
|
||||||
|
///
|
||||||
|
/// At runtime this is semantically just a goto.
|
||||||
|
///
|
||||||
|
/// Disallowed after drop elaboration.
|
||||||
|
FalseEdge {
|
||||||
|
/// The target normal control flow will take.
|
||||||
|
real_target: BasicBlockId,
|
||||||
|
/// A block control flow could conceptually jump to, but won't in
|
||||||
|
/// practice.
|
||||||
|
imaginary_target: BasicBlockId,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// A terminator for blocks that only take one path in reality, but where we reserve the right
|
||||||
|
/// to unwind in borrowck, even if it won't happen in practice. This can arise in infinite loops
|
||||||
|
/// with no function calls for example.
|
||||||
|
///
|
||||||
|
/// At runtime this is semantically just a goto.
|
||||||
|
///
|
||||||
|
/// Disallowed after drop elaboration.
|
||||||
|
FalseUnwind {
|
||||||
|
/// The target normal control flow will take.
|
||||||
|
real_target: BasicBlockId,
|
||||||
|
/// The imaginary cleanup block link. This particular path will never be taken
|
||||||
|
/// in practice, but in order to avoid fragility we want to always
|
||||||
|
/// consider it in borrowck. We don't want to accept programs which
|
||||||
|
/// pass borrowck only when `panic=abort` or some assertions are disabled
|
||||||
|
/// due to release vs. debug mode builds. This needs to be an `Option` because
|
||||||
|
/// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes.
|
||||||
|
unwind: Option<BasicBlockId>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
|
pub enum BorrowKind {
|
||||||
|
/// Data must be immutable and is aliasable.
|
||||||
|
Shared,
|
||||||
|
|
||||||
|
/// The immediately borrowed place must be immutable, but projections from
|
||||||
|
/// it don't need to be. For example, a shallow borrow of `a.b` doesn't
|
||||||
|
/// conflict with a mutable borrow of `a.b.c`.
|
||||||
|
///
|
||||||
|
/// This is used when lowering matches: when matching on a place we want to
|
||||||
|
/// ensure that the place has the same value from the start of the match until
|
||||||
|
/// an arm is selected. This prevents this code from compiling:
|
||||||
|
/// ```compile_fail,E0510
|
||||||
|
/// let mut x = &Some(0);
|
||||||
|
/// match *x {
|
||||||
|
/// None => (),
|
||||||
|
/// Some(_) if { x = &None; false } => (),
|
||||||
|
/// Some(_) => (),
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
/// This can't be a shared borrow because mutably borrowing (*x as Some).0
|
||||||
|
/// should not prevent `if let None = x { ... }`, for example, because the
|
||||||
|
/// mutating `(*x as Some).0` can't affect the discriminant of `x`.
|
||||||
|
/// We can also report errors with this kind of borrow differently.
|
||||||
|
Shallow,
|
||||||
|
|
||||||
|
/// Data must be immutable but not aliasable. This kind of borrow
|
||||||
|
/// cannot currently be expressed by the user and is used only in
|
||||||
|
/// implicit closure bindings. It is needed when the closure is
|
||||||
|
/// borrowing or mutating a mutable referent, e.g.:
|
||||||
|
/// ```
|
||||||
|
/// let mut z = 3;
|
||||||
|
/// let x: &mut isize = &mut z;
|
||||||
|
/// let y = || *x += 5;
|
||||||
|
/// ```
|
||||||
|
/// If we were to try to translate this closure into a more explicit
|
||||||
|
/// form, we'd encounter an error with the code as written:
|
||||||
|
/// ```compile_fail,E0594
|
||||||
|
/// struct Env<'a> { x: &'a &'a mut isize }
|
||||||
|
/// let mut z = 3;
|
||||||
|
/// let x: &mut isize = &mut z;
|
||||||
|
/// let y = (&mut Env { x: &x }, fn_ptr); // Closure is pair of env and fn
|
||||||
|
/// fn fn_ptr(env: &mut Env) { **env.x += 5; }
|
||||||
|
/// ```
|
||||||
|
/// This is then illegal because you cannot mutate an `&mut` found
|
||||||
|
/// in an aliasable location. To solve, you'd have to translate with
|
||||||
|
/// an `&mut` borrow:
|
||||||
|
/// ```compile_fail,E0596
|
||||||
|
/// struct Env<'a> { x: &'a mut &'a mut isize }
|
||||||
|
/// let mut z = 3;
|
||||||
|
/// let x: &mut isize = &mut z;
|
||||||
|
/// let y = (&mut Env { x: &mut x }, fn_ptr); // changed from &x to &mut x
|
||||||
|
/// fn fn_ptr(env: &mut Env) { **env.x += 5; }
|
||||||
|
/// ```
|
||||||
|
/// Now the assignment to `**env.x` is legal, but creating a
|
||||||
|
/// mutable pointer to `x` is not because `x` is not mutable. We
|
||||||
|
/// could fix this by declaring `x` as `let mut x`. This is ok in
|
||||||
|
/// user code, if awkward, but extra weird for closures, since the
|
||||||
|
/// borrow is hidden.
|
||||||
|
///
|
||||||
|
/// So we introduce a "unique imm" borrow -- the referent is
|
||||||
|
/// immutable, but not aliasable. This solves the problem. For
|
||||||
|
/// simplicity, we don't give users the way to express this
|
||||||
|
/// borrow, it's just used when translating closures.
|
||||||
|
Unique,
|
||||||
|
|
||||||
|
/// Data is mutable and not aliasable.
|
||||||
|
Mut {
|
||||||
|
/// `true` if this borrow arose from method-call auto-ref
|
||||||
|
/// (i.e., `adjustment::Adjust::Borrow`).
|
||||||
|
allow_two_phase_borrow: bool,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BorrowKind {
|
||||||
|
fn from_hir(m: hir_def::type_ref::Mutability) -> Self {
|
||||||
|
match m {
|
||||||
|
hir_def::type_ref::Mutability::Shared => BorrowKind::Shared,
|
||||||
|
hir_def::type_ref::Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_chalk(m: Mutability) -> Self {
|
||||||
|
match m {
|
||||||
|
Mutability::Not => BorrowKind::Shared,
|
||||||
|
Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
pub enum UnOp {
|
||||||
|
/// The `!` operator for logical inversion
|
||||||
|
Not,
|
||||||
|
/// The `-` operator for negation
|
||||||
|
Neg,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
|
pub enum BinOp {
|
||||||
|
/// The `+` operator (addition)
|
||||||
|
Add,
|
||||||
|
/// The `-` operator (subtraction)
|
||||||
|
Sub,
|
||||||
|
/// The `*` operator (multiplication)
|
||||||
|
Mul,
|
||||||
|
/// The `/` operator (division)
|
||||||
|
///
|
||||||
|
/// Division by zero is UB, because the compiler should have inserted checks
|
||||||
|
/// prior to this.
|
||||||
|
Div,
|
||||||
|
/// The `%` operator (modulus)
|
||||||
|
///
|
||||||
|
/// Using zero as the modulus (second operand) is UB, because the compiler
|
||||||
|
/// should have inserted checks prior to this.
|
||||||
|
Rem,
|
||||||
|
/// The `^` operator (bitwise xor)
|
||||||
|
BitXor,
|
||||||
|
/// The `&` operator (bitwise and)
|
||||||
|
BitAnd,
|
||||||
|
/// The `|` operator (bitwise or)
|
||||||
|
BitOr,
|
||||||
|
/// The `<<` operator (shift left)
|
||||||
|
///
|
||||||
|
/// The offset is truncated to the size of the first operand before shifting.
|
||||||
|
Shl,
|
||||||
|
/// The `>>` operator (shift right)
|
||||||
|
///
|
||||||
|
/// The offset is truncated to the size of the first operand before shifting.
|
||||||
|
Shr,
|
||||||
|
/// The `==` operator (equality)
|
||||||
|
Eq,
|
||||||
|
/// The `<` operator (less than)
|
||||||
|
Lt,
|
||||||
|
/// The `<=` operator (less than or equal to)
|
||||||
|
Le,
|
||||||
|
/// The `!=` operator (not equal to)
|
||||||
|
Ne,
|
||||||
|
/// The `>=` operator (greater than or equal to)
|
||||||
|
Ge,
|
||||||
|
/// The `>` operator (greater than)
|
||||||
|
Gt,
|
||||||
|
/// The `ptr.offset` operator
|
||||||
|
Offset,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for BinOp {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
f.write_str(match self {
|
||||||
|
BinOp::Add => "+",
|
||||||
|
BinOp::Sub => "-",
|
||||||
|
BinOp::Mul => "*",
|
||||||
|
BinOp::Div => "/",
|
||||||
|
BinOp::Rem => "%",
|
||||||
|
BinOp::BitXor => "^",
|
||||||
|
BinOp::BitAnd => "&",
|
||||||
|
BinOp::BitOr => "|",
|
||||||
|
BinOp::Shl => "<<",
|
||||||
|
BinOp::Shr => ">>",
|
||||||
|
BinOp::Eq => "==",
|
||||||
|
BinOp::Lt => "<",
|
||||||
|
BinOp::Le => "<=",
|
||||||
|
BinOp::Ne => "!=",
|
||||||
|
BinOp::Ge => ">=",
|
||||||
|
BinOp::Gt => ">",
|
||||||
|
BinOp::Offset => "`offset`",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<hir_def::expr::ArithOp> for BinOp {
|
||||||
|
fn from(value: hir_def::expr::ArithOp) -> Self {
|
||||||
|
match value {
|
||||||
|
hir_def::expr::ArithOp::Add => BinOp::Add,
|
||||||
|
hir_def::expr::ArithOp::Mul => BinOp::Mul,
|
||||||
|
hir_def::expr::ArithOp::Sub => BinOp::Sub,
|
||||||
|
hir_def::expr::ArithOp::Div => BinOp::Div,
|
||||||
|
hir_def::expr::ArithOp::Rem => BinOp::Rem,
|
||||||
|
hir_def::expr::ArithOp::Shl => BinOp::Shl,
|
||||||
|
hir_def::expr::ArithOp::Shr => BinOp::Shr,
|
||||||
|
hir_def::expr::ArithOp::BitXor => BinOp::BitXor,
|
||||||
|
hir_def::expr::ArithOp::BitOr => BinOp::BitOr,
|
||||||
|
hir_def::expr::ArithOp::BitAnd => BinOp::BitAnd,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<hir_def::expr::CmpOp> for BinOp {
|
||||||
|
fn from(value: hir_def::expr::CmpOp) -> Self {
|
||||||
|
match value {
|
||||||
|
hir_def::expr::CmpOp::Eq { negated: false } => BinOp::Eq,
|
||||||
|
hir_def::expr::CmpOp::Eq { negated: true } => BinOp::Ne,
|
||||||
|
hir_def::expr::CmpOp::Ord { ordering: Ordering::Greater, strict: false } => BinOp::Ge,
|
||||||
|
hir_def::expr::CmpOp::Ord { ordering: Ordering::Greater, strict: true } => BinOp::Gt,
|
||||||
|
hir_def::expr::CmpOp::Ord { ordering: Ordering::Less, strict: false } => BinOp::Le,
|
||||||
|
hir_def::expr::CmpOp::Ord { ordering: Ordering::Less, strict: true } => BinOp::Lt,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
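The conversion above maps HIR's comparison representation (equality possibly negated, ordering possibly strict) onto the six MIR comparison operators. A self-contained sketch of the same table, with toy enums standing in for the `hir_def` types:

#[derive(Debug, Clone, Copy, PartialEq)]
enum CmpOp {
    Eq { negated: bool },
    Ord { less: bool, strict: bool },
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum BinOp {
    Eq,
    Ne,
    Lt,
    Le,
    Gt,
    Ge,
}

fn lower(op: CmpOp) -> BinOp {
    match op {
        CmpOp::Eq { negated: false } => BinOp::Eq,
        CmpOp::Eq { negated: true } => BinOp::Ne,
        CmpOp::Ord { less: true, strict: true } => BinOp::Lt,
        CmpOp::Ord { less: true, strict: false } => BinOp::Le,
        CmpOp::Ord { less: false, strict: true } => BinOp::Gt,
        CmpOp::Ord { less: false, strict: false } => BinOp::Ge,
    }
}

fn main() {
    assert_eq!(lower(CmpOp::Eq { negated: true }), BinOp::Ne);
    assert_eq!(lower(CmpOp::Ord { less: true, strict: false }), BinOp::Le);
}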
|
|
||||||
|
impl From<Operand> for Rvalue {
|
||||||
|
fn from(x: Operand) -> Self {
|
||||||
|
Self::Use(x)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
|
pub enum CastKind {
|
||||||
|
/// An exposing pointer to address cast. A cast between a pointer and an integer type, or
|
||||||
|
/// between a function pointer and an integer type.
|
||||||
|
/// See the docs on `expose_addr` for more details.
|
||||||
|
PointerExposeAddress,
|
||||||
|
/// An address-to-pointer cast that picks up an exposed provenance.
|
||||||
|
/// See the docs on `from_exposed_addr` for more details.
|
||||||
|
PointerFromExposedAddress,
|
||||||
|
/// All sorts of pointer-to-pointer casts. Note that reference-to-raw-ptr casts are
|
||||||
|
/// translated into `&raw mut/const *r`, i.e., they are not actually casts.
|
||||||
|
Pointer(PointerCast),
|
||||||
|
/// Cast into a dyn* object.
|
||||||
|
DynStar,
|
||||||
|
IntToInt,
|
||||||
|
FloatToInt,
|
||||||
|
FloatToFloat,
|
||||||
|
IntToFloat,
|
||||||
|
PtrToPtr,
|
||||||
|
FnPtrToPtr,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
|
pub enum Rvalue {
|
||||||
|
/// Yields the operand unchanged
|
||||||
|
Use(Operand),
|
||||||
|
|
||||||
|
/// Creates an array where each element is the value of the operand.
|
||||||
|
///
|
||||||
|
/// This is the cause of a bug in the case where the repetition count is zero because the value
|
||||||
|
/// is not dropped, see [#74836].
|
||||||
|
///
|
||||||
|
/// Corresponds to source code like `[x; 32]`.
|
||||||
|
///
|
||||||
|
/// [#74836]: https://github.com/rust-lang/rust/issues/74836
|
||||||
|
//Repeat(Operand, ty::Const),
|
||||||
|
|
||||||
|
/// Creates a reference of the indicated kind to the place.
|
||||||
|
///
|
||||||
|
/// There is not much to document here, because besides the obvious parts the semantics of this
|
||||||
|
/// are essentially entirely a part of the aliasing model. There are many UCG issues discussing
|
||||||
|
/// exactly what the behavior of this operation should be.
|
||||||
|
///
|
||||||
|
/// `Shallow` borrows are disallowed after drop lowering.
|
||||||
|
Ref(BorrowKind, Place),
|
||||||
|
|
||||||
|
/// Creates a pointer/reference to the given thread local.
|
||||||
|
///
|
||||||
|
/// The yielded type is a `*mut T` if the static is mutable, otherwise if the static is extern a
|
||||||
|
/// `*const T`, and if neither of those apply a `&T`.
|
||||||
|
///
|
||||||
|
/// **Note:** This is a runtime operation that actually executes code and is in this sense more
|
||||||
|
/// like a function call. Also, eliminating dead stores of this rvalue causes `fn main() {}` to
|
||||||
|
/// SIGILL for some reason that I (JakobDegen) never got a chance to look into.
|
||||||
|
///
|
||||||
|
/// **Needs clarification**: Are there weird additional semantics here related to the runtime
|
||||||
|
/// nature of this operation?
|
||||||
|
//ThreadLocalRef(DefId),
|
||||||
|
|
||||||
|
/// Creates a pointer with the indicated mutability to the place.
|
||||||
|
///
|
||||||
|
/// This is generated by pointer casts like `&v as *const _` or raw address of expressions like
|
||||||
|
/// `&raw v` or `addr_of!(v)`.
|
||||||
|
///
|
||||||
|
/// Like with references, the semantics of this operation are heavily dependent on the aliasing
|
||||||
|
/// model.
|
||||||
|
//AddressOf(Mutability, Place),
|
||||||
|
|
||||||
|
/// Yields the length of the place, as a `usize`.
|
||||||
|
///
|
||||||
|
/// If the type of the place is an array, this is the array length. For slices (`[T]`, not
|
||||||
|
/// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is
|
||||||
|
/// ill-formed for places of other types.
|
||||||
|
Len(Place),
|
||||||
|
|
||||||
|
/// Performs essentially all of the casts that can be performed via `as`.
|
||||||
|
///
|
||||||
|
/// This allows for casts from/to a variety of types.
|
||||||
|
///
|
||||||
|
/// **FIXME**: Document exactly which `CastKind`s allow which types of casts. Figure out why
|
||||||
|
/// `ArrayToPointer` and `MutToConstPointer` are special.
|
||||||
|
Cast(CastKind, Operand, Ty),
|
||||||
|
|
||||||
|
// FIXME link to `pointer::offset` when it hits stable.
|
||||||
|
/// * `Offset` has the same semantics as `pointer::offset`, except that the second
|
||||||
|
/// parameter may be a `usize` as well.
|
||||||
|
/// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats,
|
||||||
|
/// raw pointers, or function pointers and return a `bool`. The types of the operands must be
|
||||||
|
/// matching, up to the usual caveat of the lifetimes in function pointers.
|
||||||
|
/// * Left and right shift operations accept signed or unsigned integers not necessarily of the
|
||||||
|
/// same type and return a value of the same type as their LHS. Like in Rust, the RHS is
|
||||||
|
/// truncated as needed.
|
||||||
|
/// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching
|
||||||
|
/// types and return a value of that type.
|
||||||
|
/// * The remaining operations accept signed integers, unsigned integers, or floats with
|
||||||
|
/// matching types and return a value of that type.
|
||||||
|
//BinaryOp(BinOp, Box<(Operand, Operand)>),
|
||||||
|
|
||||||
|
/// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition.
|
||||||
|
///
|
||||||
|
/// When overflow checking is disabled and we are generating run-time code, the error condition
|
||||||
|
/// is false. Otherwise, and always during CTFE, the error condition is determined as described
|
||||||
|
/// below.
|
||||||
|
///
|
||||||
|
/// For addition, subtraction, and multiplication on integers the error condition is set when
|
||||||
|
/// the infinite precision result would be unequal to the actual result.
|
||||||
|
///
|
||||||
|
/// For shift operations on integers the error condition is set when the value of right-hand
|
||||||
|
/// side is greater than or equal to the number of bits in the type of the left-hand side, or
|
||||||
|
/// when the value of right-hand side is negative.
|
||||||
|
///
|
||||||
|
/// Other combinations of types and operators are unsupported.
|
||||||
|
CheckedBinaryOp(BinOp, Operand, Operand),
|
||||||
|
|
||||||
|
/// Computes a value as described by the operation.
|
||||||
|
//NullaryOp(NullOp, Ty),
|
||||||
|
|
||||||
|
/// Exactly like `BinaryOp`, but with fewer operands.
|
||||||
|
///
|
||||||
|
/// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
|
||||||
|
/// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds
|
||||||
|
/// return a value with the same type as their operand.
|
||||||
|
UnaryOp(UnOp, Operand),
|
||||||
|
|
||||||
|
/// Computes the discriminant of the place, returning it as an integer of type
|
||||||
|
/// [`discriminant_ty`]. Returns zero for types without discriminant.
|
||||||
|
///
|
||||||
|
/// The validity requirements for the underlying value are undecided for this rvalue, see
|
||||||
|
/// [#91095]. Note too that the value of the discriminant is not the same thing as the
|
||||||
|
/// variant index; use [`discriminant_for_variant`] to convert.
|
||||||
|
///
|
||||||
|
/// [`discriminant_ty`]: crate::ty::Ty::discriminant_ty
|
||||||
|
/// [#91095]: https://github.com/rust-lang/rust/issues/91095
|
||||||
|
/// [`discriminant_for_variant`]: crate::ty::Ty::discriminant_for_variant
|
||||||
|
Discriminant(Place),
|
||||||
|
|
||||||
|
/// Creates an aggregate value, like a tuple or struct.
|
||||||
|
///
|
||||||
|
/// This is needed because dataflow analysis needs to distinguish
|
||||||
|
/// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo`
|
||||||
|
/// has a destructor.
|
||||||
|
///
|
||||||
|
/// Disallowed after deaggregation for all aggregate kinds except `Array` and `Generator`. After
|
||||||
|
/// generator lowering, `Generator` aggregate kinds are disallowed too.
|
||||||
|
Aggregate(AggregateKind, Vec<Operand>),
|
||||||
|
|
||||||
|
/// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
|
||||||
|
///
|
||||||
|
/// This is different from a normal transmute because dataflow analysis will treat the box as
|
||||||
|
/// initialized but its content as uninitialized. Like other pointer casts, this in general
|
||||||
|
/// affects alias analysis.
|
||||||
|
ShallowInitBox(Operand, Ty),
|
||||||
|
|
||||||
|
/// A CopyForDeref is equivalent to a read from a place at the
|
||||||
|
/// codegen level, but is treated specially by drop elaboration. When such a read happens, it
|
||||||
|
/// is guaranteed (via nature of the mir_opt `Derefer` in rustc_mir_transform/src/deref_separator)
|
||||||
|
/// that the only use of the returned value is a deref operation, immediately
|
||||||
|
/// followed by one or more projections. Drop elaboration treats this rvalue as if the
|
||||||
|
/// read never happened and just projects further. This allows simplifying various MIR
|
||||||
|
/// optimizations and codegen backends that previously had to handle deref operations anywhere
|
||||||
|
/// in a place.
|
||||||
|
CopyForDeref(Place),
|
||||||
|
}
|
||||||
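The `CheckedBinaryOp` documentation above says the extra `bool` is set when the infinite-precision result would differ from the wrapped result, or when a shift amount is out of range. A standalone sketch of that rule using plain `i32` arithmetic (this is not the MIR evaluator, just the arithmetic convention):

fn checked_binary_add(lhs: i32, rhs: i32) -> (i32, bool) {
    // `overflowing_add` returns the two's-complement result plus an overflow flag,
    // which is exactly the `(T, bool)` shape MIR gives `CheckedBinaryOp`.
    lhs.overflowing_add(rhs)
}

fn checked_binary_shl(lhs: i32, rhs: u32) -> (i32, bool) {
    // Error condition for shifts: RHS >= bit width of the LHS type.
    (lhs.wrapping_shl(rhs), rhs >= i32::BITS)
}

fn main() {
    assert_eq!(checked_binary_add(1, 2), (3, false));
    assert_eq!(checked_binary_add(i32::MAX, 1), (i32::MIN, true));
    assert_eq!(checked_binary_shl(1, 40), (1 << 8, true)); // 40 % 32 == 8
}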
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
|
pub enum StatementKind {
|
||||||
|
Assign(Place, Rvalue),
|
||||||
|
//FakeRead(Box<(FakeReadCause, Place)>),
|
||||||
|
//SetDiscriminant {
|
||||||
|
// place: Box<Place>,
|
||||||
|
// variant_index: VariantIdx,
|
||||||
|
//},
|
||||||
|
Deinit(Place),
|
||||||
|
StorageLive(LocalId),
|
||||||
|
StorageDead(LocalId),
|
||||||
|
//Retag(RetagKind, Box<Place>),
|
||||||
|
//AscribeUserType(Place, UserTypeProjection, Variance),
|
||||||
|
//Intrinsic(Box<NonDivergingIntrinsic>),
|
||||||
|
Nop,
|
||||||
|
}
|
||||||
|
impl StatementKind {
|
||||||
|
fn with_span(self, span: MirSpan) -> Statement {
|
||||||
|
Statement { kind: self, span }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
|
pub struct Statement {
|
||||||
|
pub kind: StatementKind,
|
||||||
|
pub span: MirSpan,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Default, PartialEq, Eq)]
|
||||||
|
pub struct BasicBlock {
|
||||||
|
/// List of statements in this block.
|
||||||
|
pub statements: Vec<Statement>,
|
||||||
|
|
||||||
|
/// Terminator for this block.
|
||||||
|
///
|
||||||
|
/// N.B., this should generally ONLY be `None` during construction.
|
||||||
|
/// Therefore, you should generally access it via the
|
||||||
|
/// `terminator()` or `terminator_mut()` methods. The only
|
||||||
|
/// exception is that certain passes, such as `simplify_cfg`, swap
|
||||||
|
/// out the terminator temporarily with `None` while they continue
|
||||||
|
/// to recurse over the set of basic blocks.
|
||||||
|
pub terminator: Option<Terminator>,
|
||||||
|
|
||||||
|
/// If true, this block lies on an unwind path. This is used
|
||||||
|
/// during codegen where distinct kinds of basic blocks may be
|
||||||
|
/// generated (particularly for MSVC cleanup). Unwind blocks must
|
||||||
|
/// only branch to other unwind blocks.
|
||||||
|
pub is_cleanup: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq)]
|
||||||
|
pub struct MirBody {
|
||||||
|
pub basic_blocks: Arena<BasicBlock>,
|
||||||
|
pub locals: Arena<Local>,
|
||||||
|
pub start_block: BasicBlockId,
|
||||||
|
pub owner: DefWithBodyId,
|
||||||
|
pub arg_count: usize,
|
||||||
|
pub binding_locals: ArenaMap<BindingId, LocalId>,
|
||||||
|
pub param_locals: Vec<LocalId>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn const_as_usize(c: &Const) -> usize {
|
||||||
|
try_const_usize(c).unwrap() as usize
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||||
|
pub enum MirSpan {
|
||||||
|
ExprId(ExprId),
|
||||||
|
PatId(PatId),
|
||||||
|
Unknown,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl_from!(ExprId, PatId for MirSpan);
|
crates/hir-ty/src/mir/borrowck.rs (new file, 223 lines)
|
@ -0,0 +1,223 @@
|
||||||
|
//! MIR borrow checker, which is used in diagnostics like `unused_mut`
|
||||||
|
|
||||||
|
// Currently it is an ad-hoc implementation, only useful for mutability analysis. Feel free to remove all of these
|
||||||
|
// if needed for implementing a proper borrow checker.
|
||||||
|
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use hir_def::DefWithBodyId;
|
||||||
|
use la_arena::ArenaMap;
|
||||||
|
use stdx::never;
|
||||||
|
|
||||||
|
use crate::db::HirDatabase;
|
||||||
|
|
||||||
|
use super::{
|
||||||
|
BasicBlockId, BorrowKind, LocalId, MirBody, MirLowerError, MirSpan, Place, ProjectionElem,
|
||||||
|
Rvalue, StatementKind, Terminator,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
|
/// Stores spans which imply that the local should be mutable.
|
||||||
|
pub enum MutabilityReason {
|
||||||
|
Mut { spans: Vec<MirSpan> },
|
||||||
|
Not,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
|
pub struct BorrowckResult {
|
||||||
|
pub mir_body: Arc<MirBody>,
|
||||||
|
pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn borrowck_query(
|
||||||
|
db: &dyn HirDatabase,
|
||||||
|
def: DefWithBodyId,
|
||||||
|
) -> Result<Arc<BorrowckResult>, MirLowerError> {
|
||||||
|
let _p = profile::span("borrowck_query");
|
||||||
|
let body = db.mir_body(def)?;
|
||||||
|
let r = BorrowckResult { mutability_of_locals: mutability_of_locals(&body), mir_body: body };
|
||||||
|
Ok(Arc::new(r))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_place_direct(lvalue: &Place) -> bool {
|
||||||
|
!lvalue.projection.iter().any(|x| *x == ProjectionElem::Deref)
|
||||||
|
}
|
||||||
|
|
||||||
|
enum ProjectionCase {
|
||||||
|
/// Projection is a local
|
||||||
|
Direct,
|
||||||
|
/// Projection is some field or slice of a local
|
||||||
|
DirectPart,
|
||||||
|
/// Projection is deref of something
|
||||||
|
Indirect,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn place_case(lvalue: &Place) -> ProjectionCase {
|
||||||
|
let mut is_part_of = false;
|
||||||
|
for proj in lvalue.projection.iter().rev() {
|
||||||
|
match proj {
|
||||||
|
ProjectionElem::Deref => return ProjectionCase::Indirect, // It's indirect
|
||||||
|
ProjectionElem::ConstantIndex { .. }
|
||||||
|
| ProjectionElem::Subslice { .. }
|
||||||
|
| ProjectionElem::Field(_)
|
||||||
|
| ProjectionElem::TupleField(_)
|
||||||
|
| ProjectionElem::Index(_) => {
|
||||||
|
is_part_of = true;
|
||||||
|
}
|
||||||
|
ProjectionElem::OpaqueCast(_) => (),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if is_part_of {
|
||||||
|
ProjectionCase::DirectPart
|
||||||
|
} else {
|
||||||
|
ProjectionCase::Direct
|
||||||
|
}
|
||||||
|
}
|
||||||
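A standalone sketch of the classification implemented by `place_case` above: any `Deref` in the projection list makes the assignment indirect, while field or index projections only mark it as writing a part of a directly-owned local. The projection enum here is a toy stand-in for the crate's `ProjectionElem`:

#[derive(Debug, Clone, Copy, PartialEq)]
enum Proj {
    Deref,
    Field,
    Index,
}

#[derive(Debug, PartialEq)]
enum ProjectionCase {
    Direct,
    DirectPart,
    Indirect,
}

fn place_case(projection: &[Proj]) -> ProjectionCase {
    let mut is_part_of = false;
    for proj in projection.iter().rev() {
        match proj {
            Proj::Deref => return ProjectionCase::Indirect,
            Proj::Field | Proj::Index => is_part_of = true,
        }
    }
    if is_part_of {
        ProjectionCase::DirectPart
    } else {
        ProjectionCase::Direct
    }
}

fn main() {
    assert_eq!(place_case(&[]), ProjectionCase::Direct); // `x = ...`
    assert_eq!(place_case(&[Proj::Field]), ProjectionCase::DirectPart); // `x.f = ...`
    assert_eq!(place_case(&[Proj::Deref, Proj::Field]), ProjectionCase::Indirect); // `(*x).f = ...`
}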
|
|
||||||
|
/// Returns a map from basic blocks to the set of locals that might be ever initialized before
|
||||||
|
/// the start of the block. Only `StorageDead` can remove something from this map, and we ignore
|
||||||
|
/// `Uninit`, `drop`, and the like after initialization.
|
||||||
|
fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
|
||||||
|
let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
|
||||||
|
body.basic_blocks.iter().map(|x| (x.0, ArenaMap::default())).collect();
|
||||||
|
fn dfs(
|
||||||
|
body: &MirBody,
|
||||||
|
b: BasicBlockId,
|
||||||
|
l: LocalId,
|
||||||
|
result: &mut ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>>,
|
||||||
|
) {
|
||||||
|
let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs
|
||||||
|
let block = &body.basic_blocks[b];
|
||||||
|
for statement in &block.statements {
|
||||||
|
match &statement.kind {
|
||||||
|
StatementKind::Assign(p, _) => {
|
||||||
|
if p.projection.len() == 0 && p.local == l {
|
||||||
|
is_ever_initialized = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
StatementKind::StorageDead(p) => {
|
||||||
|
if *p == l {
|
||||||
|
is_ever_initialized = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
StatementKind::Deinit(_) | StatementKind::Nop | StatementKind::StorageLive(_) => (),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let Some(terminator) = &block.terminator else {
|
||||||
|
never!("Terminator should be none only in construction");
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
let targets = match terminator {
|
||||||
|
Terminator::Goto { target } => vec![*target],
|
||||||
|
Terminator::SwitchInt { targets, .. } => targets.all_targets().to_vec(),
|
||||||
|
Terminator::Resume
|
||||||
|
| Terminator::Abort
|
||||||
|
| Terminator::Return
|
||||||
|
| Terminator::Unreachable => vec![],
|
||||||
|
Terminator::Call { target, cleanup, destination, .. } => {
|
||||||
|
if destination.projection.len() == 0 && destination.local == l {
|
||||||
|
is_ever_initialized = true;
|
||||||
|
}
|
||||||
|
target.into_iter().chain(cleanup.into_iter()).copied().collect()
|
||||||
|
}
|
||||||
|
Terminator::Drop { .. }
|
||||||
|
| Terminator::DropAndReplace { .. }
|
||||||
|
| Terminator::Assert { .. }
|
||||||
|
| Terminator::Yield { .. }
|
||||||
|
| Terminator::GeneratorDrop
|
||||||
|
| Terminator::FalseEdge { .. }
|
||||||
|
| Terminator::FalseUnwind { .. } => {
|
||||||
|
never!("We don't emit these MIR terminators yet");
|
||||||
|
vec![]
|
||||||
|
}
|
||||||
|
};
|
||||||
|
for target in targets {
|
||||||
|
if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized {
|
||||||
|
result[target].insert(l, is_ever_initialized);
|
||||||
|
dfs(body, target, l, result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for &l in &body.param_locals {
|
||||||
|
result[body.start_block].insert(l, true);
|
||||||
|
dfs(body, body.start_block, l, &mut result);
|
||||||
|
}
|
||||||
|
for l in body.locals.iter().map(|x| x.0) {
|
||||||
|
if !result[body.start_block].contains_idx(l) {
|
||||||
|
result[body.start_block].insert(l, false);
|
||||||
|
dfs(body, body.start_block, l, &mut result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
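
The walk above is a forward propagation over the control-flow graph that only revisits a block when a path adds information. As a rough standalone illustration (toy graph types, not the rust-analyzer ones), the same idea looks like this:

use std::collections::HashMap;

// Toy CFG: block index -> successor block indices.
type Cfg = Vec<Vec<usize>>;

// For a single local, mark in which blocks it may already be initialized on
// entry, starting from the block that assigns it.
fn ever_initialized(cfg: &Cfg, assigned_in: usize) -> HashMap<usize, bool> {
    fn dfs(cfg: &Cfg, b: usize, init: bool, map: &mut HashMap<usize, bool>) {
        for &succ in &cfg[b] {
            // Revisit a successor only if this path adds information, mirroring
            // the `!contains_idx(l) || (!result[target][l] && is_ever_initialized)` check.
            let old = map.get(&succ).copied();
            if old.is_none() || (old == Some(false) && init) {
                map.insert(succ, init);
                dfs(cfg, succ, init, map);
            }
        }
    }
    let mut map = HashMap::new();
    map.insert(assigned_in, false); // not yet initialized on entry to its own block
    dfs(cfg, assigned_in, true, &mut map);
    map
}

fn main() {
    // 0 -> 1, 0 -> 2, 1 -> 2
    let cfg: Cfg = vec![vec![1, 2], vec![2], vec![]];
    // Prints the per-block flags, e.g. {0: false, 1: true, 2: true} (order may vary).
    println!("{:?}", ever_initialized(&cfg, 0));
}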
|
||||||
|
|
||||||
|
fn mutability_of_locals(body: &MirBody) -> ArenaMap<LocalId, MutabilityReason> {
|
||||||
|
let mut result: ArenaMap<LocalId, MutabilityReason> =
|
||||||
|
body.locals.iter().map(|x| (x.0, MutabilityReason::Not)).collect();
|
||||||
|
let mut push_mut_span = |local, span| match &mut result[local] {
|
||||||
|
MutabilityReason::Mut { spans } => spans.push(span),
|
||||||
|
x @ MutabilityReason::Not => *x = MutabilityReason::Mut { spans: vec![span] },
|
||||||
|
};
|
||||||
|
let ever_init_maps = ever_initialized_map(body);
|
||||||
|
for (block_id, mut ever_init_map) in ever_init_maps.into_iter() {
|
||||||
|
let block = &body.basic_blocks[block_id];
|
||||||
|
for statement in &block.statements {
|
||||||
|
match &statement.kind {
|
||||||
|
StatementKind::Assign(place, value) => {
|
||||||
|
match place_case(place) {
|
||||||
|
ProjectionCase::Direct => {
|
||||||
|
if ever_init_map.get(place.local).copied().unwrap_or_default() {
|
||||||
|
push_mut_span(place.local, statement.span);
|
||||||
|
} else {
|
||||||
|
ever_init_map.insert(place.local, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ProjectionCase::DirectPart => {
|
||||||
|
// Partial initialization is not supported, so it is definitely `mut`
|
||||||
|
push_mut_span(place.local, statement.span);
|
||||||
|
}
|
||||||
|
ProjectionCase::Indirect => (),
|
||||||
|
}
|
||||||
|
if let Rvalue::Ref(BorrowKind::Mut { .. }, p) = value {
|
||||||
|
if is_place_direct(p) {
|
||||||
|
push_mut_span(p.local, statement.span);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
StatementKind::StorageDead(p) => {
|
||||||
|
ever_init_map.insert(*p, false);
|
||||||
|
}
|
||||||
|
StatementKind::Deinit(_) | StatementKind::StorageLive(_) | StatementKind::Nop => (),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let Some(terminator) = &block.terminator else {
|
||||||
|
never!("Terminator should be none only in construction");
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
match terminator {
|
||||||
|
Terminator::Goto { .. }
|
||||||
|
| Terminator::Resume
|
||||||
|
| Terminator::Abort
|
||||||
|
| Terminator::Return
|
||||||
|
| Terminator::Unreachable
|
||||||
|
| Terminator::FalseEdge { .. }
|
||||||
|
| Terminator::FalseUnwind { .. }
|
||||||
|
| Terminator::GeneratorDrop
|
||||||
|
| Terminator::SwitchInt { .. }
|
||||||
|
| Terminator::Drop { .. }
|
||||||
|
| Terminator::DropAndReplace { .. }
|
||||||
|
| Terminator::Assert { .. }
|
||||||
|
| Terminator::Yield { .. } => (),
|
||||||
|
Terminator::Call { destination, .. } => {
|
||||||
|
if destination.projection.len() == 0 {
|
||||||
|
if ever_init_map.get(destination.local).copied().unwrap_or_default() {
|
||||||
|
push_mut_span(destination.local, MirSpan::Unknown);
|
||||||
|
} else {
|
||||||
|
ever_init_map.insert(destination.local, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
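
To see what the resulting map encodes, here is a hedged source-level example (not part of the patch) of how each binding would be classified:

fn main() {
    let a = 1;     // never written after initialization: MutabilityReason::Not
    let mut b = 2; // written again below: Mut { spans } pointing at `b += 1`
    b += 1;
    let mut c = 4; // only ever read: a consumer can flag this `mut` as unused
    println!("{a} {b} {c}");
}

This is the kind of input the `NeedMut` and `UnusedMut` diagnostics added later in this diff would draw on.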
|
crates/hir-ty/src/mir/eval.rs (1253 lines, Normal file): File diff suppressed because it is too large
crates/hir-ty/src/mir/lower.rs (1577 lines, Normal file): File diff suppressed because it is too large
crates/hir-ty/src/mir/lower/as_place.rs (237 lines, Normal file)
|
@ -0,0 +1,237 @@
|
||||||
|
//! MIR lowering for places
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
use hir_expand::name;
|
||||||
|
|
||||||
|
macro_rules! not_supported {
|
||||||
|
($x: expr) => {
|
||||||
|
return Err(MirLowerError::NotSupported(format!($x)))
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MirLowerCtx<'_> {
|
||||||
|
fn lower_expr_to_some_place_without_adjust(
|
||||||
|
&mut self,
|
||||||
|
expr_id: ExprId,
|
||||||
|
prev_block: BasicBlockId,
|
||||||
|
) -> Result<Option<(Place, BasicBlockId)>> {
|
||||||
|
let ty = self.expr_ty(expr_id);
|
||||||
|
let place = self.temp(ty)?;
|
||||||
|
let Some(current) = self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
Ok(Some((place.into(), current)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn lower_expr_to_some_place_with_adjust(
|
||||||
|
&mut self,
|
||||||
|
expr_id: ExprId,
|
||||||
|
prev_block: BasicBlockId,
|
||||||
|
adjustments: &[Adjustment],
|
||||||
|
) -> Result<Option<(Place, BasicBlockId)>> {
|
||||||
|
let ty =
|
||||||
|
adjustments.last().map(|x| x.target.clone()).unwrap_or_else(|| self.expr_ty(expr_id));
|
||||||
|
let place = self.temp(ty)?;
|
||||||
|
let Some(current) = self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
Ok(Some((place.into(), current)))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(super) fn lower_expr_as_place_with_adjust(
|
||||||
|
&mut self,
|
||||||
|
current: BasicBlockId,
|
||||||
|
expr_id: ExprId,
|
||||||
|
upgrade_rvalue: bool,
|
||||||
|
adjustments: &[Adjustment],
|
||||||
|
) -> Result<Option<(Place, BasicBlockId)>> {
|
||||||
|
let try_rvalue = |this: &mut MirLowerCtx<'_>| {
|
||||||
|
if !upgrade_rvalue {
|
||||||
|
return Err(MirLowerError::MutatingRvalue);
|
||||||
|
}
|
||||||
|
this.lower_expr_to_some_place_with_adjust(expr_id, current, adjustments)
|
||||||
|
};
|
||||||
|
if let Some((last, rest)) = adjustments.split_last() {
|
||||||
|
match last.kind {
|
||||||
|
Adjust::Deref(None) => {
|
||||||
|
let Some(mut x) = self.lower_expr_as_place_with_adjust(
|
||||||
|
current,
|
||||||
|
expr_id,
|
||||||
|
upgrade_rvalue,
|
||||||
|
rest,
|
||||||
|
)? else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
x.0.projection.push(ProjectionElem::Deref);
|
||||||
|
Ok(Some(x))
|
||||||
|
}
|
||||||
|
Adjust::Deref(Some(od)) => {
|
||||||
|
let Some((r, current)) = self.lower_expr_as_place_with_adjust(
|
||||||
|
current,
|
||||||
|
expr_id,
|
||||||
|
upgrade_rvalue,
|
||||||
|
rest,
|
||||||
|
)? else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
self.lower_overloaded_deref(
|
||||||
|
current,
|
||||||
|
r,
|
||||||
|
rest.last()
|
||||||
|
.map(|x| x.target.clone())
|
||||||
|
.unwrap_or_else(|| self.expr_ty(expr_id)),
|
||||||
|
last.target.clone(),
|
||||||
|
expr_id.into(),
|
||||||
|
match od.0 {
|
||||||
|
Some(Mutability::Mut) => true,
|
||||||
|
Some(Mutability::Not) => false,
|
||||||
|
None => {
|
||||||
|
not_supported!("implicit overloaded deref with unknown mutability")
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
Adjust::NeverToAny | Adjust::Borrow(_) | Adjust::Pointer(_) => try_rvalue(self),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
self.lower_expr_as_place_without_adjust(current, expr_id, upgrade_rvalue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(super) fn lower_expr_as_place(
|
||||||
|
&mut self,
|
||||||
|
current: BasicBlockId,
|
||||||
|
expr_id: ExprId,
|
||||||
|
upgrade_rvalue: bool,
|
||||||
|
) -> Result<Option<(Place, BasicBlockId)>> {
|
||||||
|
match self.infer.expr_adjustments.get(&expr_id) {
|
||||||
|
Some(a) => self.lower_expr_as_place_with_adjust(current, expr_id, upgrade_rvalue, a),
|
||||||
|
None => self.lower_expr_as_place_without_adjust(current, expr_id, upgrade_rvalue),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(super) fn lower_expr_as_place_without_adjust(
|
||||||
|
&mut self,
|
||||||
|
current: BasicBlockId,
|
||||||
|
expr_id: ExprId,
|
||||||
|
upgrade_rvalue: bool,
|
||||||
|
) -> Result<Option<(Place, BasicBlockId)>> {
|
||||||
|
let try_rvalue = |this: &mut MirLowerCtx<'_>| {
|
||||||
|
if !upgrade_rvalue {
|
||||||
|
return Err(MirLowerError::MutatingRvalue);
|
||||||
|
}
|
||||||
|
this.lower_expr_to_some_place_without_adjust(expr_id, current)
|
||||||
|
};
|
||||||
|
match &self.body.exprs[expr_id] {
|
||||||
|
Expr::Path(p) => {
|
||||||
|
let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
|
||||||
|
let Some(pr) = resolver.resolve_path_in_value_ns(self.db.upcast(), p.mod_path()) else {
|
||||||
|
return Err(MirLowerError::unresolved_path(self.db, p));
|
||||||
|
};
|
||||||
|
let pr = match pr {
|
||||||
|
ResolveValueResult::ValueNs(v) => v,
|
||||||
|
ResolveValueResult::Partial(..) => return try_rvalue(self),
|
||||||
|
};
|
||||||
|
match pr {
|
||||||
|
ValueNs::LocalBinding(pat_id) => {
|
||||||
|
Ok(Some((self.result.binding_locals[pat_id].into(), current)))
|
||||||
|
}
|
||||||
|
_ => try_rvalue(self),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Expr::UnaryOp { expr, op } => match op {
|
||||||
|
hir_def::expr::UnaryOp::Deref => {
|
||||||
|
if !matches!(
|
||||||
|
self.expr_ty(*expr).kind(Interner),
|
||||||
|
TyKind::Ref(..) | TyKind::Raw(..)
|
||||||
|
) {
|
||||||
|
let Some(_) = self.lower_expr_as_place(current, *expr, true)? else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
not_supported!("explicit overloaded deref");
|
||||||
|
}
|
||||||
|
let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
r.projection.push(ProjectionElem::Deref);
|
||||||
|
Ok(Some((r, current)))
|
||||||
|
}
|
||||||
|
_ => try_rvalue(self),
|
||||||
|
},
|
||||||
|
Expr::Field { expr, .. } => {
|
||||||
|
let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
self.push_field_projection(&mut r, expr_id)?;
|
||||||
|
Ok(Some((r, current)))
|
||||||
|
}
|
||||||
|
Expr::Index { base, index } => {
|
||||||
|
let base_ty = self.expr_ty_after_adjustments(*base);
|
||||||
|
let index_ty = self.expr_ty_after_adjustments(*index);
|
||||||
|
if index_ty != TyBuilder::usize()
|
||||||
|
|| !matches!(base_ty.kind(Interner), TyKind::Array(..) | TyKind::Slice(..))
|
||||||
|
{
|
||||||
|
not_supported!("overloaded index");
|
||||||
|
}
|
||||||
|
let Some((mut p_base, current)) =
|
||||||
|
self.lower_expr_as_place(current, *base, true)? else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
let l_index = self.temp(self.expr_ty_after_adjustments(*index))?;
|
||||||
|
let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)? else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
p_base.projection.push(ProjectionElem::Index(l_index));
|
||||||
|
Ok(Some((p_base, current)))
|
||||||
|
}
|
||||||
|
_ => try_rvalue(self),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn lower_overloaded_deref(
|
||||||
|
&mut self,
|
||||||
|
current: BasicBlockId,
|
||||||
|
place: Place,
|
||||||
|
source_ty: Ty,
|
||||||
|
target_ty: Ty,
|
||||||
|
span: MirSpan,
|
||||||
|
mutability: bool,
|
||||||
|
) -> Result<Option<(Place, BasicBlockId)>> {
|
||||||
|
let (chalk_mut, trait_lang_item, trait_method_name, borrow_kind) = if !mutability {
|
||||||
|
(Mutability::Not, LangItem::Deref, name![deref], BorrowKind::Shared)
|
||||||
|
} else {
|
||||||
|
(
|
||||||
|
Mutability::Mut,
|
||||||
|
LangItem::DerefMut,
|
||||||
|
name![deref_mut],
|
||||||
|
BorrowKind::Mut { allow_two_phase_borrow: false },
|
||||||
|
)
|
||||||
|
};
|
||||||
|
let ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), source_ty.clone()).intern(Interner);
|
||||||
|
let target_ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), target_ty).intern(Interner);
|
||||||
|
let ref_place: Place = self.temp(ty_ref)?.into();
|
||||||
|
self.push_assignment(current, ref_place.clone(), Rvalue::Ref(borrow_kind, place), span);
|
||||||
|
let deref_trait = self
|
||||||
|
.resolve_lang_item(trait_lang_item)?
|
||||||
|
.as_trait()
|
||||||
|
.ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
|
||||||
|
let deref_fn = self
|
||||||
|
.db
|
||||||
|
.trait_data(deref_trait)
|
||||||
|
.method_by_name(&trait_method_name)
|
||||||
|
.ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
|
||||||
|
let deref_fn_op = Operand::const_zst(
|
||||||
|
TyKind::FnDef(
|
||||||
|
self.db.intern_callable_def(CallableDefId::FunctionId(deref_fn)).into(),
|
||||||
|
Substitution::from1(Interner, source_ty),
|
||||||
|
)
|
||||||
|
.intern(Interner),
|
||||||
|
);
|
||||||
|
let mut result: Place = self.temp(target_ty_ref)?.into();
|
||||||
|
let Some(current) = self.lower_call(deref_fn_op, vec![Operand::Copy(ref_place)], result.clone(), current, false)? else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
result.projection.push(ProjectionElem::Deref);
|
||||||
|
Ok(Some((result, current)))
|
||||||
|
}
|
||||||
|
}
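
As background for `lower_overloaded_deref` above, this is roughly the surface-level desugaring being modeled; the `Wrapper` type here is a made-up example, not something from the patch:

use std::ops::Deref;

struct Wrapper(String);

impl Deref for Wrapper {
    type Target = String;
    fn deref(&self) -> &String {
        &self.0
    }
}

fn len_of(w: &Wrapper) -> usize {
    // `(*w).len()` on a user type goes through the `Deref` lang item: take a
    // reference to the place, call `Deref::deref`, then apply a built-in deref,
    // which is the borrow + call + `ProjectionElem::Deref` sequence built above.
    (*Deref::deref(w)).len()
}

fn main() {
    println!("{}", len_of(&Wrapper(String::from("mir"))));
}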
|
crates/hir-ty/src/mir/pretty.rs (348 lines, Normal file)
|
@ -0,0 +1,348 @@
|
||||||
|
//! A pretty-printer for MIR.
|
||||||
|
|
||||||
|
use std::fmt::{Display, Write};
|
||||||
|
|
||||||
|
use hir_def::{body::Body, expr::BindingId};
|
||||||
|
use hir_expand::name::Name;
|
||||||
|
use la_arena::ArenaMap;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
db::HirDatabase,
|
||||||
|
display::HirDisplay,
|
||||||
|
mir::{PlaceElem, ProjectionElem, StatementKind, Terminator},
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::{
|
||||||
|
AggregateKind, BasicBlockId, BorrowKind, LocalId, MirBody, Operand, Place, Rvalue, UnOp,
|
||||||
|
};
|
||||||
|
|
||||||
|
impl MirBody {
|
||||||
|
pub fn pretty_print(&self, db: &dyn HirDatabase) -> String {
|
||||||
|
let hir_body = db.body(self.owner);
|
||||||
|
let mut ctx = MirPrettyCtx::new(self, &hir_body, db);
|
||||||
|
ctx.for_body();
|
||||||
|
ctx.result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct MirPrettyCtx<'a> {
|
||||||
|
body: &'a MirBody,
|
||||||
|
hir_body: &'a Body,
|
||||||
|
db: &'a dyn HirDatabase,
|
||||||
|
result: String,
|
||||||
|
ident: String,
|
||||||
|
local_to_binding: ArenaMap<LocalId, BindingId>,
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! w {
|
||||||
|
($dst:expr, $($arg:tt)*) => {
|
||||||
|
{ let _ = write!($dst, $($arg)*); }
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! wln {
|
||||||
|
($dst:expr) => {
|
||||||
|
{ let _ = writeln!($dst); }
|
||||||
|
};
|
||||||
|
($dst:expr, $($arg:tt)*) => {
|
||||||
|
{ let _ = writeln!($dst, $($arg)*); }
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Write for MirPrettyCtx<'_> {
|
||||||
|
fn write_str(&mut self, s: &str) -> std::fmt::Result {
|
||||||
|
let mut it = s.split('\n'); // note: `.lines()` would be wrong here, as it swallows a trailing newline
|
||||||
|
self.write(it.next().unwrap_or_default());
|
||||||
|
for line in it {
|
||||||
|
self.write_line();
|
||||||
|
self.write(line);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
enum LocalName {
|
||||||
|
Unknown(LocalId),
|
||||||
|
Binding(Name, LocalId),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for LocalName {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())),
|
||||||
|
LocalName::Binding(n, l) => write!(f, "{n}_{}", u32::from(l.into_raw())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> MirPrettyCtx<'a> {
|
||||||
|
fn for_body(&mut self) {
|
||||||
|
self.with_block(|this| {
|
||||||
|
this.locals();
|
||||||
|
wln!(this);
|
||||||
|
this.blocks();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn with_block(&mut self, f: impl FnOnce(&mut MirPrettyCtx<'_>)) {
|
||||||
|
self.ident += " ";
|
||||||
|
wln!(self, "{{");
|
||||||
|
f(self);
|
||||||
|
for _ in 0..4 {
|
||||||
|
self.result.pop();
|
||||||
|
self.ident.pop();
|
||||||
|
}
|
||||||
|
wln!(self, "}}");
|
||||||
|
}
|
||||||
|
|
||||||
|
fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self {
|
||||||
|
let local_to_binding = body.binding_locals.iter().map(|(x, y)| (*y, x)).collect();
|
||||||
|
MirPrettyCtx {
|
||||||
|
body,
|
||||||
|
db,
|
||||||
|
result: String::new(),
|
||||||
|
ident: String::new(),
|
||||||
|
local_to_binding,
|
||||||
|
hir_body,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_line(&mut self) {
|
||||||
|
self.result.push('\n');
|
||||||
|
self.result += &self.ident;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write(&mut self, line: &str) {
|
||||||
|
self.result += line;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn locals(&mut self) {
|
||||||
|
for (id, local) in self.body.locals.iter() {
|
||||||
|
wln!(self, "let {}: {};", self.local_name(id), local.ty.display(self.db));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn local_name(&self, local: LocalId) -> LocalName {
|
||||||
|
match self.local_to_binding.get(local) {
|
||||||
|
Some(b) => LocalName::Binding(self.hir_body.bindings[*b].name.clone(), local),
|
||||||
|
None => LocalName::Unknown(local),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn basic_block_id(&self, basic_block_id: BasicBlockId) -> String {
|
||||||
|
format!("'bb{}", u32::from(basic_block_id.into_raw()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn blocks(&mut self) {
|
||||||
|
for (id, block) in self.body.basic_blocks.iter() {
|
||||||
|
wln!(self);
|
||||||
|
w!(self, "{}: ", self.basic_block_id(id));
|
||||||
|
self.with_block(|this| {
|
||||||
|
for statement in &block.statements {
|
||||||
|
match &statement.kind {
|
||||||
|
StatementKind::Assign(l, r) => {
|
||||||
|
this.place(l);
|
||||||
|
w!(this, " = ");
|
||||||
|
this.rvalue(r);
|
||||||
|
wln!(this, ";");
|
||||||
|
}
|
||||||
|
StatementKind::StorageDead(p) => {
|
||||||
|
wln!(this, "StorageDead({})", this.local_name(*p));
|
||||||
|
}
|
||||||
|
StatementKind::StorageLive(p) => {
|
||||||
|
wln!(this, "StorageLive({})", this.local_name(*p));
|
||||||
|
}
|
||||||
|
StatementKind::Deinit(p) => {
|
||||||
|
w!(this, "Deinit(");
|
||||||
|
this.place(p);
|
||||||
|
wln!(this, ");");
|
||||||
|
}
|
||||||
|
StatementKind::Nop => wln!(this, "Nop;"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
match &block.terminator {
|
||||||
|
Some(terminator) => match terminator {
|
||||||
|
Terminator::Goto { target } => {
|
||||||
|
wln!(this, "goto 'bb{};", u32::from(target.into_raw()))
|
||||||
|
}
|
||||||
|
Terminator::SwitchInt { discr, targets } => {
|
||||||
|
w!(this, "switch ");
|
||||||
|
this.operand(discr);
|
||||||
|
w!(this, " ");
|
||||||
|
this.with_block(|this| {
|
||||||
|
for (c, b) in targets.iter() {
|
||||||
|
wln!(this, "{c} => {},", this.basic_block_id(b));
|
||||||
|
}
|
||||||
|
wln!(this, "_ => {},", this.basic_block_id(targets.otherwise()));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Terminator::Call { func, args, destination, target, .. } => {
|
||||||
|
w!(this, "Call ");
|
||||||
|
this.with_block(|this| {
|
||||||
|
w!(this, "func: ");
|
||||||
|
this.operand(func);
|
||||||
|
wln!(this, ",");
|
||||||
|
w!(this, "args: [");
|
||||||
|
this.operand_list(args);
|
||||||
|
wln!(this, "],");
|
||||||
|
w!(this, "destination: ");
|
||||||
|
this.place(destination);
|
||||||
|
wln!(this, ",");
|
||||||
|
w!(this, "target: ");
|
||||||
|
match target {
|
||||||
|
Some(t) => w!(this, "{}", this.basic_block_id(*t)),
|
||||||
|
None => w!(this, "<unreachable>"),
|
||||||
|
}
|
||||||
|
wln!(this, ",");
|
||||||
|
});
|
||||||
|
}
|
||||||
|
_ => wln!(this, "{:?};", terminator),
|
||||||
|
},
|
||||||
|
None => wln!(this, "<no-terminator>;"),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn place(&mut self, p: &Place) {
|
||||||
|
fn f(this: &mut MirPrettyCtx<'_>, local: LocalId, projections: &[PlaceElem]) {
|
||||||
|
let Some((last, head)) = projections.split_last() else {
|
||||||
|
// no projection
|
||||||
|
w!(this, "{}", this.local_name(local));
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
match last {
|
||||||
|
ProjectionElem::Deref => {
|
||||||
|
w!(this, "(*");
|
||||||
|
f(this, local, head);
|
||||||
|
w!(this, ")");
|
||||||
|
}
|
||||||
|
ProjectionElem::Field(field) => {
|
||||||
|
let variant_data = field.parent.variant_data(this.db.upcast());
|
||||||
|
let name = &variant_data.fields()[field.local_id].name;
|
||||||
|
match field.parent {
|
||||||
|
hir_def::VariantId::EnumVariantId(e) => {
|
||||||
|
w!(this, "(");
|
||||||
|
f(this, local, head);
|
||||||
|
let variant_name =
|
||||||
|
&this.db.enum_data(e.parent).variants[e.local_id].name;
|
||||||
|
w!(this, " as {}).{}", variant_name, name);
|
||||||
|
}
|
||||||
|
hir_def::VariantId::StructId(_) | hir_def::VariantId::UnionId(_) => {
|
||||||
|
f(this, local, head);
|
||||||
|
w!(this, ".{name}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ProjectionElem::TupleField(x) => {
|
||||||
|
f(this, local, head);
|
||||||
|
w!(this, ".{}", x);
|
||||||
|
}
|
||||||
|
ProjectionElem::Index(l) => {
|
||||||
|
f(this, local, head);
|
||||||
|
w!(this, "[{}]", this.local_name(*l));
|
||||||
|
}
|
||||||
|
x => {
|
||||||
|
f(this, local, head);
|
||||||
|
w!(this, ".{:?}", x);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
f(self, p.local, &p.projection);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn operand(&mut self, r: &Operand) {
|
||||||
|
match r {
|
||||||
|
Operand::Copy(p) | Operand::Move(p) => {
|
||||||
|
// MIR at the time of writing doesn't distinguish between move and copy, so we render them
|
||||||
|
// identically. Feel free to change this.
|
||||||
|
self.place(p);
|
||||||
|
}
|
||||||
|
Operand::Constant(c) => w!(self, "Const({})", c.display(self.db)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rvalue(&mut self, r: &Rvalue) {
|
||||||
|
match r {
|
||||||
|
Rvalue::Use(op) => self.operand(op),
|
||||||
|
Rvalue::Ref(r, p) => {
|
||||||
|
match r {
|
||||||
|
BorrowKind::Shared => w!(self, "&"),
|
||||||
|
BorrowKind::Shallow => w!(self, "&shallow "),
|
||||||
|
BorrowKind::Unique => w!(self, "&uniq "),
|
||||||
|
BorrowKind::Mut { .. } => w!(self, "&mut "),
|
||||||
|
}
|
||||||
|
self.place(p);
|
||||||
|
}
|
||||||
|
Rvalue::Aggregate(AggregateKind::Tuple(_), x) => {
|
||||||
|
w!(self, "(");
|
||||||
|
self.operand_list(x);
|
||||||
|
w!(self, ")");
|
||||||
|
}
|
||||||
|
Rvalue::Aggregate(AggregateKind::Array(_), x) => {
|
||||||
|
w!(self, "[");
|
||||||
|
self.operand_list(x);
|
||||||
|
w!(self, "]");
|
||||||
|
}
|
||||||
|
Rvalue::Aggregate(AggregateKind::Adt(_, _), x) => {
|
||||||
|
w!(self, "Adt(");
|
||||||
|
self.operand_list(x);
|
||||||
|
w!(self, ")");
|
||||||
|
}
|
||||||
|
Rvalue::Aggregate(AggregateKind::Union(_, _), x) => {
|
||||||
|
w!(self, "Union(");
|
||||||
|
self.operand_list(x);
|
||||||
|
w!(self, ")");
|
||||||
|
}
|
||||||
|
Rvalue::Len(p) => {
|
||||||
|
w!(self, "Len(");
|
||||||
|
self.place(p);
|
||||||
|
w!(self, ")");
|
||||||
|
}
|
||||||
|
Rvalue::Cast(ck, op, ty) => {
|
||||||
|
w!(self, "Discriminant({ck:?}");
|
||||||
|
self.operand(op);
|
||||||
|
w!(self, "{})", ty.display(self.db));
|
||||||
|
}
|
||||||
|
Rvalue::CheckedBinaryOp(b, o1, o2) => {
|
||||||
|
self.operand(o1);
|
||||||
|
w!(self, " {b} ");
|
||||||
|
self.operand(o2);
|
||||||
|
}
|
||||||
|
Rvalue::UnaryOp(u, o) => {
|
||||||
|
let u = match u {
|
||||||
|
UnOp::Not => "!",
|
||||||
|
UnOp::Neg => "-",
|
||||||
|
};
|
||||||
|
w!(self, "{u} ");
|
||||||
|
self.operand(o);
|
||||||
|
}
|
||||||
|
Rvalue::Discriminant(p) => {
|
||||||
|
w!(self, "Discriminant(");
|
||||||
|
self.place(p);
|
||||||
|
w!(self, ")");
|
||||||
|
}
|
||||||
|
Rvalue::ShallowInitBox(op, _) => {
|
||||||
|
w!(self, "ShallowInitBox(");
|
||||||
|
self.operand(op);
|
||||||
|
w!(self, ")");
|
||||||
|
}
|
||||||
|
Rvalue::CopyForDeref(p) => {
|
||||||
|
w!(self, "CopyForDeref(");
|
||||||
|
self.place(p);
|
||||||
|
w!(self, ")");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn operand_list(&mut self, x: &[Operand]) {
|
||||||
|
let mut it = x.iter();
|
||||||
|
if let Some(first) = it.next() {
|
||||||
|
self.operand(first);
|
||||||
|
for op in it {
|
||||||
|
w!(self, ", ");
|
||||||
|
self.operand(op);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
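
The indentation trick above (a `fmt::Write` impl that re-emits the current indent after every newline) can be seen in isolation below; this standalone sketch uses made-up names and is not part of the commit:

use std::fmt::{self, Write};

struct Indented {
    out: String,
    indent: String,
}

impl Write for Indented {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        // Split on '\n' rather than using `.lines()`, so that a trailing newline
        // still causes the indent to be emitted for whatever comes next.
        let mut parts = s.split('\n');
        self.out.push_str(parts.next().unwrap_or_default());
        for part in parts {
            self.out.push('\n');
            self.out.push_str(&self.indent);
            self.out.push_str(part);
        }
        Ok(())
    }
}

fn main() {
    let mut p = Indented { out: String::new(), indent: "    ".to_owned() };
    let _ = write!(p, "first\nsecond\nthird");
    println!("{}", p.out);
    // first
    //     second
    //     third
}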
|
|
@ -61,22 +61,27 @@ fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
|
||||||
Some(tracing::subscriber::set_default(subscriber))
|
Some(tracing::subscriber::set_default(subscriber))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[track_caller]
|
||||||
fn check_types(ra_fixture: &str) {
|
fn check_types(ra_fixture: &str) {
|
||||||
check_impl(ra_fixture, false, true, false)
|
check_impl(ra_fixture, false, true, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[track_caller]
|
||||||
fn check_types_source_code(ra_fixture: &str) {
|
fn check_types_source_code(ra_fixture: &str) {
|
||||||
check_impl(ra_fixture, false, true, true)
|
check_impl(ra_fixture, false, true, true)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[track_caller]
|
||||||
fn check_no_mismatches(ra_fixture: &str) {
|
fn check_no_mismatches(ra_fixture: &str) {
|
||||||
check_impl(ra_fixture, true, false, false)
|
check_impl(ra_fixture, true, false, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[track_caller]
|
||||||
fn check(ra_fixture: &str) {
|
fn check(ra_fixture: &str) {
|
||||||
check_impl(ra_fixture, false, false, false)
|
check_impl(ra_fixture, false, false, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[track_caller]
|
||||||
fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_source: bool) {
|
fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_source: bool) {
|
||||||
let _tracing = setup_tracing();
|
let _tracing = setup_tracing();
|
||||||
let (db, files) = TestDB::with_many_files(ra_fixture);
|
let (db, files) = TestDB::with_many_files(ra_fixture);
|
||||||
|
@ -158,7 +163,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
|
||||||
} else {
|
} else {
|
||||||
ty.display_test(&db).to_string()
|
ty.display_test(&db).to_string()
|
||||||
};
|
};
|
||||||
assert_eq!(actual, expected);
|
assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -174,7 +179,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
|
||||||
} else {
|
} else {
|
||||||
ty.display_test(&db).to_string()
|
ty.display_test(&db).to_string()
|
||||||
};
|
};
|
||||||
assert_eq!(actual, expected);
|
assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
|
||||||
}
|
}
|
||||||
if let Some(expected) = adjustments.remove(&range) {
|
if let Some(expected) = adjustments.remove(&range) {
|
||||||
let adjustments = inference_result
|
let adjustments = inference_result
|
||||||
|
@ -191,30 +196,11 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (pat, mismatch) in inference_result.pat_type_mismatches() {
|
for (expr_or_pat, mismatch) in inference_result.type_mismatches() {
|
||||||
let node = match pat_node(&body_source_map, pat, &db) {
|
let Some(node) = (match expr_or_pat {
|
||||||
Some(value) => value,
|
hir_def::expr::ExprOrPatId::ExprId(expr) => expr_node(&body_source_map, expr, &db),
|
||||||
None => continue,
|
hir_def::expr::ExprOrPatId::PatId(pat) => pat_node(&body_source_map, pat, &db),
|
||||||
};
|
}) else { continue; };
|
||||||
let range = node.as_ref().original_file_range(&db);
|
|
||||||
let actual = format!(
|
|
||||||
"expected {}, got {}",
|
|
||||||
mismatch.expected.display_test(&db),
|
|
||||||
mismatch.actual.display_test(&db)
|
|
||||||
);
|
|
||||||
match mismatches.remove(&range) {
|
|
||||||
Some(annotation) => assert_eq!(actual, annotation),
|
|
||||||
None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (expr, mismatch) in inference_result.expr_type_mismatches() {
|
|
||||||
let node = match body_source_map.expr_syntax(expr) {
|
|
||||||
Ok(sp) => {
|
|
||||||
let root = db.parse_or_expand(sp.file_id).unwrap();
|
|
||||||
sp.map(|ptr| ptr.to_node(&root).syntax().clone())
|
|
||||||
}
|
|
||||||
Err(SyntheticSyntax) => continue,
|
|
||||||
};
|
|
||||||
let range = node.as_ref().original_file_range(&db);
|
let range = node.as_ref().original_file_range(&db);
|
||||||
let actual = format!(
|
let actual = format!(
|
||||||
"expected {}, got {}",
|
"expected {}, got {}",
|
||||||
|
|
|
@ -258,6 +258,7 @@ fn test() {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn coerce_autoderef_block() {
|
fn coerce_autoderef_block() {
|
||||||
|
// FIXME: We should know mutability in overloaded deref
|
||||||
check_no_mismatches(
|
check_no_mismatches(
|
||||||
r#"
|
r#"
|
||||||
//- minicore: deref
|
//- minicore: deref
|
||||||
|
@ -267,7 +268,7 @@ fn takes_ref_str(x: &str) {}
|
||||||
fn returns_string() -> String { loop {} }
|
fn returns_string() -> String { loop {} }
|
||||||
fn test() {
|
fn test() {
|
||||||
takes_ref_str(&{ returns_string() });
|
takes_ref_str(&{ returns_string() });
|
||||||
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Not))), Borrow(Ref(Not))
|
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(None))), Borrow(Ref(Not))
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
|
|
|
@ -73,3 +73,24 @@ fn test(x: bool) -> &'static str {
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn non_unit_block_expr_stmt_no_semi() {
|
||||||
|
check(
|
||||||
|
r#"
|
||||||
|
fn test(x: bool) {
|
||||||
|
if x {
|
||||||
|
"notok"
|
||||||
|
//^^^^^^^ expected (), got &str
|
||||||
|
} else {
|
||||||
|
"ok"
|
||||||
|
//^^^^ expected (), got &str
|
||||||
|
}
|
||||||
|
match x { true => true, false => 0 }
|
||||||
|
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected (), got bool
|
||||||
|
//^ expected bool, got i32
|
||||||
|
()
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
|
@ -1167,7 +1167,6 @@ fn test() {
|
||||||
123..167 '{ ...o(); }': ()
|
123..167 '{ ...o(); }': ()
|
||||||
133..134 's': &S
|
133..134 's': &S
|
||||||
137..151 'unsafe { f() }': &S
|
137..151 'unsafe { f() }': &S
|
||||||
137..151 'unsafe { f() }': &S
|
|
||||||
146..147 'f': fn f() -> &S
|
146..147 'f': fn f() -> &S
|
||||||
146..149 'f()': &S
|
146..149 'f()': &S
|
||||||
157..158 's': &S
|
157..158 's': &S
|
||||||
|
@ -1253,6 +1252,7 @@ fn foo<T: Trait>(a: &T) {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn autoderef_visibility_field() {
|
fn autoderef_visibility_field() {
|
||||||
|
// FIXME: We should know mutability in overloaded deref
|
||||||
check(
|
check(
|
||||||
r#"
|
r#"
|
||||||
//- minicore: deref
|
//- minicore: deref
|
||||||
|
@ -1274,7 +1274,7 @@ mod a {
|
||||||
mod b {
|
mod b {
|
||||||
fn foo() {
|
fn foo() {
|
||||||
let x = super::a::Bar::new().0;
|
let x = super::a::Bar::new().0;
|
||||||
// ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Not)))
|
// ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(None)))
|
||||||
// ^^^^^^^^^^^^^^^^^^^^^^ type: char
|
// ^^^^^^^^^^^^^^^^^^^^^^ type: char
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -476,7 +476,7 @@ fn infer_adt_pattern() {
|
||||||
183..184 'x': usize
|
183..184 'x': usize
|
||||||
190..191 'x': usize
|
190..191 'x': usize
|
||||||
201..205 'E::B': E
|
201..205 'E::B': E
|
||||||
209..212 'foo': bool
|
209..212 'foo': {unknown}
|
||||||
216..217 '1': usize
|
216..217 '1': usize
|
||||||
227..231 'E::B': E
|
227..231 'E::B': E
|
||||||
235..237 '10': usize
|
235..237 '10': usize
|
||||||
|
@ -953,9 +953,9 @@ fn main() {
|
||||||
42..51 'true | ()': bool
|
42..51 'true | ()': bool
|
||||||
49..51 '()': ()
|
49..51 '()': ()
|
||||||
57..59 '{}': ()
|
57..59 '{}': ()
|
||||||
68..80 '(() | true,)': ((),)
|
68..80 '(() | true,)': (bool,)
|
||||||
69..71 '()': ()
|
69..71 '()': ()
|
||||||
69..78 '() | true': ()
|
69..78 '() | true': bool
|
||||||
74..78 'true': bool
|
74..78 'true': bool
|
||||||
74..78 'true': bool
|
74..78 'true': bool
|
||||||
84..86 '{}': ()
|
84..86 '{}': ()
|
||||||
|
@ -964,19 +964,15 @@ fn main() {
|
||||||
96..102 '_ | ()': bool
|
96..102 '_ | ()': bool
|
||||||
100..102 '()': ()
|
100..102 '()': ()
|
||||||
108..110 '{}': ()
|
108..110 '{}': ()
|
||||||
119..128 '(() | _,)': ((),)
|
119..128 '(() | _,)': (bool,)
|
||||||
120..122 '()': ()
|
120..122 '()': ()
|
||||||
120..126 '() | _': ()
|
120..126 '() | _': bool
|
||||||
125..126 '_': bool
|
125..126 '_': bool
|
||||||
132..134 '{}': ()
|
132..134 '{}': ()
|
||||||
49..51: expected bool, got ()
|
49..51: expected bool, got ()
|
||||||
68..80: expected (bool,), got ((),)
|
|
||||||
69..71: expected bool, got ()
|
69..71: expected bool, got ()
|
||||||
69..78: expected bool, got ()
|
|
||||||
100..102: expected bool, got ()
|
100..102: expected bool, got ()
|
||||||
119..128: expected (bool,), got ((),)
|
|
||||||
120..122: expected bool, got ()
|
120..122: expected bool, got ()
|
||||||
120..126: expected bool, got ()
|
|
||||||
"#]],
|
"#]],
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -1092,3 +1088,19 @@ fn my_fn(foo: ...) {}
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn ref_pat_mutability() {
|
||||||
|
check(
|
||||||
|
r#"
|
||||||
|
fn foo() {
|
||||||
|
let &() = &();
|
||||||
|
let &mut () = &mut ();
|
||||||
|
let &mut () = &();
|
||||||
|
//^^^^^^^ expected &(), got &mut ()
|
||||||
|
let &() = &mut ();
|
||||||
|
//^^^ expected &mut (), got &()
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
|
@ -270,7 +270,7 @@ fn infer_std_crash_5() {
|
||||||
61..320 '{ ... }': ()
|
61..320 '{ ... }': ()
|
||||||
75..79 'name': &{unknown}
|
75..79 'name': &{unknown}
|
||||||
82..166 'if doe... }': &{unknown}
|
82..166 'if doe... }': &{unknown}
|
||||||
85..98 'doesnt_matter': bool
|
85..98 'doesnt_matter': {unknown}
|
||||||
99..128 '{ ... }': &{unknown}
|
99..128 '{ ... }': &{unknown}
|
||||||
113..118 'first': &{unknown}
|
113..118 'first': &{unknown}
|
||||||
134..166 '{ ... }': &{unknown}
|
134..166 '{ ... }': &{unknown}
|
||||||
|
@ -279,7 +279,7 @@ fn infer_std_crash_5() {
|
||||||
181..188 'content': &{unknown}
|
181..188 'content': &{unknown}
|
||||||
191..313 'if ICE... }': &{unknown}
|
191..313 'if ICE... }': &{unknown}
|
||||||
194..231 'ICE_RE..._VALUE': {unknown}
|
194..231 'ICE_RE..._VALUE': {unknown}
|
||||||
194..247 'ICE_RE...&name)': bool
|
194..247 'ICE_RE...&name)': {unknown}
|
||||||
241..246 '&name': &&{unknown}
|
241..246 '&name': &&{unknown}
|
||||||
242..246 'name': &{unknown}
|
242..246 'name': &{unknown}
|
||||||
248..276 '{ ... }': &{unknown}
|
248..276 '{ ... }': &{unknown}
|
||||||
|
@ -1015,9 +1015,9 @@ fn cfg_tail() {
|
||||||
20..31 '{ "first" }': ()
|
20..31 '{ "first" }': ()
|
||||||
22..29 '"first"': &str
|
22..29 '"first"': &str
|
||||||
72..190 '{ ...] 13 }': ()
|
72..190 '{ ...] 13 }': ()
|
||||||
78..88 '{ "fake" }': &str
|
78..88 '{ "fake" }': ()
|
||||||
80..86 '"fake"': &str
|
80..86 '"fake"': &str
|
||||||
93..103 '{ "fake" }': &str
|
93..103 '{ "fake" }': ()
|
||||||
95..101 '"fake"': &str
|
95..101 '"fake"': &str
|
||||||
108..120 '{ "second" }': ()
|
108..120 '{ "second" }': ()
|
||||||
110..118 '"second"': &str
|
110..118 '"second"': &str
|
||||||
|
@ -1744,3 +1744,15 @@ fn foo(b: Bar) {
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn regression_14305() {
|
||||||
|
check_no_mismatches(
|
||||||
|
r#"
|
||||||
|
//- minicore: add
|
||||||
|
trait Tr {}
|
||||||
|
impl Tr for [u8; C] {}
|
||||||
|
const C: usize = 2 + 2;
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
|
@ -352,7 +352,6 @@ unsafe fn baz(u: MyUnion) {
|
||||||
71..89 'MyUnio...o: 0 }': MyUnion
|
71..89 'MyUnio...o: 0 }': MyUnion
|
||||||
86..87 '0': u32
|
86..87 '0': u32
|
||||||
95..113 'unsafe...(u); }': ()
|
95..113 'unsafe...(u); }': ()
|
||||||
95..113 'unsafe...(u); }': ()
|
|
||||||
104..107 'baz': fn baz(MyUnion)
|
104..107 'baz': fn baz(MyUnion)
|
||||||
104..110 'baz(u)': ()
|
104..110 'baz(u)': ()
|
||||||
108..109 'u': MyUnion
|
108..109 'u': MyUnion
|
||||||
|
@ -360,7 +359,6 @@ unsafe fn baz(u: MyUnion) {
|
||||||
126..146 'MyUnio... 0.0 }': MyUnion
|
126..146 'MyUnio... 0.0 }': MyUnion
|
||||||
141..144 '0.0': f32
|
141..144 '0.0': f32
|
||||||
152..170 'unsafe...(u); }': ()
|
152..170 'unsafe...(u); }': ()
|
||||||
152..170 'unsafe...(u); }': ()
|
|
||||||
161..164 'baz': fn baz(MyUnion)
|
161..164 'baz': fn baz(MyUnion)
|
||||||
161..167 'baz(u)': ()
|
161..167 'baz(u)': ()
|
||||||
165..166 'u': MyUnion
|
165..166 'u': MyUnion
|
||||||
|
@ -2077,22 +2075,17 @@ async fn main() {
|
||||||
16..193 '{ ...2 }; }': ()
|
16..193 '{ ...2 }; }': ()
|
||||||
26..27 'x': i32
|
26..27 'x': i32
|
||||||
30..43 'unsafe { 92 }': i32
|
30..43 'unsafe { 92 }': i32
|
||||||
30..43 'unsafe { 92 }': i32
|
|
||||||
39..41 '92': i32
|
39..41 '92': i32
|
||||||
53..54 'y': impl Future<Output = ()>
|
53..54 'y': impl Future<Output = ()>
|
||||||
57..85 'async ...wait }': ()
|
|
||||||
57..85 'async ...wait }': impl Future<Output = ()>
|
57..85 'async ...wait }': impl Future<Output = ()>
|
||||||
65..77 'async { () }': ()
|
|
||||||
65..77 'async { () }': impl Future<Output = ()>
|
65..77 'async { () }': impl Future<Output = ()>
|
||||||
65..83 'async ....await': ()
|
65..83 'async ....await': ()
|
||||||
73..75 '()': ()
|
73..75 '()': ()
|
||||||
95..96 'z': ControlFlow<(), ()>
|
95..96 'z': ControlFlow<(), ()>
|
||||||
130..140 'try { () }': ()
|
|
||||||
130..140 'try { () }': ControlFlow<(), ()>
|
130..140 'try { () }': ControlFlow<(), ()>
|
||||||
136..138 '()': ()
|
136..138 '()': ()
|
||||||
150..151 'w': i32
|
150..151 'w': i32
|
||||||
154..166 'const { 92 }': i32
|
154..166 'const { 92 }': i32
|
||||||
154..166 'const { 92 }': i32
|
|
||||||
162..164 '92': i32
|
162..164 '92': i32
|
||||||
176..177 't': i32
|
176..177 't': i32
|
||||||
180..190 ''a: { 92 }': i32
|
180..190 ''a: { 92 }': i32
|
||||||
|
@ -2122,7 +2115,6 @@ fn main() {
|
||||||
83..84 'f': F
|
83..84 'f': F
|
||||||
89..91 '{}': ()
|
89..91 '{}': ()
|
||||||
103..231 '{ ... }); }': ()
|
103..231 '{ ... }); }': ()
|
||||||
109..161 'async ... }': Result<(), ()>
|
|
||||||
109..161 'async ... }': impl Future<Output = Result<(), ()>>
|
109..161 'async ... }': impl Future<Output = Result<(), ()>>
|
||||||
125..139 'return Err(())': !
|
125..139 'return Err(())': !
|
||||||
132..135 'Err': Err<(), ()>(()) -> Result<(), ()>
|
132..135 'Err': Err<(), ()>(()) -> Result<(), ()>
|
||||||
|
@ -2134,7 +2126,6 @@ fn main() {
|
||||||
167..171 'test': fn test<(), (), || -> impl Future<Output = Result<(), ()>>, impl Future<Output = Result<(), ()>>>(|| -> impl Future<Output = Result<(), ()>>)
|
167..171 'test': fn test<(), (), || -> impl Future<Output = Result<(), ()>>, impl Future<Output = Result<(), ()>>>(|| -> impl Future<Output = Result<(), ()>>)
|
||||||
167..228 'test(|... })': ()
|
167..228 'test(|... })': ()
|
||||||
172..227 '|| asy... }': || -> impl Future<Output = Result<(), ()>>
|
172..227 '|| asy... }': || -> impl Future<Output = Result<(), ()>>
|
||||||
175..227 'async ... }': Result<(), ()>
|
|
||||||
175..227 'async ... }': impl Future<Output = Result<(), ()>>
|
175..227 'async ... }': impl Future<Output = Result<(), ()>>
|
||||||
191..205 'return Err(())': !
|
191..205 'return Err(())': !
|
||||||
198..201 'Err': Err<(), ()>(()) -> Result<(), ()>
|
198..201 'Err': Err<(), ()>(()) -> Result<(), ()>
|
||||||
|
@ -3283,3 +3274,18 @@ fn func() {
|
||||||
"#]],
|
"#]],
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn issue_14275() {
|
||||||
|
// FIXME: evaluate const generic
|
||||||
|
check_types(
|
||||||
|
r#"
|
||||||
|
struct Foo<const T: bool>;
|
||||||
|
fn main() {
|
||||||
|
const B: bool = false;
|
||||||
|
let foo = Foo::<B>;
|
||||||
|
//^^^ Foo<_>
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
|
@ -5,6 +5,7 @@ use std::iter;
|
||||||
|
|
||||||
use base_db::CrateId;
|
use base_db::CrateId;
|
||||||
use chalk_ir::{cast::Cast, fold::Shift, BoundVar, DebruijnIndex};
|
use chalk_ir::{cast::Cast, fold::Shift, BoundVar, DebruijnIndex};
|
||||||
|
use either::Either;
|
||||||
use hir_def::{
|
use hir_def::{
|
||||||
db::DefDatabase,
|
db::DefDatabase,
|
||||||
generics::{
|
generics::{
|
||||||
|
@ -19,7 +20,6 @@ use hir_def::{
|
||||||
};
|
};
|
||||||
use hir_expand::name::Name;
|
use hir_expand::name::Name;
|
||||||
use intern::Interned;
|
use intern::Interned;
|
||||||
use itertools::Either;
|
|
||||||
use rustc_hash::FxHashSet;
|
use rustc_hash::FxHashSet;
|
||||||
use smallvec::{smallvec, SmallVec};
|
use smallvec::{smallvec, SmallVec};
|
||||||
|
|
||||||
|
@ -315,7 +315,10 @@ fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<Generic
|
||||||
GenericDefId::TypeAliasId(it) => it.lookup(db).container,
|
GenericDefId::TypeAliasId(it) => it.lookup(db).container,
|
||||||
GenericDefId::ConstId(it) => it.lookup(db).container,
|
GenericDefId::ConstId(it) => it.lookup(db).container,
|
||||||
GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
|
GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
|
||||||
GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
|
GenericDefId::AdtId(_)
|
||||||
|
| GenericDefId::TraitId(_)
|
||||||
|
| GenericDefId::ImplId(_)
|
||||||
|
| GenericDefId::TraitAliasId(_) => return None,
|
||||||
};
|
};
|
||||||
|
|
||||||
match container {
|
match container {
|
||||||
|
|
|
@ -14,7 +14,8 @@ use syntax::{ast, AstNode};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
Adt, AssocItem, Const, ConstParam, Enum, Field, Function, GenericParam, Impl, LifetimeParam,
|
Adt, AssocItem, Const, ConstParam, Enum, Field, Function, GenericParam, Impl, LifetimeParam,
|
||||||
Macro, Module, ModuleDef, Static, Struct, Trait, TypeAlias, TypeParam, Union, Variant,
|
Macro, Module, ModuleDef, Static, Struct, Trait, TraitAlias, TypeAlias, TypeParam, Union,
|
||||||
|
Variant,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub trait HasAttrs {
|
pub trait HasAttrs {
|
||||||
|
@ -60,6 +61,7 @@ impl_has_attrs![
|
||||||
(Static, StaticId),
|
(Static, StaticId),
|
||||||
(Const, ConstId),
|
(Const, ConstId),
|
||||||
(Trait, TraitId),
|
(Trait, TraitId),
|
||||||
|
(TraitAlias, TraitAliasId),
|
||||||
(TypeAlias, TypeAliasId),
|
(TypeAlias, TypeAliasId),
|
||||||
(Macro, MacroId),
|
(Macro, MacroId),
|
||||||
(Function, FunctionId),
|
(Function, FunctionId),
|
||||||
|
@ -134,6 +136,7 @@ fn resolve_doc_path(
|
||||||
AttrDefId::StaticId(it) => it.resolver(db.upcast()),
|
AttrDefId::StaticId(it) => it.resolver(db.upcast()),
|
||||||
AttrDefId::ConstId(it) => it.resolver(db.upcast()),
|
AttrDefId::ConstId(it) => it.resolver(db.upcast()),
|
||||||
AttrDefId::TraitId(it) => it.resolver(db.upcast()),
|
AttrDefId::TraitId(it) => it.resolver(db.upcast()),
|
||||||
|
AttrDefId::TraitAliasId(it) => it.resolver(db.upcast()),
|
||||||
AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()),
|
AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()),
|
||||||
AttrDefId::ImplId(it) => it.resolver(db.upcast()),
|
AttrDefId::ImplId(it) => it.resolver(db.upcast()),
|
||||||
AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()),
|
AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()),
|
||||||
|
|
|
@ -5,8 +5,9 @@
|
||||||
//! But we need this for at least LRU caching at the query level.
|
//! But we need this for at least LRU caching at the query level.
|
||||||
pub use hir_def::db::*;
|
pub use hir_def::db::*;
|
||||||
pub use hir_expand::db::{
|
pub use hir_expand::db::{
|
||||||
AstDatabase, AstDatabaseStorage, AstIdMapQuery, HygieneFrameQuery, InternMacroCallQuery,
|
AstDatabase, AstDatabaseStorage, AstIdMapQuery, ExpandProcMacroQuery, HygieneFrameQuery,
|
||||||
MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, ParseMacroExpansionQuery,
|
InternMacroCallQuery, MacroArgTextQuery, MacroDefQuery, MacroExpandErrorQuery,
|
||||||
|
MacroExpandQuery, ParseMacroExpansionQuery,
|
||||||
};
|
};
|
||||||
pub use hir_ty::db::*;
|
pub use hir_ty::db::*;
|
||||||
|
|
||||||
|
|
|
@ -10,7 +10,7 @@ use hir_def::path::ModPath;
|
||||||
use hir_expand::{name::Name, HirFileId, InFile};
|
use hir_expand::{name::Name, HirFileId, InFile};
|
||||||
use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange};
|
use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange};
|
||||||
|
|
||||||
use crate::{AssocItem, Field, MacroKind, Type};
|
use crate::{AssocItem, Field, Local, MacroKind, Type};
|
||||||
|
|
||||||
macro_rules! diagnostics {
|
macro_rules! diagnostics {
|
||||||
($($diag:ident,)*) => {
|
($($diag:ident,)*) => {
|
||||||
|
@ -31,6 +31,7 @@ macro_rules! diagnostics {
|
||||||
|
|
||||||
diagnostics![
|
diagnostics![
|
||||||
BreakOutsideOfLoop,
|
BreakOutsideOfLoop,
|
||||||
|
ExpectedFunction,
|
||||||
InactiveCode,
|
InactiveCode,
|
||||||
IncorrectCase,
|
IncorrectCase,
|
||||||
InvalidDeriveTarget,
|
InvalidDeriveTarget,
|
||||||
|
@ -40,6 +41,7 @@ diagnostics![
|
||||||
MissingFields,
|
MissingFields,
|
||||||
MissingMatchArms,
|
MissingMatchArms,
|
||||||
MissingUnsafe,
|
MissingUnsafe,
|
||||||
|
NeedMut,
|
||||||
NoSuchField,
|
NoSuchField,
|
||||||
PrivateAssocItem,
|
PrivateAssocItem,
|
||||||
PrivateField,
|
PrivateField,
|
||||||
|
@ -47,10 +49,13 @@ diagnostics![
|
||||||
TypeMismatch,
|
TypeMismatch,
|
||||||
UnimplementedBuiltinMacro,
|
UnimplementedBuiltinMacro,
|
||||||
UnresolvedExternCrate,
|
UnresolvedExternCrate,
|
||||||
|
UnresolvedField,
|
||||||
UnresolvedImport,
|
UnresolvedImport,
|
||||||
UnresolvedMacroCall,
|
UnresolvedMacroCall,
|
||||||
|
UnresolvedMethodCall,
|
||||||
UnresolvedModule,
|
UnresolvedModule,
|
||||||
UnresolvedProcMacro,
|
UnresolvedProcMacro,
|
||||||
|
UnusedMut,
|
||||||
];
|
];
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
@ -130,6 +135,28 @@ pub struct PrivateAssocItem {
|
||||||
pub item: AssocItem,
|
pub item: AssocItem,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct ExpectedFunction {
|
||||||
|
pub call: InFile<AstPtr<ast::Expr>>,
|
||||||
|
pub found: Type,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct UnresolvedField {
|
||||||
|
pub expr: InFile<AstPtr<ast::Expr>>,
|
||||||
|
pub receiver: Type,
|
||||||
|
pub name: Name,
|
||||||
|
pub method_with_same_name_exists: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct UnresolvedMethodCall {
|
||||||
|
pub expr: InFile<AstPtr<ast::Expr>>,
|
||||||
|
pub receiver: Type,
|
||||||
|
pub name: Name,
|
||||||
|
pub field_with_same_name: Option<Type>,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct PrivateField {
|
pub struct PrivateField {
|
||||||
pub expr: InFile<AstPtr<ast::Expr>>,
|
pub expr: InFile<AstPtr<ast::Expr>>,
|
||||||
|
@ -140,6 +167,7 @@ pub struct PrivateField {
|
||||||
pub struct BreakOutsideOfLoop {
|
pub struct BreakOutsideOfLoop {
|
||||||
pub expr: InFile<AstPtr<ast::Expr>>,
|
pub expr: InFile<AstPtr<ast::Expr>>,
|
||||||
pub is_break: bool,
|
pub is_break: bool,
|
||||||
|
pub bad_value_break: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
@ -171,17 +199,26 @@ pub struct MismatchedArgCount {
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct MissingMatchArms {
|
pub struct MissingMatchArms {
|
||||||
pub file: HirFileId,
|
pub scrutinee_expr: InFile<AstPtr<ast::Expr>>,
|
||||||
pub match_expr: AstPtr<ast::Expr>,
|
|
||||||
pub uncovered_patterns: String,
|
pub uncovered_patterns: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct TypeMismatch {
|
pub struct TypeMismatch {
|
||||||
// FIXME: add mismatches in patterns as well
|
pub expr_or_pat: Either<InFile<AstPtr<ast::Expr>>, InFile<AstPtr<ast::Pat>>>,
|
||||||
pub expr: InFile<AstPtr<ast::Expr>>,
|
|
||||||
pub expected: Type,
|
pub expected: Type,
|
||||||
pub actual: Type,
|
pub actual: Type,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct NeedMut {
|
||||||
|
pub local: Local,
|
||||||
|
pub span: InFile<SyntaxNodePtr>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct UnusedMut {
|
||||||
|
pub local: Local,
|
||||||
|
}
|
||||||
|
|
||||||
pub use hir_ty::diagnostics::IncorrectCase;
|
pub use hir_ty::diagnostics::IncorrectCase;
|
||||||
|
|
|
@ -17,15 +17,23 @@ use hir_ty::{
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
Adt, Const, ConstParam, Enum, Field, Function, GenericParam, HasCrate, HasVisibility,
|
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, Field, Function, GenericParam,
|
||||||
LifetimeParam, Macro, Module, Static, Struct, Trait, TyBuilder, Type, TypeAlias,
|
HasCrate, HasVisibility, LifetimeParam, Macro, Module, Static, Struct, Trait, TraitAlias,
|
||||||
TypeOrConstParam, TypeParam, Union, Variant,
|
TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
|
||||||
};
|
};
|
||||||
|
|
||||||
impl HirDisplay for Function {
|
impl HirDisplay for Function {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
let data = f.db.function_data(self.id);
|
let db = f.db;
|
||||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
let data = db.function_data(self.id);
|
||||||
|
let container = self.as_assoc_item(db).map(|it| it.container(db));
|
||||||
|
let mut module = self.module(db);
|
||||||
|
if let Some(AssocItemContainer::Impl(_)) = container {
|
||||||
|
// Block-local impls are "hoisted" to the nearest (non-block) module.
|
||||||
|
module = module.nearest_non_block_module(db);
|
||||||
|
}
|
||||||
|
let module_id = module.id;
|
||||||
|
write_visibility(module_id, self.visibility(db), f)?;
|
||||||
if data.has_default_kw() {
|
if data.has_default_kw() {
|
||||||
f.write_str("default ")?;
|
f.write_str("default ")?;
|
||||||
}
|
}
|
||||||
|
@ -35,7 +43,7 @@ impl HirDisplay for Function {
|
||||||
if data.has_async_kw() {
|
if data.has_async_kw() {
|
||||||
f.write_str("async ")?;
|
f.write_str("async ")?;
|
||||||
}
|
}
|
||||||
if self.is_unsafe_to_call(f.db) {
|
if self.is_unsafe_to_call(db) {
|
||||||
f.write_str("unsafe ")?;
|
f.write_str("unsafe ")?;
|
||||||
}
|
}
|
||||||
if let Some(abi) = &data.abi {
|
if let Some(abi) = &data.abi {
|
||||||
|
@ -50,7 +58,7 @@ impl HirDisplay for Function {
|
||||||
|
|
||||||
let write_self_param = |ty: &TypeRef, f: &mut HirFormatter<'_>| match ty {
|
let write_self_param = |ty: &TypeRef, f: &mut HirFormatter<'_>| match ty {
|
||||||
TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
|
TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
|
||||||
TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner,TypeRef::Path(p) if p.is_self_type()) =>
|
TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner, TypeRef::Path(p) if p.is_self_type()) =>
|
||||||
{
|
{
|
||||||
f.write_char('&')?;
|
f.write_char('&')?;
|
||||||
if let Some(lifetime) = lifetime {
|
if let Some(lifetime) = lifetime {
|
||||||
|
@ -442,8 +450,15 @@ fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(),
|
||||||
|
|
||||||
impl HirDisplay for Const {
|
impl HirDisplay for Const {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
let db = f.db;
|
||||||
let data = f.db.const_data(self.id);
|
let container = self.as_assoc_item(db).map(|it| it.container(db));
|
||||||
|
let mut module = self.module(db);
|
||||||
|
if let Some(AssocItemContainer::Impl(_)) = container {
|
||||||
|
// Block-local impls are "hoisted" to the nearest (non-block) module.
|
||||||
|
module = module.nearest_non_block_module(db);
|
||||||
|
}
|
||||||
|
write_visibility(module.id, self.visibility(db), f)?;
|
||||||
|
let data = db.const_data(self.id);
|
||||||
f.write_str("const ")?;
|
f.write_str("const ")?;
|
||||||
match &data.name {
|
match &data.name {
|
||||||
Some(name) => write!(f, "{name}: ")?,
|
Some(name) => write!(f, "{name}: ")?,
|
||||||
|
@ -486,6 +501,22 @@ impl HirDisplay for Trait {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl HirDisplay for TraitAlias {
|
||||||
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
|
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||||
|
let data = f.db.trait_alias_data(self.id);
|
||||||
|
write!(f, "trait {}", data.name)?;
|
||||||
|
let def_id = GenericDefId::TraitAliasId(self.id);
|
||||||
|
write_generic_params(def_id, f)?;
|
||||||
|
f.write_str(" = ")?;
|
||||||
|
// FIXME: Currently we lower every bound in a trait alias as a trait bound on `Self`, i.e.
|
||||||
|
// `trait Foo = Bar` is stored and displayed as `trait Foo = where Self: Bar`, which might
|
||||||
|
// be less readable.
|
||||||
|
write_where_clause(def_id, f)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl HirDisplay for TypeAlias {
|
impl HirDisplay for TypeAlias {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
//! are splitting the hir.
|
//! are splitting the hir.
|
||||||
|
|
||||||
use hir_def::{
|
use hir_def::{
|
||||||
expr::{LabelId, PatId},
|
expr::{BindingId, LabelId},
|
||||||
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, GenericDefId, GenericParamId,
|
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, GenericDefId, GenericParamId,
|
||||||
ModuleDefId, VariantId,
|
ModuleDefId, VariantId,
|
||||||
};
|
};
|
||||||
|
@@ -37,6 +37,7 @@ from_id![
     (hir_def::EnumId, crate::Enum),
     (hir_def::TypeAliasId, crate::TypeAlias),
     (hir_def::TraitId, crate::Trait),
+    (hir_def::TraitAliasId, crate::TraitAlias),
     (hir_def::StaticId, crate::Static),
     (hir_def::ConstId, crate::Const),
     (hir_def::FunctionId, crate::Function),
@@ -110,6 +111,7 @@ impl From<ModuleDefId> for ModuleDef {
             ModuleDefId::ConstId(it) => ModuleDef::Const(it.into()),
             ModuleDefId::StaticId(it) => ModuleDef::Static(it.into()),
             ModuleDefId::TraitId(it) => ModuleDef::Trait(it.into()),
+            ModuleDefId::TraitAliasId(it) => ModuleDef::TraitAlias(it.into()),
             ModuleDefId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()),
             ModuleDefId::BuiltinType(it) => ModuleDef::BuiltinType(it.into()),
             ModuleDefId::MacroId(it) => ModuleDef::Macro(it.into()),
@@ -127,6 +129,7 @@ impl From<ModuleDef> for ModuleDefId {
             ModuleDef::Const(it) => ModuleDefId::ConstId(it.into()),
             ModuleDef::Static(it) => ModuleDefId::StaticId(it.into()),
             ModuleDef::Trait(it) => ModuleDefId::TraitId(it.into()),
+            ModuleDef::TraitAlias(it) => ModuleDefId::TraitAliasId(it.into()),
             ModuleDef::TypeAlias(it) => ModuleDefId::TypeAliasId(it.into()),
             ModuleDef::BuiltinType(it) => ModuleDefId::BuiltinType(it.into()),
             ModuleDef::Macro(it) => ModuleDefId::MacroId(it.into()),
@@ -172,6 +175,7 @@ impl From<GenericDef> for GenericDefId {
             GenericDef::Function(it) => GenericDefId::FunctionId(it.id),
             GenericDef::Adt(it) => GenericDefId::AdtId(it.into()),
             GenericDef::Trait(it) => GenericDefId::TraitId(it.id),
+            GenericDef::TraitAlias(it) => GenericDefId::TraitAliasId(it.id),
             GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id),
             GenericDef::Impl(it) => GenericDefId::ImplId(it.id),
             GenericDef::Variant(it) => GenericDefId::EnumVariantId(it.into()),
@@ -186,6 +190,7 @@ impl From<GenericDefId> for GenericDef {
             GenericDefId::FunctionId(it) => GenericDef::Function(it.into()),
             GenericDefId::AdtId(it) => GenericDef::Adt(it.into()),
             GenericDefId::TraitId(it) => GenericDef::Trait(it.into()),
+            GenericDefId::TraitAliasId(it) => GenericDef::TraitAlias(it.into()),
             GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
             GenericDefId::ImplId(it) => GenericDef::Impl(it.into()),
             GenericDefId::EnumVariantId(it) => GenericDef::Variant(it.into()),
@@ -246,9 +251,9 @@ impl From<AssocItem> for GenericDefId
     }
 }
 
-impl From<(DefWithBodyId, PatId)> for Local {
-    fn from((parent, pat_id): (DefWithBodyId, PatId)) -> Self {
-        Local { parent, pat_id }
+impl From<(DefWithBodyId, BindingId)> for Local {
+    fn from((parent, binding_id): (DefWithBodyId, BindingId)) -> Self {
+        Local { parent, binding_id }
     }
 }
 
@@ -10,8 +10,9 @@ use hir_expand::InFile;
 use syntax::ast;
 
 use crate::{
-    db::HirDatabase, Adt, Const, Enum, Field, FieldSource, Function, Impl, LifetimeParam, Macro,
-    Module, Static, Struct, Trait, TypeAlias, TypeOrConstParam, Union, Variant,
+    db::HirDatabase, Adt, Const, Enum, Field, FieldSource, Function, Impl, LifetimeParam,
+    LocalSource, Macro, Module, Static, Struct, Trait, TraitAlias, TypeAlias, TypeOrConstParam,
+    Union, Variant,
 };
 
 pub trait HasSource {
@@ -122,6 +123,12 @@ impl HasSource for Trait
         Some(self.id.lookup(db.upcast()).source(db.upcast()))
     }
 }
+impl HasSource for TraitAlias {
+    type Ast = ast::TraitAlias;
+    fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+    }
+}
 impl HasSource for TypeAlias {
     type Ast = ast::TypeAlias;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
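A minimal sketch of how the new `HasSource for TraitAlias` impl above might be used from an IDE layer; the function name is illustrative and `db` is assumed to implement `hir::db::HirDatabase`:

    use hir::HasSource;

    fn trait_alias_decl_text(db: &dyn hir::db::HirDatabase, alias: hir::TraitAlias) -> Option<String> {
        // `source` returns the `ast::TraitAlias` node together with the file it came from.
        let src = alias.source(db)?;
        Some(src.value.to_string())
    }
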
@@ -158,7 +165,7 @@ impl HasSource for Impl
 }
 
 impl HasSource for TypeOrConstParam {
-    type Ast = Either<ast::TypeOrConstParam, ast::Trait>;
+    type Ast = Either<ast::TypeOrConstParam, ast::TraitOrAlias>;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
         let child_source = self.id.parent.child_source(db.upcast());
         Some(child_source.map(|it| it[self.id.local_id].clone()))
@@ -172,3 +179,11 @@ impl HasSource for LifetimeParam
         Some(child_source.map(|it| it[self.id.local_id].clone()))
     }
 }
 
+impl HasSource for LocalSource {
+    type Ast = Either<ast::IdentPat, ast::SelfParam>;
+
+    fn source(self, _: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+        Some(self.source)
+    }
+}
@@ -41,34 +41,34 @@ use either::Either;
 use hir_def::{
     adt::VariantData,
     body::{BodyDiagnostic, SyntheticSyntax},
-    expr::{BindingAnnotation, ExprOrPatId, LabelId, Pat, PatId},
-    generics::{TypeOrConstParamData, TypeParamProvenance},
+    expr::{BindingAnnotation, BindingId, ExprOrPatId, LabelId, Pat},
+    generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance},
     item_tree::ItemTreeNode,
     lang_item::{LangItem, LangItemTarget},
     layout::{Layout, LayoutError, ReprOptions},
-    nameres::{self, diagnostics::DefDiagnostic},
+    nameres::{self, diagnostics::DefDiagnostic, ModuleOrigin},
     per_ns::PerNs,
     resolver::{HasResolver, Resolver},
     src::HasSource as _,
-    type_ref::ConstScalar,
     AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
     EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
     LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
-    TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
+    TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
 };
 use hir_expand::{name::name, MacroCallKind};
 use hir_ty::{
     all_super_traits, autoderef,
-    consteval::{unknown_const_as_generic, ComputedExpr, ConstEvalError, ConstExt},
+    consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
     diagnostics::BodyValidationDiagnostic,
+    display::HexifiedConst,
     layout::layout_of_ty,
     method_resolution::{self, TyFingerprint},
+    mir::{self, interpret_mir},
     primitive::UintTy,
     traits::FnTrait,
     AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId,
-    ConcreteConst, ConstValue, GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar,
-    Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind,
-    WhereClause,
+    GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
+    TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, WhereClause,
 };
 use itertools::Itertools;
 use nameres::diagnostics::DefDiagnosticKind;
@@ -77,7 +77,7 @@ use rustc_hash::FxHashSet;
 use stdx::{impl_from, never};
 use syntax::{
     ast::{self, HasAttrs as _, HasDocComments, HasName},
-    AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T,
+    AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, T,
 };
 
 use crate::db::{DefDatabase, HirDatabase};
@@ -85,12 +85,12 @@ use crate::db::{DefDatabase, HirDatabase};
 pub use crate::{
     attrs::{HasAttrs, Namespace},
     diagnostics::{
-        AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget,
-        MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms,
-        MissingUnsafe, NoSuchField, PrivateAssocItem, PrivateField,
+        AnyDiagnostic, BreakOutsideOfLoop, ExpectedFunction, InactiveCode, IncorrectCase,
+        InvalidDeriveTarget, MacroError, MalformedDerive, MismatchedArgCount, MissingFields,
+        MissingMatchArms, MissingUnsafe, NeedMut, NoSuchField, PrivateAssocItem, PrivateField,
         ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro,
-        UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall, UnresolvedModule,
-        UnresolvedProcMacro,
+        UnresolvedExternCrate, UnresolvedField, UnresolvedImport, UnresolvedMacroCall,
+        UnresolvedMethodCall, UnresolvedModule, UnresolvedProcMacro, UnusedMut,
     },
     has_source::HasSource,
     semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
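A rough sketch of how a consumer of the `hir` crate could react to the diagnostics newly re-exported above (`NeedMut`, `UnusedMut`, `UnresolvedField`, `UnresolvedMethodCall`, ...), assuming the usual `AnyDiagnostic` enum shape; the helper and its arms are illustrative only:

    use hir::AnyDiagnostic;

    fn short_code(diag: &AnyDiagnostic) -> &'static str {
        match diag {
            AnyDiagnostic::NeedMut(_) => "need-mut",
            AnyDiagnostic::UnusedMut(_) => "unused-mut",
            AnyDiagnostic::UnresolvedField(_) => "unresolved-field",
            AnyDiagnostic::UnresolvedMethodCall(_) => "unresolved-method-call",
            _ => "other",
        }
    }
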
@ -130,6 +130,7 @@ pub use {
|
||||||
},
|
},
|
||||||
hir_ty::{
|
hir_ty::{
|
||||||
display::{HirDisplay, HirDisplayError, HirWrite},
|
display::{HirDisplay, HirDisplayError, HirWrite},
|
||||||
|
mir::MirEvalError,
|
||||||
PointerCast, Safety,
|
PointerCast, Safety,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
@ -272,6 +273,7 @@ pub enum ModuleDef {
|
||||||
Const(Const),
|
Const(Const),
|
||||||
Static(Static),
|
Static(Static),
|
||||||
Trait(Trait),
|
Trait(Trait),
|
||||||
|
TraitAlias(TraitAlias),
|
||||||
TypeAlias(TypeAlias),
|
TypeAlias(TypeAlias),
|
||||||
BuiltinType(BuiltinType),
|
BuiltinType(BuiltinType),
|
||||||
Macro(Macro),
|
Macro(Macro),
|
||||||
|
@ -284,6 +286,7 @@ impl_from!(
|
||||||
Const,
|
Const,
|
||||||
Static,
|
Static,
|
||||||
Trait,
|
Trait,
|
||||||
|
TraitAlias,
|
||||||
TypeAlias,
|
TypeAlias,
|
||||||
BuiltinType,
|
BuiltinType,
|
||||||
Macro
|
Macro
|
||||||
|
@ -310,6 +313,7 @@ impl ModuleDef {
|
||||||
ModuleDef::Const(it) => Some(it.module(db)),
|
ModuleDef::Const(it) => Some(it.module(db)),
|
||||||
ModuleDef::Static(it) => Some(it.module(db)),
|
ModuleDef::Static(it) => Some(it.module(db)),
|
||||||
ModuleDef::Trait(it) => Some(it.module(db)),
|
ModuleDef::Trait(it) => Some(it.module(db)),
|
||||||
|
ModuleDef::TraitAlias(it) => Some(it.module(db)),
|
||||||
ModuleDef::TypeAlias(it) => Some(it.module(db)),
|
ModuleDef::TypeAlias(it) => Some(it.module(db)),
|
||||||
ModuleDef::Macro(it) => Some(it.module(db)),
|
ModuleDef::Macro(it) => Some(it.module(db)),
|
||||||
ModuleDef::BuiltinType(_) => None,
|
ModuleDef::BuiltinType(_) => None,
|
||||||
|
@ -338,6 +342,7 @@ impl ModuleDef {
|
||||||
ModuleDef::Const(it) => it.name(db)?,
|
ModuleDef::Const(it) => it.name(db)?,
|
||||||
ModuleDef::Adt(it) => it.name(db),
|
ModuleDef::Adt(it) => it.name(db),
|
||||||
ModuleDef::Trait(it) => it.name(db),
|
ModuleDef::Trait(it) => it.name(db),
|
||||||
|
ModuleDef::TraitAlias(it) => it.name(db),
|
||||||
ModuleDef::Function(it) => it.name(db),
|
ModuleDef::Function(it) => it.name(db),
|
||||||
ModuleDef::Variant(it) => it.name(db),
|
ModuleDef::Variant(it) => it.name(db),
|
||||||
ModuleDef::TypeAlias(it) => it.name(db),
|
ModuleDef::TypeAlias(it) => it.name(db),
|
||||||
|
@ -356,6 +361,7 @@ impl ModuleDef {
|
||||||
Adt::Union(it) => it.id.into(),
|
Adt::Union(it) => it.id.into(),
|
||||||
},
|
},
|
||||||
ModuleDef::Trait(it) => it.id.into(),
|
ModuleDef::Trait(it) => it.id.into(),
|
||||||
|
ModuleDef::TraitAlias(it) => it.id.into(),
|
||||||
ModuleDef::Function(it) => it.id.into(),
|
ModuleDef::Function(it) => it.id.into(),
|
||||||
ModuleDef::TypeAlias(it) => it.id.into(),
|
ModuleDef::TypeAlias(it) => it.id.into(),
|
||||||
ModuleDef::Module(it) => it.id.into(),
|
ModuleDef::Module(it) => it.id.into(),
|
||||||
|
@ -398,6 +404,7 @@ impl ModuleDef {
|
||||||
ModuleDef::Module(_)
|
ModuleDef::Module(_)
|
||||||
| ModuleDef::Adt(_)
|
| ModuleDef::Adt(_)
|
||||||
| ModuleDef::Trait(_)
|
| ModuleDef::Trait(_)
|
||||||
|
| ModuleDef::TraitAlias(_)
|
||||||
| ModuleDef::TypeAlias(_)
|
| ModuleDef::TypeAlias(_)
|
||||||
| ModuleDef::Macro(_)
|
| ModuleDef::Macro(_)
|
||||||
| ModuleDef::BuiltinType(_) => None,
|
| ModuleDef::BuiltinType(_) => None,
|
||||||
|
@ -413,6 +420,7 @@ impl ModuleDef {
|
||||||
ModuleDef::Const(it) => it.attrs(db),
|
ModuleDef::Const(it) => it.attrs(db),
|
||||||
ModuleDef::Static(it) => it.attrs(db),
|
ModuleDef::Static(it) => it.attrs(db),
|
||||||
ModuleDef::Trait(it) => it.attrs(db),
|
ModuleDef::Trait(it) => it.attrs(db),
|
||||||
|
ModuleDef::TraitAlias(it) => it.attrs(db),
|
||||||
ModuleDef::TypeAlias(it) => it.attrs(db),
|
ModuleDef::TypeAlias(it) => it.attrs(db),
|
||||||
ModuleDef::Macro(it) => it.attrs(db),
|
ModuleDef::Macro(it) => it.attrs(db),
|
||||||
ModuleDef::BuiltinType(_) => return None,
|
ModuleDef::BuiltinType(_) => return None,
|
||||||
|
@ -429,6 +437,7 @@ impl HasVisibility for ModuleDef {
|
||||||
ModuleDef::Const(it) => it.visibility(db),
|
ModuleDef::Const(it) => it.visibility(db),
|
||||||
ModuleDef::Static(it) => it.visibility(db),
|
ModuleDef::Static(it) => it.visibility(db),
|
||||||
ModuleDef::Trait(it) => it.visibility(db),
|
ModuleDef::Trait(it) => it.visibility(db),
|
||||||
|
ModuleDef::TraitAlias(it) => it.visibility(db),
|
||||||
ModuleDef::TypeAlias(it) => it.visibility(db),
|
ModuleDef::TypeAlias(it) => it.visibility(db),
|
||||||
ModuleDef::Variant(it) => it.visibility(db),
|
ModuleDef::Variant(it) => it.visibility(db),
|
||||||
ModuleDef::Macro(it) => it.visibility(db),
|
ModuleDef::Macro(it) => it.visibility(db),
|
||||||
|
@ -488,6 +497,20 @@ impl Module {
|
||||||
Some(Module { id: def_map.module_id(parent_id) })
|
Some(Module { id: def_map.module_id(parent_id) })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Finds nearest non-block ancestor `Module` (`self` included).
|
||||||
|
pub fn nearest_non_block_module(self, db: &dyn HirDatabase) -> Module {
|
||||||
|
let mut id = self.id;
|
||||||
|
loop {
|
||||||
|
let def_map = id.def_map(db.upcast());
|
||||||
|
let origin = def_map[id.local_id].origin;
|
||||||
|
if matches!(origin, ModuleOrigin::BlockExpr { .. }) {
|
||||||
|
id = id.containing_module(db.upcast()).expect("block without parent module")
|
||||||
|
} else {
|
||||||
|
return Module { id };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> {
|
pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> {
|
||||||
let mut res = vec![self];
|
let mut res = vec![self];
|
||||||
let mut curr = self;
|
let mut curr = self;
|
||||||
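A small illustration of what `nearest_non_block_module` is for, tying back to the `Const` rendering change earlier in this commit (items in block-local impls are "hoisted" to the enclosing non-block module for display); the source below is hypothetical:

    #[allow(dead_code)]
    mod outer {
        fn f() {
            // This impl lives in a block-expression module. For an item defined in it,
            // `module.nearest_non_block_module(db)` keeps walking `ModuleOrigin::BlockExpr`
            // parents until it reaches `outer`, and returns that module.
            struct S;
            impl S {
                const C: i32 = 1;
            }
        }
    }
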
|
@ -1092,8 +1115,8 @@ impl Variant {
|
||||||
self.source(db)?.value.expr()
|
self.source(db)?.value.expr()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError> {
|
pub fn eval(self, db: &dyn HirDatabase) -> Result<i128, ConstEvalError> {
|
||||||
db.const_eval_variant(self.into())
|
db.const_eval_discriminant(self.into())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1170,6 +1193,25 @@ impl Adt {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns the lifetime of the DataType
|
||||||
|
pub fn lifetime(&self, db: &dyn HirDatabase) -> Option<LifetimeParamData> {
|
||||||
|
let resolver = match self {
|
||||||
|
Adt::Struct(s) => s.id.resolver(db.upcast()),
|
||||||
|
Adt::Union(u) => u.id.resolver(db.upcast()),
|
||||||
|
Adt::Enum(e) => e.id.resolver(db.upcast()),
|
||||||
|
};
|
||||||
|
resolver
|
||||||
|
.generic_params()
|
||||||
|
.and_then(|gp| {
|
||||||
|
(&gp.lifetimes)
|
||||||
|
.iter()
|
||||||
|
// there should only be a single lifetime
|
||||||
|
// but `Arena` requires to use an iterator
|
||||||
|
.nth(0)
|
||||||
|
})
|
||||||
|
.map(|arena| arena.1.clone())
|
||||||
|
}
|
||||||
|
|
||||||
pub fn as_enum(&self) -> Option<Enum> {
|
pub fn as_enum(&self) -> Option<Enum> {
|
||||||
if let Self::Enum(v) = self {
|
if let Self::Enum(v) = self {
|
||||||
Some(*v)
|
Some(*v)
|
||||||
|
@ -1285,6 +1327,15 @@ impl DefWithBody {
|
||||||
body.pretty_print(db.upcast(), self.id())
|
body.pretty_print(db.upcast(), self.id())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// A textual representation of the MIR of this def's body for debugging purposes.
|
||||||
|
pub fn debug_mir(self, db: &dyn HirDatabase) -> String {
|
||||||
|
let body = db.mir_body(self.id());
|
||||||
|
match body {
|
||||||
|
Ok(body) => body.pretty_print(db),
|
||||||
|
Err(e) => format!("error:\n{e:?}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
|
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
|
||||||
let krate = self.module(db).id.krate();
|
let krate = self.module(db).id.krate();
|
||||||
|
|
||||||
|
@ -1334,42 +1385,35 @@ impl DefWithBody {
|
||||||
|
|
||||||
let infer = db.infer(self.into());
|
let infer = db.infer(self.into());
|
||||||
let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
|
let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
|
||||||
|
let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
|
||||||
for d in &infer.diagnostics {
|
for d in &infer.diagnostics {
|
||||||
match d {
|
match d {
|
||||||
hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
|
&hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
|
||||||
let field = source_map.field_syntax(*expr);
|
let field = source_map.field_syntax(expr);
|
||||||
acc.push(NoSuchField { field }.into())
|
acc.push(NoSuchField { field }.into())
|
||||||
}
|
}
|
||||||
&hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break } => {
|
&hir_ty::InferenceDiagnostic::BreakOutsideOfLoop {
|
||||||
let expr = source_map
|
expr,
|
||||||
.expr_syntax(expr)
|
is_break,
|
||||||
.expect("break outside of loop in synthetic syntax");
|
bad_value_break,
|
||||||
acc.push(BreakOutsideOfLoop { expr, is_break }.into())
|
} => {
|
||||||
|
let expr = expr_syntax(expr);
|
||||||
|
acc.push(BreakOutsideOfLoop { expr, is_break, bad_value_break }.into())
|
||||||
}
|
}
|
||||||
hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
|
&hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
|
||||||
match source_map.expr_syntax(*call_expr) {
|
acc.push(
|
||||||
Ok(source_ptr) => acc.push(
|
MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }
|
||||||
MismatchedArgCount {
|
|
||||||
call_expr: source_ptr,
|
|
||||||
expected: *expected,
|
|
||||||
found: *found,
|
|
||||||
}
|
|
||||||
.into(),
|
.into(),
|
||||||
),
|
)
|
||||||
Err(SyntheticSyntax) => (),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
&hir_ty::InferenceDiagnostic::PrivateField { expr, field } => {
|
&hir_ty::InferenceDiagnostic::PrivateField { expr, field } => {
|
||||||
let expr = source_map.expr_syntax(expr).expect("unexpected synthetic");
|
let expr = expr_syntax(expr);
|
||||||
let field = field.into();
|
let field = field.into();
|
||||||
acc.push(PrivateField { expr, field }.into())
|
acc.push(PrivateField { expr, field }.into())
|
||||||
}
|
}
|
||||||
&hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
|
&hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
|
||||||
let expr_or_pat = match id {
|
let expr_or_pat = match id {
|
||||||
ExprOrPatId::ExprId(expr) => source_map
|
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
|
||||||
.expr_syntax(expr)
|
|
||||||
.expect("unexpected synthetic")
|
|
||||||
.map(Either::Left),
|
|
||||||
ExprOrPatId::PatId(pat) => source_map
|
ExprOrPatId::PatId(pat) => source_map
|
||||||
.pat_syntax(pat)
|
.pat_syntax(pat)
|
||||||
.expect("unexpected synthetic")
|
.expect("unexpected synthetic")
|
||||||
|
@ -1378,16 +1422,76 @@ impl DefWithBody {
|
||||||
let item = item.into();
|
let item = item.into();
|
||||||
acc.push(PrivateAssocItem { expr_or_pat, item }.into())
|
acc.push(PrivateAssocItem { expr_or_pat, item }.into())
|
||||||
}
|
}
|
||||||
|
hir_ty::InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
|
||||||
|
let call_expr = expr_syntax(*call_expr);
|
||||||
|
|
||||||
|
acc.push(
|
||||||
|
ExpectedFunction {
|
||||||
|
call: call_expr,
|
||||||
|
found: Type::new(db, DefWithBodyId::from(self), found.clone()),
|
||||||
|
}
|
||||||
|
.into(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
hir_ty::InferenceDiagnostic::UnresolvedField {
|
||||||
|
expr,
|
||||||
|
receiver,
|
||||||
|
name,
|
||||||
|
method_with_same_name_exists,
|
||||||
|
} => {
|
||||||
|
let expr = expr_syntax(*expr);
|
||||||
|
|
||||||
|
acc.push(
|
||||||
|
UnresolvedField {
|
||||||
|
expr,
|
||||||
|
name: name.clone(),
|
||||||
|
receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
|
||||||
|
method_with_same_name_exists: *method_with_same_name_exists,
|
||||||
|
}
|
||||||
|
.into(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
hir_ty::InferenceDiagnostic::UnresolvedMethodCall {
|
||||||
|
expr,
|
||||||
|
receiver,
|
||||||
|
name,
|
||||||
|
field_with_same_name,
|
||||||
|
} => {
|
||||||
|
let expr = expr_syntax(*expr);
|
||||||
|
|
||||||
|
acc.push(
|
||||||
|
UnresolvedMethodCall {
|
||||||
|
expr,
|
||||||
|
name: name.clone(),
|
||||||
|
receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
|
||||||
|
field_with_same_name: field_with_same_name
|
||||||
|
.clone()
|
||||||
|
.map(|ty| Type::new(db, DefWithBodyId::from(self), ty)),
|
||||||
|
}
|
||||||
|
.into(),
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for (expr, mismatch) in infer.expr_type_mismatches() {
|
for (pat_or_expr, mismatch) in infer.type_mismatches() {
|
||||||
let expr = match source_map.expr_syntax(expr) {
|
let expr_or_pat = match pat_or_expr {
|
||||||
Ok(expr) => expr,
|
ExprOrPatId::ExprId(expr) => source_map.expr_syntax(expr).map(Either::Left),
|
||||||
Err(SyntheticSyntax) => continue,
|
// FIXME: Re-enable these once we have less false positives
|
||||||
|
ExprOrPatId::PatId(_pat) => continue,
|
||||||
|
#[allow(unreachable_patterns)]
|
||||||
|
ExprOrPatId::PatId(pat) => source_map.pat_syntax(pat).map(Either::Right),
|
||||||
};
|
};
|
||||||
|
let expr_or_pat = match expr_or_pat {
|
||||||
|
Ok(Either::Left(expr)) => Either::Left(expr),
|
||||||
|
Ok(Either::Right(InFile { file_id, value: Either::Left(pat) })) => {
|
||||||
|
Either::Right(InFile { file_id, value: pat })
|
||||||
|
}
|
||||||
|
Ok(Either::Right(_)) | Err(SyntheticSyntax) => continue,
|
||||||
|
};
|
||||||
|
|
||||||
acc.push(
|
acc.push(
|
||||||
TypeMismatch {
|
TypeMismatch {
|
||||||
expr,
|
expr_or_pat,
|
||||||
expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected.clone()),
|
expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected.clone()),
|
||||||
actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.clone()),
|
actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.clone()),
|
||||||
}
|
}
|
||||||
|
@ -1405,6 +1509,41 @@ impl DefWithBody {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let hir_body = db.body(self.into());
|
||||||
|
|
||||||
|
if let Ok(borrowck_result) = db.borrowck(self.into()) {
|
||||||
|
let mir_body = &borrowck_result.mir_body;
|
||||||
|
let mol = &borrowck_result.mutability_of_locals;
|
||||||
|
for (binding_id, _) in hir_body.bindings.iter() {
|
||||||
|
let need_mut = &mol[mir_body.binding_locals[binding_id]];
|
||||||
|
let local = Local { parent: self.into(), binding_id };
|
||||||
|
match (need_mut, local.is_mut(db)) {
|
||||||
|
(mir::MutabilityReason::Mut { .. }, true)
|
||||||
|
| (mir::MutabilityReason::Not, false) => (),
|
||||||
|
(mir::MutabilityReason::Mut { spans }, false) => {
|
||||||
|
for span in spans {
|
||||||
|
let span: InFile<SyntaxNodePtr> = match span {
|
||||||
|
mir::MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
|
||||||
|
Ok(s) => s.map(|x| x.into()),
|
||||||
|
Err(_) => continue,
|
||||||
|
},
|
||||||
|
mir::MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
|
||||||
|
Ok(s) => s.map(|x| match x {
|
||||||
|
Either::Left(e) => e.into(),
|
||||||
|
Either::Right(e) => e.into(),
|
||||||
|
}),
|
||||||
|
Err(_) => continue,
|
||||||
|
},
|
||||||
|
mir::MirSpan::Unknown => continue,
|
||||||
|
};
|
||||||
|
acc.push(NeedMut { local, span }.into());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(mir::MutabilityReason::Not, true) => acc.push(UnusedMut { local }.into()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
|
for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
|
||||||
match diagnostic {
|
match diagnostic {
|
||||||
BodyValidationDiagnostic::RecordMissingFields {
|
BodyValidationDiagnostic::RecordMissingFields {
|
||||||
|
@ -1489,11 +1628,13 @@ impl DefWithBody {
|
||||||
if let ast::Expr::MatchExpr(match_expr) =
|
if let ast::Expr::MatchExpr(match_expr) =
|
||||||
&source_ptr.value.to_node(&root)
|
&source_ptr.value.to_node(&root)
|
||||||
{
|
{
|
||||||
if let Some(match_expr) = match_expr.expr() {
|
if let Some(scrut_expr) = match_expr.expr() {
|
||||||
acc.push(
|
acc.push(
|
||||||
MissingMatchArms {
|
MissingMatchArms {
|
||||||
file: source_ptr.file_id,
|
scrutinee_expr: InFile::new(
|
||||||
match_expr: AstPtr::new(&match_expr),
|
source_ptr.file_id,
|
||||||
|
AstPtr::new(&scrut_expr),
|
||||||
|
),
|
||||||
uncovered_patterns,
|
uncovered_patterns,
|
||||||
}
|
}
|
||||||
.into(),
|
.into(),
|
||||||
|
@ -1582,6 +1723,10 @@ impl Function {
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn num_params(self, db: &dyn HirDatabase) -> usize {
|
||||||
|
db.function_data(self.id).params.len()
|
||||||
|
}
|
||||||
|
|
||||||
pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
|
pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
|
||||||
if self.self_param(db).is_none() {
|
if self.self_param(db).is_none() {
|
||||||
return None;
|
return None;
|
||||||
|
@ -1639,6 +1784,14 @@ impl Function {
|
||||||
let def_map = db.crate_def_map(loc.krate(db).into());
|
let def_map = db.crate_def_map(loc.krate(db).into());
|
||||||
def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
|
def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn eval(self, db: &dyn HirDatabase) -> Result<(), MirEvalError> {
|
||||||
|
let body = db
|
||||||
|
.mir_body(self.id.into())
|
||||||
|
.map_err(|e| MirEvalError::MirLowerError(self.id.into(), e))?;
|
||||||
|
interpret_mir(db, &body, false)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Note: logically, this belongs to `hir_ty`, but we are not using it there yet.
|
// Note: logically, this belongs to `hir_ty`, but we are not using it there yet.
|
||||||
|
@ -1679,8 +1832,8 @@ impl Param {
|
||||||
let parent = DefWithBodyId::FunctionId(self.func.into());
|
let parent = DefWithBodyId::FunctionId(self.func.into());
|
||||||
let body = db.body(parent);
|
let body = db.body(parent);
|
||||||
let pat_id = body.params[self.idx];
|
let pat_id = body.params[self.idx];
|
||||||
if let Pat::Bind { .. } = &body[pat_id] {
|
if let Pat::Bind { id, .. } = &body[pat_id] {
|
||||||
Some(Local { parent, pat_id: body.params[self.idx] })
|
Some(Local { parent, binding_id: *id })
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
@@ -1781,8 +1934,18 @@ impl Const
         Type::new_with_resolver_inner(db, &resolver, ty)
     }
 
-    pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError> {
-        db.const_eval(self.id)
+    pub fn render_eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> {
+        let c = db.const_eval(self.id)?;
+        let r = format!("{}", HexifiedConst(c).display(db));
+        // We want to see things like `<utf8-error>` and `<layout-error>` as they are probably bug in our
+        // implementation, but there is no need to show things like `<enum-not-supported>` or `<ref-not-supported>` to
+        // the user.
+        if r.contains("not-supported>") {
+            return Err(ConstEvalError::MirEvalError(MirEvalError::NotSupported(
+                "rendering complex constants".to_string(),
+            )));
+        }
+        return Ok(r);
     }
 }
 
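A minimal usage sketch for the new `Const::render_eval` above, assuming a `db` that implements `HirDatabase`; the function name and fallback string are illustrative:

    fn const_eval_hover_text(db: &dyn hir::db::HirDatabase, konst: hir::Const) -> String {
        match konst.render_eval(db) {
            // On success this is the hexified rendering produced via `HexifiedConst`.
            Ok(rendered) => rendered,
            // `<...-not-supported>` renderings are reported as a `ConstEvalError` instead.
            Err(_) => "<const eval failed>".to_string(),
        }
    }
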
||||||
|
@@ -1893,6 +2056,27 @@ impl HasVisibility for Trait
     }
 }
 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TraitAlias {
+    pub(crate) id: TraitAliasId,
+}
+
+impl TraitAlias {
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).container }
+    }
+
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.trait_alias_data(self.id).name.clone()
+    }
+}
+
+impl HasVisibility for TraitAlias {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        db.trait_alias_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+    }
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct TypeAlias {
     pub(crate) id: TypeAliasId,
||||||
|
@ -2265,6 +2449,7 @@ pub enum GenericDef {
|
||||||
Function(Function),
|
Function(Function),
|
||||||
Adt(Adt),
|
Adt(Adt),
|
||||||
Trait(Trait),
|
Trait(Trait),
|
||||||
|
TraitAlias(TraitAlias),
|
||||||
TypeAlias(TypeAlias),
|
TypeAlias(TypeAlias),
|
||||||
Impl(Impl),
|
Impl(Impl),
|
||||||
// enum variants cannot have generics themselves, but their parent enums
|
// enum variants cannot have generics themselves, but their parent enums
|
||||||
|
@ -2277,6 +2462,7 @@ impl_from!(
|
||||||
Function,
|
Function,
|
||||||
Adt(Struct, Enum, Union),
|
Adt(Struct, Enum, Union),
|
||||||
Trait,
|
Trait,
|
||||||
|
TraitAlias,
|
||||||
TypeAlias,
|
TypeAlias,
|
||||||
Impl,
|
Impl,
|
||||||
Variant,
|
Variant,
|
||||||
|
@ -2317,20 +2503,53 @@ impl GenericDef {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A single local definition.
|
/// A single local definition.
|
||||||
///
|
|
||||||
/// If the definition of this is part of a "MultiLocal", that is a local that has multiple declarations due to or-patterns
|
|
||||||
/// then this only references a single one of those.
|
|
||||||
/// To retrieve the other locals you should use [`Local::associated_locals`]
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||||
pub struct Local {
|
pub struct Local {
|
||||||
pub(crate) parent: DefWithBodyId,
|
pub(crate) parent: DefWithBodyId,
|
||||||
pub(crate) pat_id: PatId,
|
pub(crate) binding_id: BindingId,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct LocalSource {
|
||||||
|
pub local: Local,
|
||||||
|
pub source: InFile<Either<ast::IdentPat, ast::SelfParam>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LocalSource {
|
||||||
|
pub fn as_ident_pat(&self) -> Option<&ast::IdentPat> {
|
||||||
|
match &self.source.value {
|
||||||
|
Either::Left(x) => Some(x),
|
||||||
|
Either::Right(_) => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn into_ident_pat(self) -> Option<ast::IdentPat> {
|
||||||
|
match self.source.value {
|
||||||
|
Either::Left(x) => Some(x),
|
||||||
|
Either::Right(_) => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn original_file(&self, db: &dyn HirDatabase) -> FileId {
|
||||||
|
self.source.file_id.original_file(db.upcast())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn name(&self) -> Option<ast::Name> {
|
||||||
|
self.source.value.name()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn syntax(&self) -> &SyntaxNode {
|
||||||
|
self.source.value.syntax()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn syntax_ptr(self) -> InFile<SyntaxNodePtr> {
|
||||||
|
self.source.map(|x| SyntaxNodePtr::new(x.syntax()))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Local {
|
impl Local {
|
||||||
pub fn is_param(self, db: &dyn HirDatabase) -> bool {
|
pub fn is_param(self, db: &dyn HirDatabase) -> bool {
|
||||||
let src = self.source(db);
|
let src = self.primary_source(db);
|
||||||
match src.value {
|
match src.source.value {
|
||||||
Either::Left(pat) => pat
|
Either::Left(pat) => pat
|
||||||
.syntax()
|
.syntax()
|
||||||
.ancestors()
|
.ancestors()
|
||||||
|
@ -2350,13 +2569,7 @@ impl Local {
|
||||||
|
|
||||||
pub fn name(self, db: &dyn HirDatabase) -> Name {
|
pub fn name(self, db: &dyn HirDatabase) -> Name {
|
||||||
let body = db.body(self.parent);
|
let body = db.body(self.parent);
|
||||||
match &body[self.pat_id] {
|
body[self.binding_id].name.clone()
|
||||||
Pat::Bind { name, .. } => name.clone(),
|
|
||||||
_ => {
|
|
||||||
stdx::never!("hir::Local is missing a name!");
|
|
||||||
Name::missing()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_self(self, db: &dyn HirDatabase) -> bool {
|
pub fn is_self(self, db: &dyn HirDatabase) -> bool {
|
||||||
|
@ -2365,15 +2578,12 @@ impl Local {
|
||||||
|
|
||||||
pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
|
pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
|
||||||
let body = db.body(self.parent);
|
let body = db.body(self.parent);
|
||||||
matches!(&body[self.pat_id], Pat::Bind { mode: BindingAnnotation::Mutable, .. })
|
body[self.binding_id].mode == BindingAnnotation::Mutable
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_ref(self, db: &dyn HirDatabase) -> bool {
|
pub fn is_ref(self, db: &dyn HirDatabase) -> bool {
|
||||||
let body = db.body(self.parent);
|
let body = db.body(self.parent);
|
||||||
matches!(
|
matches!(body[self.binding_id].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut)
|
||||||
&body[self.pat_id],
|
|
||||||
Pat::Bind { mode: BindingAnnotation::Ref | BindingAnnotation::RefMut, .. }
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
|
pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
|
||||||
|
@ -2387,34 +2597,33 @@ impl Local {
|
||||||
pub fn ty(self, db: &dyn HirDatabase) -> Type {
|
pub fn ty(self, db: &dyn HirDatabase) -> Type {
|
||||||
let def = self.parent;
|
let def = self.parent;
|
||||||
let infer = db.infer(def);
|
let infer = db.infer(def);
|
||||||
let ty = infer[self.pat_id].clone();
|
let ty = infer[self.binding_id].clone();
|
||||||
Type::new(db, def, ty)
|
Type::new(db, def, ty)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn associated_locals(self, db: &dyn HirDatabase) -> Box<[Local]> {
|
/// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = x;`
|
||||||
let body = db.body(self.parent);
|
pub fn sources(self, db: &dyn HirDatabase) -> Vec<LocalSource> {
|
||||||
body.ident_patterns_for(&self.pat_id)
|
let (body, source_map) = db.body_with_source_map(self.parent);
|
||||||
|
body[self.binding_id]
|
||||||
|
.definitions
|
||||||
.iter()
|
.iter()
|
||||||
.map(|&pat_id| Local { parent: self.parent, pat_id })
|
.map(|&definition| {
|
||||||
|
let src = source_map.pat_syntax(definition).unwrap(); // Hmm...
|
||||||
|
let root = src.file_syntax(db.upcast());
|
||||||
|
src.map(|ast| match ast {
|
||||||
|
// Suspicious unwrap
|
||||||
|
Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
|
||||||
|
Either::Right(it) => Either::Right(it.to_node(&root)),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.map(|source| LocalSource { local: self, source })
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// If this local is part of a multi-local, retrieve the representative local.
|
/// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = x;`
|
||||||
/// That is the local that references are being resolved to.
|
pub fn primary_source(self, db: &dyn HirDatabase) -> LocalSource {
|
||||||
pub fn representative(self, db: &dyn HirDatabase) -> Local {
|
let all_sources = self.sources(db);
|
||||||
let body = db.body(self.parent);
|
all_sources.into_iter().next().unwrap()
|
||||||
Local { pat_id: body.pattern_representative(self.pat_id), ..self }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::IdentPat, ast::SelfParam>> {
|
|
||||||
let (_body, source_map) = db.body_with_source_map(self.parent);
|
|
||||||
let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
|
|
||||||
let root = src.file_syntax(db.upcast());
|
|
||||||
src.map(|ast| match ast {
|
|
||||||
// Suspicious unwrap
|
|
||||||
Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
|
|
||||||
Either::Right(it) => Either::Right(it.to_node(&root)),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3190,6 +3399,14 @@ impl Type {
|
||||||
matches!(self.ty.kind(Interner), TyKind::Raw(..))
|
matches!(self.ty.kind(Interner), TyKind::Raw(..))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn remove_raw_ptr(&self) -> Option<Type> {
|
||||||
|
if let TyKind::Raw(_, ty) = self.ty.kind(Interner) {
|
||||||
|
Some(self.derived(ty.clone()))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn contains_unknown(&self) -> bool {
|
pub fn contains_unknown(&self) -> bool {
|
||||||
// FIXME: When we get rid of `ConstScalar::Unknown`, we can just look at precomputed
|
// FIXME: When we get rid of `ConstScalar::Unknown`, we can just look at precomputed
|
||||||
// `TypeFlags` in `TyData`.
|
// `TypeFlags` in `TyData`.
|
||||||
|
@ -3260,12 +3477,7 @@ impl Type {
|
||||||
|
|
||||||
pub fn as_array(&self, _db: &dyn HirDatabase) -> Option<(Type, usize)> {
|
pub fn as_array(&self, _db: &dyn HirDatabase) -> Option<(Type, usize)> {
|
||||||
if let TyKind::Array(ty, len) = &self.ty.kind(Interner) {
|
if let TyKind::Array(ty, len) = &self.ty.kind(Interner) {
|
||||||
match len.data(Interner).value {
|
try_const_usize(len).map(|x| (self.derived(ty.clone()), x as usize))
|
||||||
ConstValue::Concrete(ConcreteConst { interned: ConstScalar::UInt(len) }) => {
|
|
||||||
Some((self.derived(ty.clone()), len as usize))
|
|
||||||
}
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
@ -3321,6 +3533,24 @@ impl Type {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Iterates its type arguments
|
||||||
|
///
|
||||||
|
/// It iterates the actual type arguments when concrete types are used
|
||||||
|
/// and otherwise the generic names.
|
||||||
|
/// It does not include `const` arguments.
|
||||||
|
///
|
||||||
|
/// For code, such as:
|
||||||
|
/// ```text
|
||||||
|
/// struct Foo<T, U>
|
||||||
|
///
|
||||||
|
/// impl<U> Foo<String, U>
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// It iterates:
|
||||||
|
/// ```text
|
||||||
|
/// - "String"
|
||||||
|
/// - "U"
|
||||||
|
/// ```
|
||||||
pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ {
|
pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ {
|
||||||
self.ty
|
self.ty
|
||||||
.strip_references()
|
.strip_references()
|
||||||
|
@ -3331,12 +3561,62 @@ impl Type {
|
||||||
.map(move |ty| self.derived(ty))
|
.map(move |ty| self.derived(ty))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn iterate_method_candidates<T>(
|
/// Iterates its type and const arguments
|
||||||
|
///
|
||||||
|
/// It iterates the actual type and const arguments when concrete types
|
||||||
|
/// are used and otherwise the generic names.
|
||||||
|
///
|
||||||
|
/// For code, such as:
|
||||||
|
/// ```text
|
||||||
|
/// struct Foo<T, const U: usize, const X: usize>
|
||||||
|
///
|
||||||
|
/// impl<U> Foo<String, U, 12>
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// It iterates:
|
||||||
|
/// ```text
|
||||||
|
/// - "String"
|
||||||
|
/// - "U"
|
||||||
|
/// - "12"
|
||||||
|
/// ```
|
||||||
|
pub fn type_and_const_arguments<'a>(
|
||||||
|
&'a self,
|
||||||
|
db: &'a dyn HirDatabase,
|
||||||
|
) -> impl Iterator<Item = SmolStr> + 'a {
|
||||||
|
self.ty
|
||||||
|
.strip_references()
|
||||||
|
.as_adt()
|
||||||
|
.into_iter()
|
||||||
|
.flat_map(|(_, substs)| substs.iter(Interner))
|
||||||
|
.filter_map(|arg| {
|
||||||
|
// arg can be either a `Ty` or `constant`
|
||||||
|
if let Some(ty) = arg.ty(Interner) {
|
||||||
|
Some(SmolStr::new(ty.display(db).to_string()))
|
||||||
|
} else if let Some(const_) = arg.constant(Interner) {
|
||||||
|
Some(SmolStr::new_inline(&const_.display(db).to_string()))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Combines lifetime indicators, type and constant parameters into a single `Iterator`
|
||||||
|
pub fn generic_parameters<'a>(
|
||||||
|
&'a self,
|
||||||
|
db: &'a dyn HirDatabase,
|
||||||
|
) -> impl Iterator<Item = SmolStr> + 'a {
|
||||||
|
// iterate the lifetime
|
||||||
|
self.as_adt()
|
||||||
|
.and_then(|a| a.lifetime(db).and_then(|lt| Some((<.name).to_smol_str())))
|
||||||
|
.into_iter()
|
||||||
|
// add the type and const paramaters
|
||||||
|
.chain(self.type_and_const_arguments(db))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn iterate_method_candidates_with_traits<T>(
|
||||||
&self,
|
&self,
|
||||||
db: &dyn HirDatabase,
|
db: &dyn HirDatabase,
|
||||||
scope: &SemanticsScope<'_>,
|
scope: &SemanticsScope<'_>,
|
||||||
// FIXME this can be retrieved from `scope`, except autoimport uses this
|
|
||||||
// to specify a different set, so the method needs to be split
|
|
||||||
traits_in_scope: &FxHashSet<TraitId>,
|
traits_in_scope: &FxHashSet<TraitId>,
|
||||||
with_local_impls: Option<Module>,
|
with_local_impls: Option<Module>,
|
||||||
name: Option<&Name>,
|
name: Option<&Name>,
|
||||||
|
@ -3364,6 +3644,24 @@ impl Type {
|
||||||
slot
|
slot
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn iterate_method_candidates<T>(
|
||||||
|
&self,
|
||||||
|
db: &dyn HirDatabase,
|
||||||
|
scope: &SemanticsScope<'_>,
|
||||||
|
with_local_impls: Option<Module>,
|
||||||
|
name: Option<&Name>,
|
||||||
|
callback: impl FnMut(Function) -> Option<T>,
|
||||||
|
) -> Option<T> {
|
||||||
|
self.iterate_method_candidates_with_traits(
|
||||||
|
db,
|
||||||
|
scope,
|
||||||
|
&scope.visible_traits().0,
|
||||||
|
with_local_impls,
|
||||||
|
name,
|
||||||
|
callback,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
fn iterate_method_candidates_dyn(
|
fn iterate_method_candidates_dyn(
|
||||||
&self,
|
&self,
|
||||||
db: &dyn HirDatabase,
|
db: &dyn HirDatabase,
|
||||||
|
@ -3632,11 +3930,13 @@ impl Type {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// FIXME: Document this
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Callable {
|
pub struct Callable {
|
||||||
ty: Type,
|
ty: Type,
|
||||||
sig: CallableSig,
|
sig: CallableSig,
|
||||||
callee: Callee,
|
callee: Callee,
|
||||||
|
/// Whether this is a method that was called with method call syntax.
|
||||||
pub(crate) is_bound_method: bool,
|
pub(crate) is_bound_method: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -3670,14 +3970,14 @@ impl Callable
             Other => CallableKind::Other,
         }
     }
-    pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<ast::SelfParam> {
+    pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<(ast::SelfParam, Type)> {
        let func = match self.callee {
             Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
             _ => return None,
         };
         let src = func.lookup(db.upcast()).source(db.upcast());
         let param_list = src.value.param_list()?;
-        param_list.self_param()
+        Some((param_list.self_param()?, self.ty.derived(self.sig.params()[0].clone())))
     }
     pub fn n_params(&self) -> usize {
         self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
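A hedged sketch of how the changed `Callable::receiver_param` signature above (now also yielding the receiver `Type`) might be consumed, e.g. by signature help; the helper name is illustrative:

    use hir::HirDisplay;

    fn receiver_label(db: &dyn hir::db::HirDatabase, callable: &hir::Callable) -> Option<String> {
        // The AST `self` parameter plus the concrete receiver type of this bound call.
        let (self_param, receiver_ty) = callable.receiver_param(db)?;
        Some(format!("{}: {}", self_param, receiver_ty.display(db)))
    }
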
|
@ -3936,6 +4236,12 @@ impl HasCrate for Trait {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl HasCrate for TraitAlias {
|
||||||
|
fn krate(&self, db: &dyn HirDatabase) -> Crate {
|
||||||
|
self.module(db).krate()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl HasCrate for Static {
|
impl HasCrate for Static {
|
||||||
fn krate(&self, db: &dyn HirDatabase) -> Crate {
|
fn krate(&self, db: &dyn HirDatabase) -> Crate {
|
||||||
self.module(db).krate()
|
self.module(db).krate()
|
||||||
|
|
|
@ -12,7 +12,7 @@ use hir_def::{
|
||||||
macro_id_to_def_id,
|
macro_id_to_def_id,
|
||||||
resolver::{self, HasResolver, Resolver, TypeNs},
|
resolver::{self, HasResolver, Resolver, TypeNs},
|
||||||
type_ref::Mutability,
|
type_ref::Mutability,
|
||||||
AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
|
AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
|
||||||
};
|
};
|
||||||
use hir_expand::{
|
use hir_expand::{
|
||||||
db::AstDatabase,
|
db::AstDatabase,
|
||||||
|
@ -68,7 +68,8 @@ impl PathResolution {
|
||||||
| ModuleDef::Function(_)
|
| ModuleDef::Function(_)
|
||||||
| ModuleDef::Module(_)
|
| ModuleDef::Module(_)
|
||||||
| ModuleDef::Static(_)
|
| ModuleDef::Static(_)
|
||||||
| ModuleDef::Trait(_),
|
| ModuleDef::Trait(_)
|
||||||
|
| ModuleDef::TraitAlias(_),
|
||||||
) => None,
|
) => None,
|
||||||
PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
|
PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
|
||||||
Some(TypeNs::TypeAliasId((*alias).into()))
|
Some(TypeNs::TypeAliasId((*alias).into()))
|
||||||
|
@ -365,6 +366,16 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
||||||
self.imp.resolve_method_call(call).map(Function::from)
|
self.imp.resolve_method_call(call).map(Function::from)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
|
||||||
|
pub fn resolve_method_call_field_fallback(
|
||||||
|
&self,
|
||||||
|
call: &ast::MethodCallExpr,
|
||||||
|
) -> Option<Either<Function, Field>> {
|
||||||
|
self.imp
|
||||||
|
.resolve_method_call_fallback(call)
|
||||||
|
.map(|it| it.map_left(Function::from).map_right(Field::from))
|
||||||
|
}
|
||||||
|
|
||||||
pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
|
pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
|
||||||
self.imp.resolve_await_to_poll(await_expr).map(Function::from)
|
self.imp.resolve_await_to_poll(await_expr).map(Function::from)
|
||||||
}
|
}
|
||||||
|
@ -527,8 +538,8 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
|
fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
|
||||||
let src = self.wrap_node_infile(attr.clone());
|
|
||||||
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
|
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
|
||||||
|
let src = self.wrap_node_infile(attr.clone());
|
||||||
let call_id = self.with_ctx(|ctx| {
|
let call_id = self.with_ctx(|ctx| {
|
||||||
ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
|
ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
|
||||||
})?;
|
})?;
|
||||||
|
@ -1092,7 +1103,10 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
let kind = match adjust.kind {
|
let kind = match adjust.kind {
|
||||||
hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
|
hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
|
||||||
hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
|
hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
|
||||||
Adjust::Deref(Some(OverloadedDeref(mutability(m))))
|
// FIXME: Should we handle unknown mutability better?
|
||||||
|
Adjust::Deref(Some(OverloadedDeref(
|
||||||
|
m.map(mutability).unwrap_or(Mutability::Shared),
|
||||||
|
)))
|
||||||
}
|
}
|
||||||
hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
|
hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
|
||||||
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
|
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
|
||||||
|
@ -1145,6 +1159,13 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
self.analyze(call.syntax())?.resolve_method_call(self.db, call)
|
self.analyze(call.syntax())?.resolve_method_call(self.db, call)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn resolve_method_call_fallback(
|
||||||
|
&self,
|
||||||
|
call: &ast::MethodCallExpr,
|
||||||
|
) -> Option<Either<FunctionId, FieldId>> {
|
||||||
|
self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
|
||||||
|
}
|
||||||
|
|
||||||
fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<FunctionId> {
|
fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<FunctionId> {
|
||||||
self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
|
self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
|
||||||
}
|
}
|
||||||
|
@ -1330,6 +1351,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
|
ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
|
||||||
|
ChildContainer::TraitAliasId(it) => it.resolver(self.db.upcast()),
|
||||||
ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
|
ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
|
||||||
ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
|
ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
|
||||||
ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
|
ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
|
||||||
|
@ -1556,6 +1578,7 @@ to_def_impls![
|
||||||
(crate::Enum, ast::Enum, enum_to_def),
|
(crate::Enum, ast::Enum, enum_to_def),
|
||||||
(crate::Union, ast::Union, union_to_def),
|
(crate::Union, ast::Union, union_to_def),
|
||||||
(crate::Trait, ast::Trait, trait_to_def),
|
(crate::Trait, ast::Trait, trait_to_def),
|
||||||
|
(crate::TraitAlias, ast::TraitAlias, trait_alias_to_def),
|
||||||
(crate::Impl, ast::Impl, impl_to_def),
|
(crate::Impl, ast::Impl, impl_to_def),
|
||||||
(crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
|
(crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
|
||||||
(crate::Const, ast::Const, const_to_def),
|
(crate::Const, ast::Const, const_to_def),
|
||||||
|
@ -1634,8 +1657,8 @@ impl<'a> SemanticsScope<'a> {
|
||||||
resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
|
resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
|
||||||
resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
|
resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
|
||||||
resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
|
resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
|
||||||
resolver::ScopeDef::Local(pat_id) => match self.resolver.body_owner() {
|
resolver::ScopeDef::Local(binding_id) => match self.resolver.body_owner() {
|
||||||
Some(parent) => ScopeDef::Local(Local { parent, pat_id }),
|
Some(parent) => ScopeDef::Local(Local { parent, binding_id }),
|
||||||
None => continue,
|
None => continue,
|
||||||
},
|
},
|
||||||
resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
|
resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
|
||||||
|
@ -1673,6 +1696,7 @@ impl<'a> SemanticsScope<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
pub struct VisibleTraits(pub FxHashSet<TraitId>);
|
pub struct VisibleTraits(pub FxHashSet<TraitId>);
|
||||||
|
|
||||||
impl ops::Deref for VisibleTraits {
|
impl ops::Deref for VisibleTraits {
|
||||||
|
|
@@ -89,16 +89,16 @@ use base_db::FileId;
 use hir_def::{
     child_by_source::ChildBySource,
     dyn_map::DynMap,
-    expr::{LabelId, PatId},
+    expr::{BindingId, LabelId},
     keys::{self, Key},
     AdtId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId,
     GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId,
-    TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
+    TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
 };
 use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use stdx::impl_from;
+use stdx::{impl_from, never};
 use syntax::{
     ast::{self, HasName},
     AstNode, SyntaxNode,
@@ -159,6 +159,12 @@ impl SourceToDefCtx<'_, '_> {
     pub(super) fn trait_to_def(&mut self, src: InFile<ast::Trait>) -> Option<TraitId> {
         self.to_def(src, keys::TRAIT)
     }
+    pub(super) fn trait_alias_to_def(
+        &mut self,
+        src: InFile<ast::TraitAlias>,
+    ) -> Option<TraitAliasId> {
+        self.to_def(src, keys::TRAIT_ALIAS)
+    }
     pub(super) fn impl_to_def(&mut self, src: InFile<ast::Impl>) -> Option<ImplId> {
         self.to_def(src, keys::IMPL)
     }
@@ -210,14 +216,14 @@ impl SourceToDefCtx<'_, '_> {
     pub(super) fn bind_pat_to_def(
         &mut self,
         src: InFile<ast::IdentPat>,
-    ) -> Option<(DefWithBodyId, PatId)> {
+    ) -> Option<(DefWithBodyId, BindingId)> {
         let container = self.find_pat_or_label_container(src.syntax())?;
         let (body, source_map) = self.db.body_with_source_map(container);
         let src = src.map(ast::Pat::from);
         let pat_id = source_map.node_pat(src.as_ref())?;
         // the pattern could resolve to a constant, verify that that is not the case
-        if let crate::Pat::Bind { .. } = body[pat_id] {
-            Some((container, pat_id))
+        if let crate::Pat::Bind { id, .. } = body[pat_id] {
+            Some((container, id))
         } else {
             None
         }
@@ -225,11 +231,16 @@ impl SourceToDefCtx<'_, '_> {
     pub(super) fn self_param_to_def(
         &mut self,
         src: InFile<ast::SelfParam>,
-    ) -> Option<(DefWithBodyId, PatId)> {
+    ) -> Option<(DefWithBodyId, BindingId)> {
         let container = self.find_pat_or_label_container(src.syntax())?;
-        let (_body, source_map) = self.db.body_with_source_map(container);
+        let (body, source_map) = self.db.body_with_source_map(container);
         let pat_id = source_map.node_self_param(src.as_ref())?;
-        Some((container, pat_id))
+        if let crate::Pat::Bind { id, .. } = body[pat_id] {
+            Some((container, id))
+        } else {
+            never!();
+            None
+        }
     }
     pub(super) fn label_to_def(
         &mut self,
@@ -353,6 +364,9 @@ impl SourceToDefCtx<'_, '_> {
             match item {
                 ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(),
                 ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(),
+                ast::Item::TraitAlias(it) => {
+                    self.trait_alias_to_def(container.with_value(it))?.into()
+                }
                 ast::Item::Impl(it) => self.impl_to_def(container.with_value(it))?.into(),
                 ast::Item::Enum(it) => self.enum_to_def(container.with_value(it))?.into(),
                 ast::Item::TypeAlias(it) => {
@@ -400,6 +414,9 @@ impl SourceToDefCtx<'_, '_> {
             ast::Item::Struct(it) => self.struct_to_def(InFile::new(file_id, it))?.into(),
             ast::Item::Enum(it) => self.enum_to_def(InFile::new(file_id, it))?.into(),
             ast::Item::Trait(it) => self.trait_to_def(InFile::new(file_id, it))?.into(),
+            ast::Item::TraitAlias(it) => {
+                self.trait_alias_to_def(InFile::new(file_id, it))?.into()
+            }
             ast::Item::TypeAlias(it) => {
                 self.type_alias_to_def(InFile::new(file_id, it))?.into()
             }
@@ -435,6 +452,7 @@ pub(crate) enum ChildContainer {
     DefWithBodyId(DefWithBodyId),
     ModuleId(ModuleId),
     TraitId(TraitId),
+    TraitAliasId(TraitAliasId),
     ImplId(ImplId),
     EnumId(EnumId),
     VariantId(VariantId),
@@ -447,6 +465,7 @@ impl_from! {
     DefWithBodyId,
     ModuleId,
     TraitId,
+    TraitAliasId,
     ImplId,
     EnumId,
     VariantId,
@@ -462,6 +481,7 @@ impl ChildContainer {
             ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),
             ChildContainer::ModuleId(it) => it.child_by_source(db, file_id),
             ChildContainer::TraitId(it) => it.child_by_source(db, file_id),
+            ChildContainer::TraitAliasId(_) => DynMap::default(),
             ChildContainer::ImplId(it) => it.child_by_source(db, file_id),
             ChildContainer::EnumId(it) => it.child_by_source(db, file_id),
             ChildContainer::VariantId(it) => it.child_by_source(db, file_id),
@@ -10,6 +10,7 @@ use std::{
     sync::Arc,
 };

+use either::Either;
 use hir_def::{
     body::{
         self,
@@ -51,7 +52,7 @@ use syntax::{
 use crate::{
     db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr,
     BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static,
-    Struct, ToolModule, Trait, Type, TypeAlias, Variant,
+    Struct, ToolModule, Trait, TraitAlias, Type, TypeAlias, Variant,
 };

 /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
@@ -266,6 +267,21 @@ impl SourceAnalyzer {
         Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))
     }

+    pub(crate) fn resolve_method_call_fallback(
+        &self,
+        db: &dyn HirDatabase,
+        call: &ast::MethodCallExpr,
+    ) -> Option<Either<FunctionId, FieldId>> {
+        let expr_id = self.expr_id(db, &call.clone().into())?;
+        let inference_result = self.infer.as_ref()?;
+        match inference_result.method_resolution(expr_id) {
+            Some((f_in_trait, substs)) => {
+                Some(Either::Left(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs)))
+            }
+            None => inference_result.field_resolution(expr_id).map(Either::Right),
+        }
+    }
+
     pub(crate) fn resolve_await_to_poll(
         &self,
         db: &dyn HirDatabase,
@@ -406,8 +422,8 @@ impl SourceAnalyzer {
                 // Shorthand syntax, resolve to the local
                 let path = ModPath::from_segments(PathKind::Plain, once(local_name.clone()));
                 match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
-                    Some(ValueNs::LocalBinding(pat_id)) => {
-                        Some(Local { pat_id, parent: self.resolver.body_owner()? })
+                    Some(ValueNs::LocalBinding(binding_id)) => {
+                        Some(Local { binding_id, parent: self.resolver.body_owner()? })
                     }
                     _ => None,
                 }
@@ -791,7 +807,7 @@ impl SourceAnalyzer {
            || Arc::new(hir_ty::TraitEnvironment::empty(krate)),
            |d| db.trait_environment(d),
        );
-        method_resolution::lookup_impl_method(db, env, func, substs)
+        method_resolution::lookup_impl_method(db, env, func, substs).0
     }

     fn resolve_impl_const_or_trait_def(
@@ -809,7 +825,7 @@ impl SourceAnalyzer {
            || Arc::new(hir_ty::TraitEnvironment::empty(krate)),
            |d| db.trait_environment(d),
        );
-        method_resolution::lookup_impl_const(db, env, const_id, subs)
+        method_resolution::lookup_impl_const(db, env, const_id, subs).0
     }

     fn lang_trait_fn(
@@ -943,17 +959,17 @@ fn resolve_hir_path_(
             res.map(|ty_ns| (ty_ns, path.segments().first()))
         }
         None => {
-            let (ty, remaining) =
+            let (ty, remaining_idx) =
                 resolver.resolve_path_in_type_ns(db.upcast(), path.mod_path())?;
-            match remaining {
-                Some(remaining) if remaining > 1 => {
-                    if remaining + 1 == path.segments().len() {
+            match remaining_idx {
+                Some(remaining_idx) => {
+                    if remaining_idx + 1 == path.segments().len() {
                         Some((ty, path.segments().last()))
                     } else {
                         None
                     }
                 }
-                _ => Some((ty, path.segments().get(1))),
+                None => Some((ty, None)),
             }
         }
     }?;
@@ -978,6 +994,7 @@ fn resolve_hir_path_(
         TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
         TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
         TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+        TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()),
     };
     match unresolved {
         Some(unresolved) => resolver
@@ -1001,8 +1018,8 @@ fn resolve_hir_path_(
     let values = || {
         resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| {
             let res = match val {
-                ValueNs::LocalBinding(pat_id) => {
-                    let var = Local { parent: body_owner?, pat_id };
+                ValueNs::LocalBinding(binding_id) => {
+                    let var = Local { parent: body_owner?, binding_id };
                     PathResolution::Local(var)
                 }
                 ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
@@ -1065,6 +1082,7 @@ fn resolve_hir_path_qualifier(
             TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
             TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
             TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+            TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()),
         })
         .or_else(|| {
             resolver
@@ -68,6 +68,7 @@ pub enum FileSymbolKind {
     Static,
     Struct,
     Trait,
+    TraitAlias,
     TypeAlias,
     Union,
 }
@@ -153,6 +154,9 @@ impl<'a> SymbolCollector<'a> {
                 self.push_decl(id, FileSymbolKind::Trait);
                 self.collect_from_trait(id);
             }
+            ModuleDefId::TraitAliasId(id) => {
+                self.push_decl(id, FileSymbolKind::TraitAlias);
+            }
             ModuleDefId::TypeAliasId(id) => {
                 self.push_decl_assoc(id, FileSymbolKind::TypeAlias);
             }
@@ -211,10 +211,8 @@ fn main() {
         check_assist_not_applicable(
             add_explicit_type,
             r#"
-//- minicore: option
-
 fn main() {
-    let $0l = [0.0; Some(2).unwrap()];
+    let $0l = [0.0; unresolved_function(5)];
 }
 "#,
         );
@@ -157,19 +157,12 @@ fn is_ref_and_impls_iter_method(
     let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;

     let has_wanted_method = ty
-        .iterate_method_candidates(
-            sema.db,
-            &scope,
-            &scope.visible_traits().0,
-            None,
-            Some(&wanted_method),
-            |func| {
-                if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) {
-                    return Some(());
-                }
-                None
-            },
-        )
+        .iterate_method_candidates(sema.db, &scope, None, Some(&wanted_method), |func| {
+            if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) {
+                return Some(());
+            }
+            None
+        })
         .is_some();
     if !has_wanted_method {
         return None;
@@ -1,6 +1,6 @@
 use ide_db::defs::{Definition, NameRefClass};
 use syntax::{
-    ast::{self, HasName},
+    ast::{self, HasName, Name},
     ted, AstNode, SyntaxNode,
 };

@@ -48,7 +48,7 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<'
         other => format!("{{ {other} }}"),
     };
     let extracting_arm_pat = extracting_arm.pat()?;
-    let extracted_variable = find_extracted_variable(ctx, &extracting_arm)?;
+    let extracted_variable_positions = find_extracted_variable(ctx, &extracting_arm)?;

     acc.add(
         AssistId("convert_match_to_let_else", AssistKind::RefactorRewrite),
@@ -56,7 +56,7 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<'
         let_stmt.syntax().text_range(),
         |builder| {
             let extracting_arm_pat =
-                rename_variable(&extracting_arm_pat, extracted_variable, binding);
+                rename_variable(&extracting_arm_pat, &extracted_variable_positions, binding);
             builder.replace(
                 let_stmt.syntax().text_range(),
                 format!("let {extracting_arm_pat} = {initializer_expr} else {diverging_arm_expr};"),
@@ -95,14 +95,15 @@ fn find_arms(
 }

 // Given an extracting arm, find the extracted variable.
-fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Option<ast::Name> {
+fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Option<Vec<Name>> {
     match arm.expr()? {
         ast::Expr::PathExpr(path) => {
             let name_ref = path.syntax().descendants().find_map(ast::NameRef::cast)?;
             match NameRefClass::classify(&ctx.sema, &name_ref)? {
                 NameRefClass::Definition(Definition::Local(local)) => {
-                    let source = local.source(ctx.db()).value.left()?;
-                    Some(source.name()?)
+                    let source =
+                        local.sources(ctx.db()).into_iter().map(|x| x.into_ident_pat()?.name());
+                    source.collect()
                 }
                 _ => None,
             }
@@ -115,27 +116,34 @@ fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Opti
 }

 // Rename `extracted` with `binding` in `pat`.
-fn rename_variable(pat: &ast::Pat, extracted: ast::Name, binding: ast::Pat) -> SyntaxNode {
+fn rename_variable(pat: &ast::Pat, extracted: &[Name], binding: ast::Pat) -> SyntaxNode {
     let syntax = pat.syntax().clone_for_update();
-    let extracted_syntax = syntax.covering_element(extracted.syntax().text_range());
-
-    // If `extracted` variable is a record field, we should rename it to `binding`,
-    // otherwise we just need to replace `extracted` with `binding`.
-
-    if let Some(record_pat_field) = extracted_syntax.ancestors().find_map(ast::RecordPatField::cast)
-    {
-        if let Some(name_ref) = record_pat_field.field_name() {
-            ted::replace(
-                record_pat_field.syntax(),
-                ast::make::record_pat_field(ast::make::name_ref(&name_ref.text()), binding)
-                    .syntax()
-                    .clone_for_update(),
-            );
-        }
-    } else {
-        ted::replace(extracted_syntax, binding.syntax().clone_for_update());
-    }
+    let extracted = extracted
+        .iter()
+        .map(|e| syntax.covering_element(e.syntax().text_range()))
+        .collect::<Vec<_>>();
+    for extracted_syntax in extracted {
+        // If `extracted` variable is a record field, we should rename it to `binding`,
+        // otherwise we just need to replace `extracted` with `binding`.
+        if let Some(record_pat_field) =
+            extracted_syntax.ancestors().find_map(ast::RecordPatField::cast)
+        {
+            if let Some(name_ref) = record_pat_field.field_name() {
+                ted::replace(
+                    record_pat_field.syntax(),
+                    ast::make::record_pat_field(
+                        ast::make::name_ref(&name_ref.text()),
+                        binding.clone(),
+                    )
+                    .syntax()
+                    .clone_for_update(),
+                );
+            }
+        } else {
+            ted::replace(extracted_syntax, binding.clone().syntax().clone_for_update());
+        }
+    }

     syntax
 }

@@ -162,6 +170,39 @@ fn foo(opt: Option<()>) {
     );
 }

+    #[test]
+    fn or_pattern_multiple_binding() {
+        check_assist(
+            convert_match_to_let_else,
+            r#"
+//- minicore: option
+enum Foo {
+    A(u32),
+    B(u32),
+    C(String),
+}
+
+fn foo(opt: Option<Foo>) -> Result<u32, ()> {
+    let va$0lue = match opt {
+        Some(Foo::A(it) | Foo::B(it)) => it,
+        _ => return Err(()),
+    };
+}
+"#,
+            r#"
+enum Foo {
+    A(u32),
+    B(u32),
+    C(String),
+}
+
+fn foo(opt: Option<Foo>) -> Result<u32, ()> {
+    let Some(Foo::A(value) | Foo::B(value)) = opt else { return Err(()) };
+}
+"#,
+        );
+    }
+
     #[test]
     fn should_not_be_applicable_if_extracting_arm_is_not_an_identity_expr() {
         cov_mark::check_count!(extracting_arm_is_not_an_identity_expr, 2);
@@ -3,7 +3,8 @@ use std::iter;
 use ast::make;
 use either::Either;
 use hir::{
-    HasSource, HirDisplay, InFile, Local, ModuleDef, PathResolution, Semantics, TypeInfo, TypeParam,
+    HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics,
+    TypeInfo, TypeParam,
 };
 use ide_db::{
     defs::{Definition, NameRefClass},
@@ -710,7 +711,7 @@ impl FunctionBody {
             ) => local_ref,
             _ => return,
         };
-        let InFile { file_id, value } = local_ref.source(sema.db);
+        let InFile { file_id, value } = local_ref.primary_source(sema.db).source;
         // locals defined inside macros are not relevant to us
         if !file_id.is_macro() {
             match value {
@@ -972,11 +973,11 @@ impl FunctionBody {
         locals: impl Iterator<Item = Local>,
     ) -> Vec<Param> {
         locals
-            .map(|local| (local, local.source(ctx.db())))
+            .map(|local| (local, local.primary_source(ctx.db())))
             .filter(|(_, src)| is_defined_outside_of_body(ctx, self, src))
-            .filter_map(|(local, src)| match src.value {
-                Either::Left(src) => Some((local, src)),
-                Either::Right(_) => {
+            .filter_map(|(local, src)| match src.into_ident_pat() {
+                Some(src) => Some((local, src)),
+                None => {
                     stdx::never!(false, "Local::is_self returned false, but source is SelfParam");
                     None
                 }
@@ -1238,17 +1239,9 @@ fn local_outlives_body(
 fn is_defined_outside_of_body(
     ctx: &AssistContext<'_>,
     body: &FunctionBody,
-    src: &hir::InFile<Either<ast::IdentPat, ast::SelfParam>>,
+    src: &LocalSource,
 ) -> bool {
-    src.file_id.original_file(ctx.db()) == ctx.file_id()
-        && !body.contains_node(either_syntax(&src.value))
-}
-
-fn either_syntax(value: &Either<ast::IdentPat, ast::SelfParam>) -> &SyntaxNode {
-    match value {
-        Either::Left(pat) => pat.syntax(),
-        Either::Right(it) => it.syntax(),
-    }
+    src.original_file(ctx.db()) == ctx.file_id() && !body.contains_node(src.syntax())
 }

 /// find where to put extracted function definition
@@ -287,7 +287,7 @@ fn foo() {
             extract_variable,
             r"
 fn foo() {
-    $0{ let x = 0; x }$0
+    $0{ let x = 0; x }$0;
     something_else();
 }",
             r"
@@ -192,6 +192,10 @@ fn target_data_for_def(
             target_name = Some(t.name(db));
             offset_target_and_file_id(db, t)?
         }
+        hir::ModuleDef::TraitAlias(t) => {
+            target_name = Some(t.name(db));
+            offset_target_and_file_id(db, t)?
+        }
         hir::ModuleDef::TypeAlias(t) => {
             target_name = Some(t.name(db));
             offset_target_and_file_id(db, t)?
@@ -5,6 +5,7 @@ use ide_db::{
     base_db::FileId,
     defs::{Definition, NameRefClass},
     famous_defs::FamousDefs,
+    helpers::is_editable_crate,
     path_transform::PathTransform,
     FxHashMap, FxHashSet, RootDatabase, SnippetCap,
 };
@@ -65,6 +66,13 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let fn_name = &*name_ref.text();
     let TargetInfo { target_module, adt_name, target, file, insert_offset } =
         fn_target_info(ctx, path, &call, fn_name)?;
+
+    if let Some(m) = target_module {
+        if !is_editable_crate(m.krate(), ctx.db()) {
+            return None;
+        }
+    }
+
     let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?;
     let text_range = call.syntax().text_range();
     let label = format!("Generate {} function", function_builder.fn_name);
@@ -141,12 +149,11 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let receiver_ty = ctx.sema.type_of_expr(&call.receiver()?)?.original().strip_references();
     let adt = receiver_ty.as_adt()?;

-    let current_module = ctx.sema.scope(call.syntax())?.module();
     let target_module = adt.module(ctx.sema.db);
-
-    if current_module.krate() != target_module.krate() {
+    if !is_editable_crate(target_module.krate(), ctx.db()) {
         return None;
     }
+
     let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?;
     let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?;

@@ -253,7 +260,7 @@ struct FunctionBuilder {
     params: ast::ParamList,
     ret_type: Option<ast::RetType>,
     should_focus_return_type: bool,
-    needs_pub: bool,
+    visibility: Visibility,
     is_async: bool,
 }

@@ -264,12 +271,14 @@ impl FunctionBuilder {
         ctx: &AssistContext<'_>,
         call: &ast::CallExpr,
         fn_name: &str,
-        target_module: Option<hir::Module>,
+        target_module: Option<Module>,
         target: GeneratedFunctionTarget,
     ) -> Option<Self> {
-        let needs_pub = target_module.is_some();
         let target_module =
             target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?;
+
+        let current_module = ctx.sema.scope(call.syntax())?.module();
+        let visibility = calculate_necessary_visibility(current_module, target_module, ctx);
         let fn_name = make::name(fn_name);
         let mut necessary_generic_params = FxHashSet::default();
         let params = fn_args(
@@ -300,7 +309,7 @@ impl FunctionBuilder {
             params,
             ret_type,
             should_focus_return_type,
-            needs_pub,
+            visibility,
             is_async,
         })
     }
@@ -313,8 +322,9 @@ impl FunctionBuilder {
         target_module: Module,
         target: GeneratedFunctionTarget,
     ) -> Option<Self> {
-        let needs_pub =
-            !module_is_descendant(&ctx.sema.scope(call.syntax())?.module(), &target_module, ctx);
+        let current_module = ctx.sema.scope(call.syntax())?.module();
+        let visibility = calculate_necessary_visibility(current_module, target_module, ctx);
+
         let fn_name = make::name(&name.text());
         let mut necessary_generic_params = FxHashSet::default();
         necessary_generic_params.extend(receiver_ty.generic_params(ctx.db()));
@@ -346,7 +356,7 @@ impl FunctionBuilder {
             params,
             ret_type,
             should_focus_return_type,
-            needs_pub,
+            visibility,
             is_async,
         })
     }
@@ -354,7 +364,11 @@ impl FunctionBuilder {
     fn render(self, is_method: bool) -> FunctionTemplate {
         let placeholder_expr = make::ext::expr_todo();
         let fn_body = make::block_expr(vec![], Some(placeholder_expr));
-        let visibility = if self.needs_pub { Some(make::visibility_pub_crate()) } else { None };
+        let visibility = match self.visibility {
+            Visibility::None => None,
+            Visibility::Crate => Some(make::visibility_pub_crate()),
+            Visibility::Pub => Some(make::visibility_pub()),
+        };
         let mut fn_def = make::fn_(
             visibility,
             self.fn_name,
@@ -527,7 +541,7 @@ impl GeneratedFunctionTarget {
 /// Computes parameter list for the generated function.
 fn fn_args(
     ctx: &AssistContext<'_>,
-    target_module: hir::Module,
+    target_module: Module,
     call: ast::CallableExpr,
     necessary_generic_params: &mut FxHashSet<hir::GenericParam>,
 ) -> Option<ast::ParamList> {
@@ -957,13 +971,13 @@ fn fn_arg_name(sema: &Semantics<'_, RootDatabase>, arg_expr: &ast::Expr) -> Stri

 fn fn_arg_type(
     ctx: &AssistContext<'_>,
-    target_module: hir::Module,
+    target_module: Module,
     fn_arg: &ast::Expr,
     generic_params: &mut FxHashSet<hir::GenericParam>,
 ) -> String {
     fn maybe_displayed_type(
         ctx: &AssistContext<'_>,
-        target_module: hir::Module,
+        target_module: Module,
         fn_arg: &ast::Expr,
         generic_params: &mut FxHashSet<hir::GenericParam>,
     ) -> Option<String> {
@@ -1048,16 +1062,29 @@ fn next_space_for_fn_in_impl(impl_: &ast::Impl) -> Option<GeneratedFunctionTarge
     }
 }

-fn module_is_descendant(module: &hir::Module, ans: &hir::Module, ctx: &AssistContext<'_>) -> bool {
-    if module == ans {
-        return true;
-    }
-    for c in ans.children(ctx.sema.db) {
-        if module_is_descendant(module, &c, ctx) {
-            return true;
-        }
-    }
-    false
+#[derive(Clone, Copy)]
+enum Visibility {
+    None,
+    Crate,
+    Pub,
+}
+
+fn calculate_necessary_visibility(
+    current_module: Module,
+    target_module: Module,
+    ctx: &AssistContext<'_>,
+) -> Visibility {
+    let db = ctx.db();
+    let current_module = current_module.nearest_non_block_module(db);
+    let target_module = target_module.nearest_non_block_module(db);
+
+    if target_module.krate() != current_module.krate() {
+        Visibility::Pub
+    } else if current_module.path_to_root(db).contains(&target_module) {
+        Visibility::None
+    } else {
+        Visibility::Crate
+    }
 }

 // This is never intended to be used as a generic graph strucuture. If there's ever another need of
@@ -2656,4 +2683,79 @@ fn main() {
 ",
         )
     }
+
+    #[test]
+    fn applicable_in_different_local_crate() {
+        check_assist(
+            generate_function,
+            r"
+//- /lib.rs crate:lib new_source_root:local
+fn dummy() {}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn main() {
+    lib::foo$0();
+}
+",
+            r"
+fn dummy() {}
+
+pub fn foo() ${0:-> _} {
+    todo!()
+}
+",
+        );
+    }
+
+    #[test]
+    fn applicable_in_different_local_crate_method() {
+        check_assist(
+            generate_function,
+            r"
+//- /lib.rs crate:lib new_source_root:local
+pub struct S;
+//- /main.rs crate:main deps:lib new_source_root:local
+fn main() {
+    lib::S.foo$0();
+}
+",
+            r"
+pub struct S;
+impl S {
+    pub fn foo(&self) ${0:-> _} {
+        todo!()
+    }
+}
+",
+        );
+    }
+
+    #[test]
+    fn not_applicable_in_different_library_crate() {
+        check_assist_not_applicable(
+            generate_function,
+            r"
+//- /lib.rs crate:lib new_source_root:library
+fn dummy() {}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn main() {
+    lib::foo$0();
+}
+",
+        );
+    }
+
+    #[test]
+    fn not_applicable_in_different_library_crate_method() {
+        check_assist_not_applicable(
+            generate_function,
+            r"
+//- /lib.rs crate:lib new_source_root:library
+pub struct S;
+//- /main.rs crate:main deps:lib new_source_root:local
+fn main() {
+    lib::S.foo$0();
+}
+",
+        );
+    }
 }
@@ -95,14 +95,7 @@ fn get_impl_method(

     let scope = ctx.sema.scope(impl_.syntax())?;
     let ty = impl_def.self_ty(db);
-    ty.iterate_method_candidates(
-        db,
-        &scope,
-        &scope.visible_traits().0,
-        None,
-        Some(fn_name),
-        |func| Some(func),
-    )
+    ty.iterate_method_candidates(db, &scope, None, Some(fn_name), |func| Some(func))
 }

 #[cfg(test)]
@@ -1,5 +1,5 @@
 use ide_db::{
-    imports::import_assets::item_for_path_search, use_trivial_contructor::use_trivial_constructor,
+    imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor,
 };
 use itertools::Itertools;
 use stdx::format_to;
@@ -1,4 +1,3 @@
-use either::Either;
 use hir::{PathResolution, Semantics};
 use ide_db::{
     base_db::FileId,
@@ -205,12 +204,14 @@ fn inline_usage(
         return None;
     }

-    // FIXME: Handle multiple local definitions
-    let bind_pat = match local.source(sema.db).value {
-        Either::Left(ident) => ident,
-        _ => return None,
+    let sources = local.sources(sema.db);
+    let [source] = sources.as_slice() else {
+        // Not applicable with locals with multiple definitions (i.e. or patterns)
+        return None;
     };

+    let bind_pat = source.as_ident_pat()?;
+
     let let_stmt = ast::LetStmt::cast(bind_pat.syntax().parent()?)?;

     let UsageSearchResult { mut references } = Definition::Local(local).usages(sema).all();
@@ -1,7 +1,7 @@
 use itertools::Itertools;
 use syntax::{
-    ast::{self, AstNode, AstToken},
-    match_ast, NodeOrToken, SyntaxElement, TextRange, TextSize, T,
+    ast::{self, make, AstNode, AstToken},
+    match_ast, ted, NodeOrToken, SyntaxElement, TextRange, TextSize, T,
 };

 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -12,24 +12,28 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 //
 // ```
 // fn main() {
-//     $0dbg!(92);
+//     let x = $0dbg!(42 * dbg!(4 + 2));$0
 // }
 // ```
 // ->
 // ```
 // fn main() {
-//     92;
+//     let x = 42 * (4 + 2);
 // }
 // ```
 pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let macro_calls = if ctx.has_empty_selection() {
-        vec![ctx.find_node_at_offset::<ast::MacroCall>()?]
+        vec![ctx.find_node_at_offset::<ast::MacroExpr>()?]
     } else {
         ctx.covering_element()
             .as_node()?
             .descendants()
             .filter(|node| ctx.selection_trimmed().contains_range(node.text_range()))
+            // When the selection exactly covers the macro call to be removed, `covering_element()`
+            // returns `ast::MacroCall` instead of its parent `ast::MacroExpr` that we want. So
+            // first try finding `ast::MacroCall`s and then retrieve their parent.
             .filter_map(ast::MacroCall::cast)
+            .filter_map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast))
             .collect()
     };

@@ -44,14 +48,25 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
         "Remove dbg!()",
         ctx.selection_trimmed(),
         |builder| {
-            for (range, text) in replacements {
-                builder.replace(range, text);
+            for (range, expr) in replacements {
+                if let Some(expr) = expr {
+                    builder.replace(range, expr.to_string());
+                } else {
+                    builder.delete(range);
+                }
             }
         },
     )
 }

-fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, String)> {
+/// Returns `None` when either
+/// - macro call is not `dbg!()`
+/// - any node inside `dbg!()` could not be parsed as an expression
+/// - (`macro_expr` has no parent - is that possible?)
+///
+/// Returns `Some(_, None)` when the macro call should just be removed.
+fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Option<ast::Expr>)> {
+    let macro_call = macro_expr.macro_call()?;
     let tt = macro_call.token_tree()?;
     let r_delim = NodeOrToken::Token(tt.right_delimiter_token()?);
     if macro_call.path()?.segment()?.name_ref()?.text() != "dbg"
@@ -68,20 +83,19 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
         .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
         .collect::<Option<Vec<ast::Expr>>>()?;

-    let macro_expr = ast::MacroExpr::cast(macro_call.syntax().parent()?)?;
     let parent = macro_expr.syntax().parent()?;
     Some(match &*input_expressions {
         // dbg!()
         [] => {
             match_ast! {
                 match parent {
-                    ast::StmtList(__) => {
+                    ast::StmtList(_) => {
                         let range = macro_expr.syntax().text_range();
                         let range = match whitespace_start(macro_expr.syntax().prev_sibling_or_token()) {
                             Some(start) => range.cover_offset(start),
                             None => range,
                         };
-                        (range, String::new())
+                        (range, None)
                     },
                     ast::ExprStmt(it) => {
                         let range = it.syntax().text_range();
@@ -89,19 +103,23 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
                             Some(start) => range.cover_offset(start),
                             None => range,
                         };
-                        (range, String::new())
+                        (range, None)
                     },
-                    _ => (macro_call.syntax().text_range(), "()".to_owned())
+                    _ => (macro_call.syntax().text_range(), Some(make::expr_unit())),
                 }
             }
         }
         // dbg!(expr0)
         [expr] => {
+            // dbg!(expr, &parent);
             let wrap = match ast::Expr::cast(parent) {
                 Some(parent) => match (expr, parent) {
                     (ast::Expr::CastExpr(_), ast::Expr::CastExpr(_)) => false,
                     (
-                        ast::Expr::BoxExpr(_) | ast::Expr::PrefixExpr(_) | ast::Expr::RefExpr(_),
+                        ast::Expr::BoxExpr(_)
+                        | ast::Expr::PrefixExpr(_)
+                        | ast::Expr::RefExpr(_)
+                        | ast::Expr::MacroExpr(_),
                         ast::Expr::AwaitExpr(_)
                         | ast::Expr::CallExpr(_)
                         | ast::Expr::CastExpr(_)
@@ -112,7 +130,10 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
                         | ast::Expr::TryExpr(_),
                     ) => true,
                     (
-                        ast::Expr::BinExpr(_) | ast::Expr::CastExpr(_) | ast::Expr::RangeExpr(_),
+                        ast::Expr::BinExpr(_)
+                        | ast::Expr::CastExpr(_)
+                        | ast::Expr::RangeExpr(_)
+                        | ast::Expr::MacroExpr(_),
                         ast::Expr::AwaitExpr(_)
                         | ast::Expr::BinExpr(_)
                         | ast::Expr::CallExpr(_)
@@ -129,16 +150,61 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
                 },
                 None => false,
             };
-            (
-                macro_call.syntax().text_range(),
-                if wrap { format!("({expr})") } else { expr.to_string() },
-            )
+            let expr = replace_nested_dbgs(expr.clone());
+            let expr = if wrap { make::expr_paren(expr) } else { expr.clone_subtree() };
+            (macro_call.syntax().text_range(), Some(expr))
         }
         // dbg!(expr0, expr1, ...)
-        exprs => (macro_call.syntax().text_range(), format!("({})", exprs.iter().format(", "))),
+        exprs => {
+            let exprs = exprs.iter().cloned().map(replace_nested_dbgs);
+            let expr = make::expr_tuple(exprs);
+            (macro_call.syntax().text_range(), Some(expr))
+        }
     })
 }

+fn replace_nested_dbgs(expanded: ast::Expr) -> ast::Expr {
+    if let ast::Expr::MacroExpr(mac) = &expanded {
+        // Special-case when `expanded` itself is `dbg!()` since we cannot replace the whole tree
+        // with `ted`. It should be fairly rare as it means the user wrote `dbg!(dbg!(..))` but you
+        // never know how code ends up being!
+        let replaced = if let Some((_, expr_opt)) = compute_dbg_replacement(mac.clone()) {
+            match expr_opt {
+                Some(expr) => expr,
+                None => {
+                    stdx::never!("dbg! inside dbg! should not be just removed");
+                    expanded
+                }
+            }
+        } else {
+            expanded
+        };
+
+        return replaced;
+    }
+
+    let expanded = expanded.clone_for_update();
+
+    // We need to collect to avoid mutation during traversal.
+    let macro_exprs: Vec<_> =
+        expanded.syntax().descendants().filter_map(ast::MacroExpr::cast).collect();
+
+    for mac in macro_exprs {
+        let expr_opt = match compute_dbg_replacement(mac.clone()) {
+            Some((_, expr)) => expr,
+            None => continue,
+        };
+
+        if let Some(expr) = expr_opt {
+            ted::replace(mac.syntax(), expr.syntax().clone_for_update());
+        } else {
+            ted::remove(mac.syntax());
+        }
+    }
+
+    expanded
+}
+
 fn whitespace_start(it: Option<SyntaxElement>) -> Option<TextSize> {
     Some(it?.into_token().and_then(ast::Whitespace::cast)?.syntax().text_range().start())
 }
@@ -287,4 +353,32 @@ fn f() {
         check_assist_not_applicable(remove_dbg, r#"$0dbg$0!(0)"#);
         check_assist_not_applicable(remove_dbg, r#"$0dbg!(0$0)"#);
     }
+
+    #[test]
+    fn test_nested_dbg() {
+        check(
+            r#"$0let x = dbg!(dbg!(dbg!(dbg!(0 + 1)) * 2) + dbg!(3));$0"#,
+            r#"let x = ((0 + 1) * 2) + 3;"#,
+        );
+        check(r#"$0dbg!(10, dbg!(), dbg!(20, 30))$0"#, r#"(10, (), (20, 30))"#);
+    }
+
+    #[test]
+    fn test_multiple_nested_dbg() {
+        check(
+            r#"
+fn f() {
+    $0dbg!();
+    let x = dbg!(dbg!(dbg!(0 + 1)) + 2) + dbg!(3);
+    dbg!(10, dbg!(), dbg!(20, 30));$0
+}
+"#,
+            r#"
+fn f() {
+    let x = ((0 + 1) + 2) + 3;
+    (10, (), (20, 30));
+}
+"#,
+        );
+    }
 }
@@ -102,9 +102,11 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
         return None;
     }

+    let let_ = if pat_seen { " let" } else { "" };
+
     acc.add(
         AssistId("replace_if_let_with_match", AssistKind::RefactorRewrite),
-        "Replace if let with match",
+        format!("Replace if{let_} with match"),
         available_range,
         move |edit| {
             let match_expr = {
@@ -210,8 +212,17 @@ fn make_else_arm(
 // ```
 pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let match_expr: ast::MatchExpr = ctx.find_node_at_offset()?;
+    let match_arm_list = match_expr.match_arm_list()?;
+    let available_range = TextRange::new(
+        match_expr.syntax().text_range().start(),
+        match_arm_list.syntax().text_range().start(),
+    );
+    let cursor_in_range = available_range.contains_range(ctx.selection_trimmed());
+    if !cursor_in_range {
+        return None;
+    }

-    let mut arms = match_expr.match_arm_list()?.arms();
+    let mut arms = match_arm_list.arms();
     let (first_arm, second_arm) = (arms.next()?, arms.next()?);
     if arms.next().is_some() || first_arm.guard().is_some() || second_arm.guard().is_some() {
         return None;
@@ -226,10 +237,20 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
     )?;
     let scrutinee = match_expr.expr()?;

+    let let_ = match &if_let_pat {
+        ast::Pat::LiteralPat(p)
+            if p.literal()
+                .map(|it| it.token().kind())
+                .map_or(false, |it| it == T![true] || it == T![false]) =>
+        {
+            ""
+        }
+        _ => " let",
+    };
     let target = match_expr.syntax().text_range();
     acc.add(
         AssistId("replace_match_with_if_let", AssistKind::RefactorRewrite),
-        "Replace match with if let",
+        format!("Replace match with if{let_}"),
         target,
         move |edit| {
             fn make_block_expr(expr: ast::Expr) -> ast::BlockExpr {
310 crates/ide-assists/src/handlers/replace_method_eager_lazy.rs Normal file
@@ -0,0 +1,310 @@
+use ide_db::assists::{AssistId, AssistKind};
+use syntax::{
+    ast::{self, make, Expr, HasArgList},
+    AstNode,
+};
+
+use crate::{AssistContext, Assists};
+
+// Assist: replace_with_lazy_method
+//
+// Replace `unwrap_or` with `unwrap_or_else` and `ok_or` with `ok_or_else`.
+//
+// ```
+// # //- minicore:option, fn
+// fn foo() {
+//     let a = Some(1);
+//     a.unwra$0p_or(2);
+// }
+// ```
+// ->
+// ```
+// fn foo() {
+//     let a = Some(1);
+//     a.unwrap_or_else(|| 2);
+// }
+// ```
+pub(crate) fn replace_with_lazy_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
+    let scope = ctx.sema.scope(call.syntax())?;
+
+    let last_arg = call.arg_list()?.args().next()?;
+    let method_name = call.name_ref()?;
+
+    let callable = ctx.sema.resolve_method_call_as_callable(&call)?;
+    let (_, receiver_ty) = callable.receiver_param(ctx.sema.db)?;
+    let n_params = callable.n_params() + 1;
+
+    let method_name_lazy = format!(
+        "{method_name}{}",
+        if method_name.text().ends_with("or") { "_else" } else { "_with" }
+    );
+
+    receiver_ty.iterate_method_candidates_with_traits(
+        ctx.sema.db,
+        &scope,
+        &scope.visible_traits().0,
+        None,
+        None,
+        |func| {
+            let valid = func.name(ctx.sema.db).as_str() == Some(&*method_name_lazy)
+                && func.num_params(ctx.sema.db) == n_params
+                && {
+                    let params = func.params_without_self(ctx.sema.db);
+                    let last_p = params.first()?;
+                    // FIXME: Check that this has the form of `() -> T` where T is the current type of the argument
+                    last_p.ty().impls_fnonce(ctx.sema.db)
+                };
+            valid.then_some(func)
+        },
+    )?;
+
+    acc.add(
+        AssistId("replace_with_lazy_method", AssistKind::RefactorRewrite),
+        format!("Replace {method_name} with {method_name_lazy}"),
+        call.syntax().text_range(),
+        |builder| {
+            builder.replace(method_name.syntax().text_range(), method_name_lazy);
+            let closured = into_closure(&last_arg);
+            builder.replace_ast(last_arg, closured);
+        },
+    )
+}
+
+fn into_closure(param: &Expr) -> Expr {
+    (|| {
+        if let ast::Expr::CallExpr(call) = param {
+            if call.arg_list()?.args().count() == 0 {
+                Some(call.expr()?)
+            } else {
+                None
+            }
+        } else {
+            None
+        }
+    })()
+    .unwrap_or_else(|| make::expr_closure(None, param.clone()))
+}
+
+// Assist: replace_with_eager_method
+//
+// Replace `unwrap_or_else` with `unwrap_or` and `ok_or_else` with `ok_or`.
+//
+// ```
+// # //- minicore:option, fn
+// fn foo() {
+//     let a = Some(1);
+//     a.unwra$0p_or_else(|| 2);
+// }
+// ```
+// ->
+// ```
+// fn foo() {
+//     let a = Some(1);
+//     a.unwrap_or(2);
+// }
+// ```
+pub(crate) fn replace_with_eager_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
+    let scope = ctx.sema.scope(call.syntax())?;
+
+    let last_arg = call.arg_list()?.args().next()?;
+    let method_name = call.name_ref()?;
+
+    let callable = ctx.sema.resolve_method_call_as_callable(&call)?;
+    let (_, receiver_ty) = callable.receiver_param(ctx.sema.db)?;
+    let n_params = callable.n_params() + 1;
+    let params = callable.params(ctx.sema.db);
+
+    // FIXME: Check that the arg is of the form `() -> T`
+    if !params.first()?.1.impls_fnonce(ctx.sema.db) {
+        return None;
+    }
+
+    let method_name_text = method_name.text();
+    let method_name_eager = method_name_text
+        .strip_suffix("_else")
+        .or_else(|| method_name_text.strip_suffix("_with"))?;
+
+    receiver_ty.iterate_method_candidates_with_traits(
+        ctx.sema.db,
+        &scope,
+        &scope.visible_traits().0,
+        None,
+        None,
+        |func| {
+            let valid = func.name(ctx.sema.db).as_str() == Some(&*method_name_eager)
+                && func.num_params(ctx.sema.db) == n_params;
+            valid.then_some(func)
+        },
+    )?;
+
+    acc.add(
+        AssistId("replace_with_eager_method", AssistKind::RefactorRewrite),
+        format!("Replace {method_name} with {method_name_eager}"),
+        call.syntax().text_range(),
+        |builder| {
+            builder.replace(method_name.syntax().text_range(), method_name_eager);
+            let called = into_call(&last_arg);
+            builder.replace_ast(last_arg, called);
+        },
+    )
+}
+
+fn into_call(param: &Expr) -> Expr {
+    (|| {
+        if let ast::Expr::ClosureExpr(closure) = param {
+            if closure.param_list()?.params().count() == 0 {
+                Some(closure.body()?)
+            } else {
+                None
+            }
+        } else {
+            None
+        }
+    })()
+    .unwrap_or_else(|| make::expr_call(param.clone(), make::arg_list(Vec::new())))
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::check_assist;
+
+    use super::*;
+
+    #[test]
+    fn replace_or_with_or_else_simple() {
+        check_assist(
+            replace_with_lazy_method,
+            r#"
+//- minicore: option, fn
+fn foo() {
+    let foo = Some(1);
+    return foo.unwrap_$0or(2);
+}
+"#,
+            r#"
+fn foo() {
+    let foo = Some(1);
+    return foo.unwrap_or_else(|| 2);
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn replace_or_with_or_else_call() {
+        check_assist(
+            replace_with_lazy_method,
+            r#"
+//- minicore: option, fn
+fn foo() {
+    let foo = Some(1);
+    return foo.unwrap_$0or(x());
+}
+"#,
+            r#"
+fn foo() {
+    let foo = Some(1);
+    return foo.unwrap_or_else(x);
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn replace_or_with_or_else_block() {
+        check_assist(
+            replace_with_lazy_method,
+            r#"
+//- minicore: option, fn
+fn foo() {
+    let foo = Some(1);
+    return foo.unwrap_$0or({
|
||||||
|
let mut x = bar();
|
||||||
|
for i in 0..10 {
|
||||||
|
x += i;
|
||||||
|
}
|
||||||
|
x
|
||||||
|
});
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
r#"
|
||||||
|
fn foo() {
|
||||||
|
let foo = Some(1);
|
||||||
|
return foo.unwrap_or_else(|| {
|
||||||
|
let mut x = bar();
|
||||||
|
for i in 0..10 {
|
||||||
|
x += i;
|
||||||
|
}
|
||||||
|
x
|
||||||
|
});
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn replace_or_else_with_or_simple() {
|
||||||
|
check_assist(
|
||||||
|
replace_with_eager_method,
|
||||||
|
r#"
|
||||||
|
//- minicore: option, fn
|
||||||
|
fn foo() {
|
||||||
|
let foo = Some(1);
|
||||||
|
return foo.unwrap_$0or_else(|| 2);
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
r#"
|
||||||
|
fn foo() {
|
||||||
|
let foo = Some(1);
|
||||||
|
return foo.unwrap_or(2);
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn replace_or_else_with_or_call() {
|
||||||
|
check_assist(
|
||||||
|
replace_with_eager_method,
|
||||||
|
r#"
|
||||||
|
//- minicore: option, fn
|
||||||
|
fn foo() {
|
||||||
|
let foo = Some(1);
|
||||||
|
return foo.unwrap_$0or_else(x);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn x() -> i32 { 0 }
|
||||||
|
"#,
|
||||||
|
r#"
|
||||||
|
fn foo() {
|
||||||
|
let foo = Some(1);
|
||||||
|
return foo.unwrap_or(x());
|
||||||
|
}
|
||||||
|
|
||||||
|
fn x() -> i32 { 0 }
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn replace_or_else_with_or_map() {
|
||||||
|
check_assist(
|
||||||
|
replace_with_eager_method,
|
||||||
|
r#"
|
||||||
|
//- minicore: option, fn
|
||||||
|
fn foo() {
|
||||||
|
let foo = Some("foo");
|
||||||
|
return foo.map$0_or_else(|| 42, |v| v.len());
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
r#"
|
||||||
|
fn foo() {
|
||||||
|
let foo = Some("foo");
|
||||||
|
return foo.map_or(42, |v| v.len());
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
|
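The two new assists above derive the counterpart's name purely from the method name and then verify that a matching candidate taking an `FnOnce` argument actually exists, rather than hard-coding `Option`/`Result` methods. Below is a minimal, self-contained sketch of just that naming rule; it is plain Rust, independent of rust-analyzer's internal APIs, and the helpers `lazy_name`/`eager_name` are illustrative names, not part of the codebase.

fn lazy_name(eager: &str) -> String {
    // Mirrors the suffix choice above: `_else` after names ending in `or`, `_with` otherwise.
    let suffix = if eager.ends_with("or") { "_else" } else { "_with" };
    format!("{eager}{suffix}")
}

fn eager_name(lazy: &str) -> Option<&str> {
    // Mirrors the strip_suffix chain above.
    lazy.strip_suffix("_else").or_else(|| lazy.strip_suffix("_with"))
}

fn main() {
    assert_eq!(lazy_name("unwrap_or"), "unwrap_or_else");
    assert_eq!(lazy_name("get_or_insert"), "get_or_insert_with");
    assert_eq!(eager_name("map_or_else"), Some("map_or"));
    assert_eq!(eager_name("unwrap_or_default"), None); // no lazy suffix to strip
}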
@ -1,364 +0,0 @@
use ide_db::{
    assists::{AssistId, AssistKind},
    famous_defs::FamousDefs,
};
use syntax::{
    ast::{self, make, Expr, HasArgList},
    AstNode,
};

use crate::{AssistContext, Assists};

// Assist: replace_or_with_or_else
//
// Replace `unwrap_or` with `unwrap_or_else` and `ok_or` with `ok_or_else`.
//
// ```
// # //- minicore:option
// fn foo() {
//     let a = Some(1);
//     a.unwra$0p_or(2);
// }
// ```
// ->
// ```
// fn foo() {
//     let a = Some(1);
//     a.unwrap_or_else(|| 2);
// }
// ```
pub(crate) fn replace_or_with_or_else(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;

    let kind = is_option_or_result(call.receiver()?, ctx)?;

    let (name, arg_list) = (call.name_ref()?, call.arg_list()?);

    let mut map_or = false;

    let replace = match &*name.text() {
        "unwrap_or" => "unwrap_or_else".to_string(),
        "or" => "or_else".to_string(),
        "ok_or" if kind == Kind::Option => "ok_or_else".to_string(),
        "map_or" => {
            map_or = true;
            "map_or_else".to_string()
        }
        _ => return None,
    };

    let arg = match arg_list.args().collect::<Vec<_>>().as_slice() {
        [] => make::arg_list(Vec::new()),
        [first] => {
            let param = into_closure(first);
            make::arg_list(vec![param])
        }
        [first, second] if map_or => {
            let param = into_closure(first);
            make::arg_list(vec![param, second.clone()])
        }
        _ => return None,
    };

    acc.add(
        AssistId("replace_or_with_or_else", AssistKind::RefactorRewrite),
        format!("Replace {name} with {replace}"),
        call.syntax().text_range(),
        |builder| {
            builder.replace(name.syntax().text_range(), replace);
            builder.replace_ast(arg_list, arg)
        },
    )
}

fn into_closure(param: &Expr) -> Expr {
    (|| {
        if let ast::Expr::CallExpr(call) = param {
            if call.arg_list()?.args().count() == 0 {
                Some(call.expr()?)
            } else {
                None
            }
        } else {
            None
        }
    })()
    .unwrap_or_else(|| make::expr_closure(None, param.clone()))
}

// Assist: replace_or_else_with_or
//
// Replace `unwrap_or_else` with `unwrap_or` and `ok_or_else` with `ok_or`.
//
// ```
// # //- minicore:option
// fn foo() {
//     let a = Some(1);
//     a.unwra$0p_or_else(|| 2);
// }
// ```
// ->
// ```
// fn foo() {
//     let a = Some(1);
//     a.unwrap_or(2);
// }
// ```
pub(crate) fn replace_or_else_with_or(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;

    let kind = is_option_or_result(call.receiver()?, ctx)?;

    let (name, arg_list) = (call.name_ref()?, call.arg_list()?);

    let mut map_or = false;
    let replace = match &*name.text() {
        "unwrap_or_else" => "unwrap_or".to_string(),
        "or_else" => "or".to_string(),
        "ok_or_else" if kind == Kind::Option => "ok_or".to_string(),
        "map_or_else" => {
            map_or = true;
            "map_or".to_string()
        }
        _ => return None,
    };

    let arg = match arg_list.args().collect::<Vec<_>>().as_slice() {
        [] => make::arg_list(Vec::new()),
        [first] => {
            let param = into_call(first);
            make::arg_list(vec![param])
        }
        [first, second] if map_or => {
            let param = into_call(first);
            make::arg_list(vec![param, second.clone()])
        }
        _ => return None,
    };

    acc.add(
        AssistId("replace_or_else_with_or", AssistKind::RefactorRewrite),
        format!("Replace {name} with {replace}"),
        call.syntax().text_range(),
        |builder| {
            builder.replace(name.syntax().text_range(), replace);
            builder.replace_ast(arg_list, arg)
        },
    )
}

fn into_call(param: &Expr) -> Expr {
    (|| {
        if let ast::Expr::ClosureExpr(closure) = param {
            if closure.param_list()?.params().count() == 0 {
                Some(closure.body()?)
            } else {
                None
            }
        } else {
            None
        }
    })()
    .unwrap_or_else(|| make::expr_call(param.clone(), make::arg_list(Vec::new())))
}

#[derive(PartialEq, Eq)]
enum Kind {
    Option,
    Result,
}

fn is_option_or_result(receiver: Expr, ctx: &AssistContext<'_>) -> Option<Kind> {
    let ty = ctx.sema.type_of_expr(&receiver)?.adjusted().as_adt()?.as_enum()?;
    let option_enum =
        FamousDefs(&ctx.sema, ctx.sema.scope(receiver.syntax())?.krate()).core_option_Option();

    if let Some(option_enum) = option_enum {
        if ty == option_enum {
            return Some(Kind::Option);
        }
    }

    let result_enum =
        FamousDefs(&ctx.sema, ctx.sema.scope(receiver.syntax())?.krate()).core_result_Result();

    if let Some(result_enum) = result_enum {
        if ty == result_enum {
            return Some(Kind::Result);
        }
    }

    None
}

#[cfg(test)]
mod tests {
    use crate::tests::{check_assist, check_assist_not_applicable};

    use super::*;

    #[test]
    fn replace_or_with_or_else_simple() {
        check_assist(
            replace_or_with_or_else,
            r#"
//- minicore: option
fn foo() {
    let foo = Some(1);
    return foo.unwrap_$0or(2);
}
"#,
            r#"
fn foo() {
    let foo = Some(1);
    return foo.unwrap_or_else(|| 2);
}
"#,
        )
    }

    #[test]
    fn replace_or_with_or_else_call() {
        check_assist(
            replace_or_with_or_else,
            r#"
//- minicore: option
fn foo() {
    let foo = Some(1);
    return foo.unwrap_$0or(x());
}
"#,
            r#"
fn foo() {
    let foo = Some(1);
    return foo.unwrap_or_else(x);
}
"#,
        )
    }

    #[test]
    fn replace_or_with_or_else_block() {
        check_assist(
            replace_or_with_or_else,
            r#"
//- minicore: option
fn foo() {
    let foo = Some(1);
    return foo.unwrap_$0or({
        let mut x = bar();
        for i in 0..10 {
            x += i;
        }
        x
    });
}
"#,
            r#"
fn foo() {
    let foo = Some(1);
    return foo.unwrap_or_else(|| {
        let mut x = bar();
        for i in 0..10 {
            x += i;
        }
        x
    });
}
"#,
        )
    }

    #[test]
    fn replace_or_else_with_or_simple() {
        check_assist(
            replace_or_else_with_or,
            r#"
//- minicore: option
fn foo() {
    let foo = Some(1);
    return foo.unwrap_$0or_else(|| 2);
}
"#,
            r#"
fn foo() {
    let foo = Some(1);
    return foo.unwrap_or(2);
}
"#,
        )
    }

    #[test]
    fn replace_or_else_with_or_call() {
        check_assist(
            replace_or_else_with_or,
            r#"
//- minicore: option
fn foo() {
    let foo = Some(1);
    return foo.unwrap_$0or_else(x);
}
"#,
            r#"
fn foo() {
    let foo = Some(1);
    return foo.unwrap_or(x());
}
"#,
        )
    }

    #[test]
    fn replace_or_else_with_or_result() {
        check_assist(
            replace_or_else_with_or,
            r#"
//- minicore: result
fn foo() {
    let foo = Ok(1);
    return foo.unwrap_$0or_else(x);
}
"#,
            r#"
fn foo() {
    let foo = Ok(1);
    return foo.unwrap_or(x());
}
"#,
        )
    }

    #[test]
    fn replace_or_else_with_or_map() {
        check_assist(
            replace_or_else_with_or,
            r#"
//- minicore: result
fn foo() {
    let foo = Ok("foo");
    return foo.map$0_or_else(|| 42, |v| v.len());
}
"#,
            r#"
fn foo() {
    let foo = Ok("foo");
    return foo.map_or(42, |v| v.len());
}
"#,
        )
    }

    #[test]
    fn replace_or_else_with_or_not_applicable() {
        check_assist_not_applicable(
            replace_or_else_with_or,
            r#"
fn foo() {
    let foo = Ok(1);
    return foo.unwrap_$0or_else(x);
}
"#,
        )
    }
}
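The deleted assists above only applied when `is_option_or_result` recognized the receiver as `core::option::Option` or `core::result::Result` and the method name matched a hard-coded list. The replacement assists instead look the eager/lazy counterpart up among the receiver's actual method candidates, so user-defined pairs can qualify as well. A hedged, self-contained illustration of the kind of case that opens up (the `Cache` type is hypothetical, not from the rust-analyzer sources):

struct Cache {
    value: Option<i32>,
}

impl Cache {
    // Eager form: the default is evaluated unconditionally.
    fn value_or(&self, default: i32) -> i32 {
        self.value.unwrap_or(default)
    }

    // Lazy counterpart: same name plus `_else`, taking an `FnOnce` closure,
    // which is the shape the new candidate check looks for.
    fn value_or_else(&self, default: impl FnOnce() -> i32) -> i32 {
        self.value.unwrap_or_else(default)
    }
}

fn main() {
    let cache = Cache { value: None };
    // `replace_with_lazy_method` should be able to rewrite the first call into
    // the second, something the removed Option/Result-only gate ruled out.
    assert_eq!(cache.value_or(1 + 1), 2);
    assert_eq!(cache.value_or_else(|| 1 + 1), 2);
}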
@ -188,7 +188,7 @@ mod handlers
     mod replace_try_expr_with_match;
     mod replace_derive_with_manual_impl;
     mod replace_if_let_with_match;
-    mod replace_or_with_or_else;
+    mod replace_method_eager_lazy;
     mod replace_arith_op;
     mod introduce_named_generic;
     mod replace_let_with_if_let;
@ -297,8 +297,8 @@ mod handlers
             replace_if_let_with_match::replace_if_let_with_match,
             replace_if_let_with_match::replace_match_with_if_let,
             replace_let_with_if_let::replace_let_with_if_let,
-            replace_or_with_or_else::replace_or_else_with_or,
-            replace_or_with_or_else::replace_or_with_or_else,
+            replace_method_eager_lazy::replace_with_eager_method,
+            replace_method_eager_lazy::replace_with_lazy_method,
             replace_turbofish_with_explicit_type::replace_turbofish_with_explicit_type,
             replace_qualified_name_with_use::replace_qualified_name_with_use,
             replace_arith_op::replace_arith_with_wrapping,
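The two hunks above are the only registration steps for the new assists: declare the handler module once and list each public assist function in the flat handler table. A minimal, self-contained sketch of that pattern, using generic stand-in types rather than rust-analyzer's actual handler signature:

// Each assist is a plain function; the crate keeps one flat table of them.
type Assist = fn(&mut Vec<&'static str>) -> Option<()>;

fn replace_with_lazy_method(acc: &mut Vec<&'static str>) -> Option<()> {
    acc.push("replace_with_lazy_method");
    Some(())
}

fn replace_with_eager_method(acc: &mut Vec<&'static str>) -> Option<()> {
    acc.push("replace_with_eager_method");
    Some(())
}

// The diff above swaps the two `replace_or_with_or_else::*` entries in a table
// like this for the two new `replace_method_eager_lazy::*` entries.
const ALL_ASSISTS: &[Assist] = &[replace_with_lazy_method, replace_with_eager_method];

fn main() {
    let mut applied = Vec::new();
    for &assist in ALL_ASSISTS {
        let _ = assist(&mut applied);
    }
    assert_eq!(applied, ["replace_with_lazy_method", "replace_with_eager_method"]);
}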
Some files were not shown because too many files have changed in this diff.