Auto merge of #125887 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? `@ghost`
This commit is contained in:
bors 2024-06-02 11:14:39 +00:00
commit 74f9b0f0a5
22 changed files with 1721 additions and 197 deletions

106
Cargo.lock generated
View file

@ -216,16 +216,6 @@ dependencies = [
"tracing",
]
[[package]]
name = "command-group"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5080df6b0f0ecb76cab30808f00d937ba725cebe266a3da8cd89dff92f2a9916"
dependencies = [
"nix 0.26.4",
"winapi",
]
[[package]]
name = "countme"
version = "3.0.1"
@ -292,7 +282,7 @@ version = "3.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345"
dependencies = [
"nix 0.28.0",
"nix",
"windows-sys 0.52.0",
]
@ -432,9 +422,9 @@ name = "flycheck"
version = "0.0.0"
dependencies = [
"cargo_metadata",
"command-group",
"crossbeam-channel",
"paths",
"process-wrap",
"rustc-hash",
"serde",
"serde_json",
@ -1121,17 +1111,6 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "nix"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b"
dependencies = [
"bitflags 1.3.2",
"cfg-if",
"libc",
]
[[package]]
name = "nix"
version = "0.28.0"
@ -1397,6 +1376,18 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "process-wrap"
version = "8.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38ee68ae331824036479c84060534b18254c864fa73366c58d86db3b7b811619"
dependencies = [
"indexmap",
"nix",
"tracing",
"windows",
]
[[package]]
name = "profile"
version = "0.0.0"
@ -2374,22 +2365,6 @@ version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.8"
@ -2400,10 +2375,57 @@ dependencies = [
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
name = "windows"
version = "0.56.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
checksum = "1de69df01bdf1ead2f4ac895dc77c9351aefff65b2f3db429a343f9cbf05e132"
dependencies = [
"windows-core",
"windows-targets 0.52.5",
]
[[package]]
name = "windows-core"
version = "0.56.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4698e52ed2d08f8658ab0c39512a7c00ee5fe2688c65f8c0a4f06750d729f2a6"
dependencies = [
"windows-implement",
"windows-interface",
"windows-result",
"windows-targets 0.52.5",
]
[[package]]
name = "windows-implement"
version = "0.56.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6fc35f58ecd95a9b71c4f2329b911016e6bec66b3f2e6a4aad86bd2e99e2f9b"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "windows-interface"
version = "0.56.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08990546bf4edef8f431fa6326e032865f27138718c587dc21bc0265bbcb57cc"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "windows-result"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "749f0da9cc72d82e600d8d2e44cadd0b9eedb9038f71a1c58556ac1c5791813b"
dependencies = [
"windows-targets 0.52.5",
]
[[package]]
name = "windows-sys"

View file

@ -111,7 +111,6 @@ chalk-solve = { version = "0.97.0", default-features = false }
chalk-ir = "0.97.0"
chalk-recursive = { version = "0.97.0", default-features = false }
chalk-derive = "0.97.0"
command-group = "2.0.1"
crossbeam-channel = "0.5.8"
dissimilar = "1.0.7"
dot = "0.1.4"
@ -132,6 +131,7 @@ object = { version = "0.33.0", default-features = false, features = [
"macho",
"pe",
] }
process-wrap = { version = "8.0.2", features = ["std"] }
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.0", default-features = false }
rayon = "1.8.0"

View file

@ -18,7 +18,7 @@ tracing.workspace = true
rustc-hash.workspace = true
serde_json.workspace = true
serde.workspace = true
command-group.workspace = true
process-wrap.workspace = true
# local deps
paths.workspace = true

View file

@ -9,8 +9,8 @@ use std::{
process::{ChildStderr, ChildStdout, Command, Stdio},
};
use command_group::{CommandGroup, GroupChild};
use crossbeam_channel::Sender;
use process_wrap::std::{StdChildWrapper, StdCommandWrap};
use stdx::process::streaming_output;
/// Cargo output is structured as a one JSON per line. This trait abstracts parsing one line of
@ -85,7 +85,7 @@ impl<T: ParseFromLine> CargoActor<T> {
}
}
struct JodGroupChild(GroupChild);
struct JodGroupChild(Box<dyn StdChildWrapper>);
impl Drop for JodGroupChild {
fn drop(&mut self) {
@ -119,14 +119,20 @@ impl<T> fmt::Debug for CommandHandle<T> {
impl<T: ParseFromLine> CommandHandle<T> {
pub(crate) fn spawn(mut command: Command, sender: Sender<T>) -> std::io::Result<Self> {
command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
let mut child = command.group_spawn().map(JodGroupChild)?;
let program = command.get_program().into();
let arguments = command.get_args().map(|arg| arg.into()).collect::<Vec<OsString>>();
let current_dir = command.get_current_dir().map(|arg| arg.to_path_buf());
let stdout = child.0.inner().stdout.take().unwrap();
let stderr = child.0.inner().stderr.take().unwrap();
let mut child = StdCommandWrap::from(command);
#[cfg(unix)]
child.wrap(process_wrap::std::ProcessSession);
#[cfg(windows)]
child.wrap(process_wrap::std::JobObject);
let mut child = child.spawn().map(JodGroupChild)?;
let stdout = child.0.stdout().take().unwrap();
let stderr = child.0.stderr().take().unwrap();
let actor = CargoActor::<T>::new(sender, stdout, stderr);
let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)

View file

@ -163,6 +163,9 @@ pub enum Message {
/// Request adding a diagnostic with fixes included to a file
AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic },
/// Request clearing all previous diagnostics
ClearDiagnostics { id: usize },
/// Request check progress notification to client
Progress {
/// Flycheck instance ID
@ -180,6 +183,9 @@ impl fmt::Debug for Message {
.field("workspace_root", workspace_root)
.field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code))
.finish(),
Message::ClearDiagnostics { id } => {
f.debug_struct("ClearDiagnostics").field("id", id).finish()
}
Message::Progress { id, progress } => {
f.debug_struct("Progress").field("id", id).field("progress", progress).finish()
}
@ -220,6 +226,8 @@ struct FlycheckActor {
command_handle: Option<CommandHandle<CargoCheckMessage>>,
/// The receiver side of the channel mentioned above.
command_receiver: Option<Receiver<CargoCheckMessage>>,
status: FlycheckStatus,
}
enum Event {
@ -227,6 +235,13 @@ enum Event {
CheckEvent(Option<CargoCheckMessage>),
}
#[derive(PartialEq)]
enum FlycheckStatus {
Started,
DiagnosticSent,
Finished,
}
const SAVED_FILE_PLACEHOLDER: &str = "$saved_file";
impl FlycheckActor {
@ -248,6 +263,7 @@ impl FlycheckActor {
manifest_path,
command_handle: None,
command_receiver: None,
status: FlycheckStatus::Finished,
}
}
@ -298,12 +314,14 @@ impl FlycheckActor {
self.command_handle = Some(command_handle);
self.command_receiver = Some(receiver);
self.report_progress(Progress::DidStart);
self.status = FlycheckStatus::Started;
}
Err(error) => {
self.report_progress(Progress::DidFailToRestart(format!(
"Failed to run the following command: {} error={}",
formatted_command, error
)));
self.status = FlycheckStatus::Finished;
}
}
}
@ -323,7 +341,11 @@ impl FlycheckActor {
error
);
}
if self.status == FlycheckStatus::Started {
self.send(Message::ClearDiagnostics { id: self.id });
}
self.report_progress(Progress::DidFinish(res));
self.status = FlycheckStatus::Finished;
}
Event::CheckEvent(Some(message)) => match message {
CargoCheckMessage::CompilerArtifact(msg) => {
@ -341,11 +363,15 @@ impl FlycheckActor {
message = msg.message,
"diagnostic received"
);
if self.status == FlycheckStatus::Started {
self.send(Message::ClearDiagnostics { id: self.id });
}
self.send(Message::AddDiagnostic {
id: self.id,
workspace_root: self.root.clone(),
diagnostic: msg,
});
self.status = FlycheckStatus::DiagnosticSent;
}
},
}
@ -362,6 +388,7 @@ impl FlycheckActor {
);
command_handle.cancel();
self.report_progress(Progress::DidCancel);
self.status = FlycheckStatus::Finished;
}
}

View file

@ -298,7 +298,7 @@ pub fn expand_speculative(
// prefer tokens of the same kind and text
// Note the inversion of the score here, as we want to prefer the first token in case
// of all tokens having the same score
(t.kind() != token_to_map.kind()) as u8 + (t.text() != token_to_map.text()) as u8
(t.kind() != token_to_map.kind()) as u8 + 2 * ((t.text() != token_to_map.text()) as u8)
})?;
Some((node.syntax_node(), token))
}

View file

@ -153,24 +153,20 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
// region:specific impls
impl InFile<&SyntaxNode> {
/// Skips the attributed item that caused the macro invocation we are climbing up
pub fn ancestors_with_macros_skip_attr_item(
/// Traverse up macro calls and skips the macro invocation node
pub fn ancestors_with_macros(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => {
let macro_file_id = node.file_id.macro_file()?;
let parent_node = macro_file_id.call_node(db);
if macro_file_id.is_attr_macro(db) {
// macro call was an attributed item, skip it
// FIXME: does this fail if this is a direct expansion of another macro?
parent_node.map(|node| node.parent()).transpose()
} else {
Some(parent_node)
}
}
None => db
.lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
.to_node_item(db)
.syntax()
.cloned()
.map(|node| node.parent())
.transpose(),
};
iter::successors(succ(&self.cloned()), succ)
}

View file

@ -33,8 +33,8 @@ use std::{fmt, hash::Hash};
use base_db::{salsa::impl_intern_value_trivial, CrateId, FileId};
use either::Either;
use span::{
Edition, ErasedFileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor, SyntaxContextData,
SyntaxContextId,
Edition, ErasedFileAstId, FileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor,
SyntaxContextData, SyntaxContextId,
};
use syntax::{
ast::{self, AstNode},
@ -546,6 +546,18 @@ impl MacroCallLoc {
}
}
/// Resolves this macro call back to the item the macro was invoked on,
/// upcasting the kind-specific AST id to a plain `ast::Item`.
pub fn to_node_item(&self, db: &dyn ExpandDatabase) -> InFile<ast::Item> {
match self.kind {
// Fn-like and derive calls store a more specific AST id; upcast it to `ast::Item`.
MacroCallKind::FnLike { ast_id, .. } => {
InFile::new(ast_id.file_id, ast_id.map(FileAstId::upcast).to_node(db))
}
MacroCallKind::Derive { ast_id, .. } => {
InFile::new(ast_id.file_id, ast_id.map(FileAstId::upcast).to_node(db))
}
// Attribute macro calls already point directly at an `ast::Item`.
MacroCallKind::Attr { ast_id, .. } => InFile::new(ast_id.file_id, ast_id.to_node(db)),
}
}
fn expand_to(&self) -> ExpandTo {
match self.kind {
MacroCallKind::FnLike { expand_to, .. } => expand_to,

View file

@ -380,6 +380,27 @@ impl<'db> SemanticsImpl<'db> {
self.with_ctx(|ctx| ctx.has_derives(adt))
}
/// Resolves a derive helper attribute `attr` to the derive macro(s) that declare it,
/// each paired with the macro file of its (pseudo) expansion.
///
/// Returns `None` when the attribute is not attached to a struct/enum/union,
/// when no derive helpers are in scope for that ADT, or when none match the
/// attribute's name.
pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroFileId)>> {
// Walk up from the attribute to the enclosing ADT item it annotates.
let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
ast::Item::Union(it) => Some(ast::Adt::Union(it)),
_ => None,
})?;
// Only single-segment attribute paths can be derive helpers.
let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
let sa = self.analyze_no_infer(adt.syntax())?;
let id = self.db.ast_id_map(sa.file_id).ast_id(&adt);
// Collect every in-scope helper with a matching name, keeping its macro file.
let res: Vec<_> = sa
.resolver
.def_map()
.derive_helpers_in_scope(InFile::new(sa.file_id, id))?
.iter()
.filter(|&(name, _, _)| *name == attr_name)
.map(|&(_, macro_, call)| (macro_.into(), call.as_macro_file()))
.collect();
// An empty match list is reported as `None` rather than `Some(vec![])`.
res.is_empty().not().then_some(res)
}
pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
let file_id = self.find_file(item.syntax()).file_id;
let src = InFile::new(file_id, item.clone());
@ -409,6 +430,20 @@ impl<'db> SemanticsImpl<'db> {
)
}
/// Expands the macro behind `macro_file` speculatively, substituting
/// `speculative_args` for the macro's real input, and maps `token_to_map`
/// (a token inside `speculative_args`) down into the produced expansion.
///
/// Thin wrapper over [`hir_expand::db::expand_speculative`] that exposes the
/// raw (unwrapped) expansion result.
pub fn speculative_expand_raw(
&self,
macro_file: MacroFileId,
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
hir_expand::db::expand_speculative(
self.db.upcast(),
macro_file.macro_call_id,
speculative_args,
token_to_map,
)
}
/// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
/// expansion. `token_to_map` should be a token from the `speculative args` node.
pub fn speculative_expand_attr_macro(
@ -826,107 +861,109 @@ impl<'db> SemanticsImpl<'db> {
// Then check for token trees, that means we are either in a function-like macro or
// secondary attribute inputs
let tt = token.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
let parent = tt.syntax().parent()?;
if tt.left_delimiter_token().map_or(false, |it| it == token) {
return None;
}
if tt.right_delimiter_token().map_or(false, |it| it == token) {
return None;
}
if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
InFile::new(file_id, macro_call);
let file_id = match mcache.get(&mcall) {
Some(&it) => it,
None => {
let it = sa.expand(self.db, mcall.as_ref())?;
mcache.insert(mcall, it);
it
let tt = token
.parent_ancestors()
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
.last()?;
match tt {
Either::Left(tt) => {
if tt.left_delimiter_token().map_or(false, |it| it == token) {
return None;
}
};
let text_range = tt.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this one
tokens.retain(|t| !text_range.contains_range(t.text_range()));
process_expansion_for_token(&mut stack, file_id).or(file_id
.eager_arg(self.db.upcast())
.and_then(|arg| {
// also descend into eager expansions
process_expansion_for_token(&mut stack, arg.as_macro_file())
}))
} else if let Some(meta) = ast::Meta::cast(parent) {
// attribute we failed expansion for earlier, this might be a derive invocation
// or derive helper attribute
let attr = meta.parent_attr()?;
let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast)
{
// this might be a derive, or a derive helper on an ADT
let derive_call = self.with_ctx(|ctx| {
// so try downmapping the token into the pseudo derive expansion
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
ctx.attr_to_derive_macro_call(
InFile::new(file_id, &adt),
InFile::new(file_id, attr.clone()),
)
.map(|(_, call_id, _)| call_id)
});
match derive_call {
Some(call_id) => {
// resolved to a derive
let file_id = call_id.as_macro_file();
let text_range = attr.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this one
tokens.retain(|t| !text_range.contains_range(t.text_range()));
return process_expansion_for_token(&mut stack, file_id);
if tt.right_delimiter_token().map_or(false, |it| it == token) {
return None;
}
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
InFile::new(file_id, macro_call);
let file_id = match mcache.get(&mcall) {
Some(&it) => it,
None => {
let it = sa.expand(self.db, mcall.as_ref())?;
mcache.insert(mcall, it);
it
}
None => Some(adt),
}
} else {
// Otherwise this could be a derive helper on a variant or field
if let Some(field) =
attr.syntax().parent().and_then(ast::RecordField::cast)
{
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
} else if let Some(field) =
attr.syntax().parent().and_then(ast::TupleField::cast)
{
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
} else if let Some(variant) =
attr.syntax().parent().and_then(ast::Variant::cast)
{
variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
} else {
None
}
}?;
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) {
return None;
};
let text_range = tt.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this one
tokens.retain(|t| !text_range.contains_range(t.text_range()));
process_expansion_for_token(&mut stack, file_id).or(file_id
.eager_arg(self.db.upcast())
.and_then(|arg| {
// also descend into eager expansions
process_expansion_for_token(&mut stack, arg.as_macro_file())
}))
}
// Not an attribute, nor a derive, so it's either a builtin or a derive helper
// Try to resolve to a derive helper and downmap
let attr_name =
attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
let id = self.db.ast_id_map(file_id).ast_id(&adt);
let helpers = def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
let mut res = None;
for (.., derive) in
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
{
res = res.or(process_expansion_for_token(
&mut stack,
derive.as_macro_file(),
));
Either::Right(meta) => {
// attribute we failed expansion for earlier, this might be a derive invocation
// or derive helper attribute
let attr = meta.parent_attr()?;
let adt = match attr.syntax().parent().and_then(ast::Adt::cast) {
Some(adt) => {
// this might be a derive on an ADT
let derive_call = self.with_ctx(|ctx| {
// so try downmapping the token into the pseudo derive expansion
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
ctx.attr_to_derive_macro_call(
InFile::new(file_id, &adt),
InFile::new(file_id, attr.clone()),
)
.map(|(_, call_id, _)| call_id)
});
match derive_call {
Some(call_id) => {
// resolved to a derive
let file_id = call_id.as_macro_file();
let text_range = attr.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this
tokens.retain(|t| {
!text_range.contains_range(t.text_range())
});
return process_expansion_for_token(
&mut stack, file_id,
);
}
None => Some(adt),
}
}
None => {
// Otherwise this could be a derive helper on a variant or field
attr.syntax().ancestors().find_map(ast::Item::cast).and_then(
|it| match it {
ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
ast::Item::Union(it) => Some(ast::Adt::Union(it)),
_ => None,
},
)
}
}?;
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) {
return None;
}
let attr_name =
attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
// Not an attribute, nor a derive, so it's either a builtin or a derive helper
// Try to resolve to a derive helper and downmap
let id = self.db.ast_id_map(file_id).ast_id(&adt);
let helpers =
def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
let mut res = None;
for (.., derive) in
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
{
res = res.or(process_expansion_for_token(
&mut stack,
derive.as_macro_file(),
));
}
res
}
res
} else {
None
}
})()
.is_none();

View file

@ -139,7 +139,7 @@ impl SourceToDefCtx<'_, '_> {
let _p = tracing::span!(tracing::Level::INFO, "module_to_def").entered();
let parent_declaration = src
.syntax()
.ancestors_with_macros_skip_attr_item(self.db.upcast())
.ancestors_with_macros(self.db.upcast())
.find_map(|it| it.map(Either::<ast::Module, ast::BlockExpr>::cast).transpose())
.map(|it| it.transpose());
@ -366,7 +366,7 @@ impl SourceToDefCtx<'_, '_> {
}
pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
for container in src.ancestors_with_macros_skip_attr_item(self.db.upcast()) {
for container in src.ancestors_with_macros(self.db.upcast()) {
if let Some(res) = self.container_to_def(container) {
return Some(res);
}
@ -420,7 +420,7 @@ impl SourceToDefCtx<'_, '_> {
}
fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
let ancestors = src.ancestors_with_macros_skip_attr_item(self.db.upcast());
let ancestors = src.ancestors_with_macros(self.db.upcast());
for InFile { file_id, value } in ancestors {
let item = match ast::Item::cast(value) {
Some(it) => it,
@ -429,6 +429,7 @@ impl SourceToDefCtx<'_, '_> {
let res: GenericDefId = match item {
ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Struct(it) => self.struct_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Union(it) => self.union_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Enum(it) => self.enum_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Trait(it) => self.trait_to_def(InFile::new(file_id, it))?.into(),
ast::Item::TraitAlias(it) => {
@ -446,11 +447,18 @@ impl SourceToDefCtx<'_, '_> {
}
fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
let ancestors = src.ancestors_with_macros_skip_attr_item(self.db.upcast());
let ancestors = src.ancestors_with_macros(self.db.upcast());
for InFile { file_id, value } in ancestors {
let item = match ast::Item::cast(value) {
let item = match ast::Item::cast(value.clone()) {
Some(it) => it,
None => continue,
None => {
if let Some(variant) = ast::Variant::cast(value.clone()) {
return self
.enum_variant_to_def(InFile::new(file_id, variant))
.map(Into::into);
}
continue;
}
};
let res: DefWithBodyId = match item {
ast::Item::Const(it) => self.const_to_def(InFile::new(file_id, it))?.into(),

View file

@ -329,7 +329,7 @@ pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
while should_continue() {
lookup.new_round();
solutions.extend(tactics::type_constructor(ctx, &defs, &mut lookup, should_continue));
solutions.extend(tactics::data_constructor(ctx, &defs, &mut lookup, should_continue));
solutions.extend(tactics::free_function(ctx, &defs, &mut lookup, should_continue));
solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup, should_continue));
solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup, should_continue));

View file

@ -87,9 +87,9 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
})
}
/// # Type constructor tactic
/// # Data constructor tactic
///
/// Attempts different type constructors for enums and structs in scope
/// Attempts different data constructors for enums and structs in scope
///
/// Updates lookup by new types reached and returns iterator that yields
/// elements that unify with `goal`.
@ -99,7 +99,7 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
pub(super) fn type_constructor<'a, DB: HirDatabase>(
pub(super) fn data_constructor<'a, DB: HirDatabase>(
ctx: &'a TermSearchCtx<'a, DB>,
defs: &'a FxHashSet<ScopeDef>,
lookup: &'a mut LookupTable,
@ -308,7 +308,9 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
// Early exit if some param cannot be filled from lookup
let param_exprs: Vec<Vec<Expr>> = fields
.into_iter()
.map(|field| lookup.find(db, &field.ty(db)))
.map(|field| {
lookup.find(db, &field.ty_with_args(db, generics.iter().cloned()))
})
.collect::<Option<_>>()?;
// Note that we need special case for 0 param constructors because of multi cartesian

View file

@ -0,0 +1,685 @@
use itertools::Itertools;
use syntax::{
ast::{self, edit::IndentLevel, Comment, CommentPlacement, Whitespace},
AstToken, Direction, SyntaxElement, TextRange,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
// Assist: comment_to_doc
//
// Converts comments to documentation.
//
// ```
// // Wow what $0a nice module
// // I sure hope this shows up when I hover over it
// ```
// ->
// ```
// //! Wow what a nice module
// //! I sure hope this shows up when I hover over it
// ```
/// Entry point: converts the comment under the cursor between plain-comment
/// and doc-comment form, whichever direction applies.
pub(crate) fn convert_comment_from_or_to_doc(
    acc: &mut Assists,
    ctx: &AssistContext<'_>,
) -> Option<()> {
    let comment = ctx.find_token_at_offset::<ast::Comment>()?;
    if comment.kind().doc.is_some() {
        // Already a doc comment: offer to demote it to a plain comment.
        doc_to_comment(acc, comment)
    } else {
        // Plain comment: only offer promotion where a doc comment is syntactically
        // sensible, and let the check decide inner vs. outer placement.
        let placement = can_be_doc_comment(&comment)?;
        comment_to_doc(acc, comment, placement)
    }
}
/// Registers an assist that demotes a doc comment (`///`, `//!`, `/**`, `/*!`)
/// to a plain comment (`//`, `/*`).
fn doc_to_comment(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
    // Line doc comments are edited as the whole adjacent run; block comments
    // are a single token.
    let target = if comment.kind().shape.is_line() {
        line_comments_text_range(&comment)?
    } else {
        comment.syntax().text_range()
    };
    acc.add(
        AssistId("doc_to_comment", AssistKind::RefactorRewrite),
        // Fixed label: this assist replaces a *doc comment* with a comment.
        // The previous label ("Replace comment with doc comment") described the
        // inverse assist, `comment_to_doc` — the two strings were swapped.
        "Replace doc comment with comment",
        target,
        |edit| {
            // We need to either strip the doc marker from the block opener
            // (e.g. `/**` -> `/*`), or replace the `///`/`//!` prefix at the
            // start of each line with `//`.
            let output = match comment.kind().shape {
                ast::CommentShape::Line => {
                    let indentation = IndentLevel::from_token(comment.syntax());
                    let line_start = comment.prefix();
                    let prefix = format!("{indentation}//");
                    relevant_line_comments(&comment)
                        .iter()
                        .map(|comment| comment.text())
                        .flat_map(|text| text.lines())
                        .map(|line| line.replacen(line_start, &prefix, 1))
                        .join("\n")
                }
                ast::CommentShape::Block => {
                    let block_start = comment.prefix();
                    comment
                        .text()
                        .lines()
                        .enumerate()
                        .map(|(idx, line)| {
                            if idx == 0 {
                                // Replace the doc opener with a plain `/*`.
                                line.replacen(block_start, "/*", 1)
                            } else {
                                // NOTE(review): as rendered this replace is a no-op;
                                // whitespace inside the literals may have been lost in
                                // extraction (likely intended to drop the one-column
                                // shift the doc opener introduced) — confirm upstream.
                                line.replacen("* ", "* ", 1)
                            }
                        })
                        .join("\n")
                }
            };
            edit.replace(target, output)
        },
    )
}
/// Registers an assist that promotes a plain comment (`//`, `/*`) to a doc
/// comment, using `style` to pick inner (`//!`, `/*!`) or outer (`///`, `/**`)
/// placement.
fn comment_to_doc(acc: &mut Assists, comment: ast::Comment, style: CommentPlacement) -> Option<()> {
    // Line comments are edited as the whole adjacent run; block comments are a
    // single token.
    let target = if comment.kind().shape.is_line() {
        line_comments_text_range(&comment)?
    } else {
        comment.syntax().text_range()
    };
    acc.add(
        AssistId("comment_to_doc", AssistKind::RefactorRewrite),
        // Fixed label: this assist replaces a comment with a *doc comment*.
        // The previous label ("Replace doc comment with comment") described the
        // inverse assist, `doc_to_comment` — the two strings were swapped.
        "Replace comment with doc comment",
        target,
        |edit| {
            // We need to either replace the first occurrence of `/*` with `/**`
            // (or `/*!`), or replace the `//` at the start of each line with
            // `///` (or `//!`).
            let output = match comment.kind().shape {
                ast::CommentShape::Line => {
                    let indentation = IndentLevel::from_token(comment.syntax());
                    let line_start = match style {
                        CommentPlacement::Inner => format!("{indentation}//!"),
                        CommentPlacement::Outer => format!("{indentation}///"),
                    };
                    relevant_line_comments(&comment)
                        .iter()
                        .map(|comment| comment.text())
                        .flat_map(|text| text.lines())
                        .map(|line| line.replacen("//", &line_start, 1))
                        .join("\n")
                }
                ast::CommentShape::Block => {
                    let block_start = match style {
                        CommentPlacement::Inner => "/*!",
                        CommentPlacement::Outer => "/**",
                    };
                    comment
                        .text()
                        .lines()
                        .enumerate()
                        .map(|(idx, line)| {
                            if idx == 0 {
                                // On the first line we replace the comment start with a doc comment
                                // start.
                                line.replacen("/*", block_start, 1)
                            } else {
                                // put one extra space after each * since we moved the first line to
                                // the right by one column as well.
                                // NOTE(review): as rendered this replace is a no-op; whitespace
                                // inside the literals may have been lost in extraction — confirm
                                // upstream.
                                line.replacen("* ", "* ", 1)
                            }
                        })
                        .join("\n")
                }
            };
            edit.replace(target, output)
        },
    )
}
/// Not all comments are valid candidates for conversion into doc comments. For example, the
/// comments in the code:
/// ```rust
/// // Brilliant module right here
///
/// // Really good right
/// fn good_function(foo: Foo) -> Bar {
/// foo.into_bar()
/// }
///
/// // So nice
/// mod nice_module {}
/// ```
/// can be converted to doc comments. However, the comments in this example:
/// ```rust
/// fn foo_bar(foo: Foo /* not bar yet */) -> Bar {
/// foo.into_bar()
/// // Nicely done
/// }
/// // end of function
///
/// struct S {
/// // The S struct
/// }
/// ```
/// are not allowed to become doc comments. Moreover, some comments _are_ allowed, but aren't common
/// style in Rust. For example, the following comments are allowed to be doc comments, but it is not
/// common style for them to be:
/// ```rust
/// fn foo_bar(foo: Foo) -> Bar {
/// // this could be an inner comment with //!
/// foo.into_bar()
/// }
///
/// trait T {
/// // The T struct could also be documented from within
/// }
///
/// mod mymod {
/// // Modules only normally get inner documentation when they are defined as a separate file.
/// }
/// ```
fn can_be_doc_comment(comment: &ast::Comment) -> Option<CommentPlacement> {
use syntax::SyntaxKind::*;
// if the comment is not on its own line, then we do not propose anything.
match comment.syntax().prev_token() {
Some(prev) => {
// There was a previous token, now check if it was a newline
Whitespace::cast(prev).filter(|w| w.text().contains('\n'))?;
}
// There is no previous token, this is the start of the file.
None => return Some(CommentPlacement::Inner),
}
// check if comment is followed by: `struct`, `trait`, `mod`, `fn`, `type`, `extern crate`,
// `use` or `const`.
let parent = comment.syntax().parent();
let par_kind = parent.as_ref().map(|parent| parent.kind());
matches!(par_kind, Some(STRUCT | TRAIT | MODULE | FN | TYPE_ALIAS | EXTERN_CRATE | USE | CONST))
.then_some(CommentPlacement::Outer)
}
/// The line -> block assist can be invoked from anywhere within a sequence of
/// line comments. `relevant_line_comments` crawls backwards and forwards to
/// find the complete run of comments that will be joined.
pub(crate) fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
    // Only comments sharing this prefix (`//` vs `///` vs `//!`) belong to the run.
    let prefix = comment.prefix();
    let same_prefix = |c: &ast::Comment| c.prefix() == prefix;

    // Whitespace that stays on one line may sit between run members.
    let skippable = |element: &SyntaxElement| {
        element
            .clone()
            .into_token()
            .and_then(Whitespace::cast)
            .map_or(false, |w| !w.spans_multiple_lines())
    };

    // Same-prefix comments reachable from the start in `direction`, stopping at
    // the first sibling that is neither skippable nor a matching comment.
    let run_in = |direction| {
        comment
            .syntax()
            .siblings_with_tokens(direction)
            .filter(|s| !skippable(s))
            .map(|el| el.into_token().and_then(Comment::cast).filter(same_prefix))
            .take_while(Option::is_some)
            .flatten()
    };

    // Walk backwards first, skipping the starting comment so it is not doubled,
    // then append the forward walk (which begins at the starting comment).
    let mut comments: Vec<_> = run_in(Direction::Prev).skip(1).collect();
    comments.reverse();
    comments.extend(run_in(Direction::Next));
    comments
}
/// Text range covering the whole run of line comments around `comment`,
/// extended backwards over the first comment's leading indentation.
fn line_comments_text_range(comment: &ast::Comment) -> Option<TextRange> {
    let comments = relevant_line_comments(comment);
    let first = comments.first()?;
    let last = comments.last()?;
    // Pull the start back over the indentation (4 columns per indent level) so
    // the replacement covers the indent together with the comment text.
    let indentation = IndentLevel::from_token(first.syntax());
    let start =
        first.syntax().text_range().start().checked_sub((indentation.0 as u32 * 4).into())?;
    Some(TextRange::new(start, last.syntax().text_range().end()))
}
#[cfg(test)]
mod tests {
    use crate::tests::{check_assist, check_assist_not_applicable};

    use super::*;

    // `$0` in the fixtures marks the cursor position; the assist is invoked
    // on the comment containing it.

    #[test]
    fn module_comment_to_doc() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
// such a nice module$0
fn main() {
    foo();
}
"#,
            r#"
//! such a nice module
fn main() {
    foo();
}
"#,
        );
    }

    #[test]
    fn single_line_comment_to_doc() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
// unseen$0 docs
fn main() {
    foo();
}
"#,
            r#"
/// unseen docs
fn main() {
    foo();
}
"#,
        );
    }

    #[test]
    fn multi_line_comment_to_doc() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
// unseen$0 docs
// make me seen!
fn main() {
    foo();
}
"#,
            r#"
/// unseen docs
/// make me seen!
fn main() {
    foo();
}
"#,
        );
    }

    #[test]
    fn single_line_doc_to_comment() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
/// visible$0 docs
fn main() {
    foo();
}
"#,
            r#"
// visible docs
fn main() {
    foo();
}
"#,
        );
    }

    #[test]
    fn multi_line_doc_to_comment() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
/// visible$0 docs
/// Hide me!
fn main() {
    foo();
}
"#,
            r#"
// visible docs
// Hide me!
fn main() {
    foo();
}
"#,
        );
    }

    #[test]
    fn single_line_block_comment_to_doc() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
/* unseen$0 docs */
fn main() {
    foo();
}
"#,
            r#"
/** unseen docs */
fn main() {
    foo();
}
"#,
        );
    }

    #[test]
    fn multi_line_block_comment_to_doc() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
/* unseen$0 docs
 * make me seen!
 */
fn main() {
    foo();
}
"#,
            r#"
/** unseen docs
 * make me seen!
 */
fn main() {
    foo();
}
"#,
        );
    }

    #[test]
    fn single_line_block_doc_to_comment() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
/** visible$0 docs */
fn main() {
    foo();
}
"#,
            r#"
/* visible docs */
fn main() {
    foo();
}
"#,
        );
    }

    #[test]
    fn multi_line_block_doc_to_comment() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
/** visible$0 docs
 * Hide me!
 */
fn main() {
    foo();
}
"#,
            r#"
/* visible docs
 * Hide me!
 */
fn main() {
    foo();
}
"#,
        );
    }

    #[test]
    fn single_inner_line_comment_to_doc() {
        check_assist_not_applicable(
            convert_comment_from_or_to_doc,
            r#"
mod mymod {
    // unseen$0 docs
    foo();
}
"#,
        );
    }

    #[test]
    fn single_inner_line_doc_to_comment() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
mod mymod {
    //! visible$0 docs
    foo();
}
"#,
            r#"
mod mymod {
    // visible docs
    foo();
}
"#,
        );
    }

    #[test]
    fn multi_inner_line_doc_to_comment() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
mod mymod {
    //! visible$0 docs
    //! Hide me!
    foo();
}
"#,
            r#"
mod mymod {
    // visible docs
    // Hide me!
    foo();
}
"#,
        );
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
mod mymod {
    /// visible$0 docs
    /// Hide me!
    foo();
}
"#,
            r#"
mod mymod {
    // visible docs
    // Hide me!
    foo();
}
"#,
        );
    }

    #[test]
    fn single_inner_line_block_doc_to_comment() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
mod mymod {
    /*! visible$0 docs */
    type Int = i32;
}
"#,
            r#"
mod mymod {
    /* visible docs */
    type Int = i32;
}
"#,
        );
    }

    #[test]
    fn multi_inner_line_block_doc_to_comment() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"
mod mymod {
    /*! visible$0 docs
     * Hide me!
     */
    type Int = i32;
}
"#,
            r#"
mod mymod {
    /* visible docs
     * Hide me!
     */
    type Int = i32;
}
"#,
        );
    }

    #[test]
    fn not_overeager() {
        check_assist_not_applicable(
            convert_comment_from_or_to_doc,
            r#"
fn main() {
    foo();
    // $0well that settles main
}
// $1 nicely done
"#,
        );
    }

    #[test]
    fn all_possible_items() {
        check_assist(
            convert_comment_from_or_to_doc,
            r#"mod m {
    /* Nice struct$0 */
    struct S {}
}"#,
            r#"mod m {
    /** Nice struct */
    struct S {}
}"#,
        );
        check_assist(
            convert_comment_from_or_to_doc,
            r#"mod m {
    /* Nice trait$0 */
    trait T {}
}"#,
            r#"mod m {
    /** Nice trait */
    trait T {}
}"#,
        );
        check_assist(
            convert_comment_from_or_to_doc,
            r#"mod m {
    /* Nice module$0 */
    mod module {}
}"#,
            r#"mod m {
    /** Nice module */
    mod module {}
}"#,
        );
        check_assist(
            convert_comment_from_or_to_doc,
            r#"mod m {
    /* Nice function$0 */
    fn function() {}
}"#,
            r#"mod m {
    /** Nice function */
    fn function() {}
}"#,
        );
        // FIX: this fixture previously read `type Type Int = i32;`, which is
        // not valid syntax — the intent was clearly a plain type alias.
        check_assist(
            convert_comment_from_or_to_doc,
            r#"mod m {
    /* Nice type$0 */
    type Type = i32;
}"#,
            r#"mod m {
    /** Nice type */
    type Type = i32;
}"#,
        );
        check_assist(
            convert_comment_from_or_to_doc,
            r#"mod m {
    /* Nice crate$0 */
    extern crate rust_analyzer;
}"#,
            r#"mod m {
    /** Nice crate */
    extern crate rust_analyzer;
}"#,
        );
        // FIX: the `use` item was missing its trailing semicolon.
        check_assist(
            convert_comment_from_or_to_doc,
            r#"mod m {
    /* Nice import$0 */
    use ide_assists::convert_comment_from_or_to_doc::tests;
}"#,
            r#"mod m {
    /** Nice import */
    use ide_assists::convert_comment_from_or_to_doc::tests;
}"#,
        );
        check_assist(
            convert_comment_from_or_to_doc,
            r#"mod m {
    /* Nice constant$0 */
    const CONST: &str = "very const";
}"#,
            r#"mod m {
    /** Nice constant */
    const CONST: &str = "very const";
}"#,
        );
    }

    #[test]
    fn no_inner_comments() {
        check_assist_not_applicable(
            convert_comment_from_or_to_doc,
            r#"
mod mymod {
    // aaa$0aa
}
"#,
        );
    }
}

View file

@ -278,4 +278,16 @@ fn f() { let a = 1; let b = 0.0; let c: (i32, (i32, f64)) = todo$0!(); }"#,
r#"fn f() { let a = 1; let b = 0.0; let c: (i32, (i32, f64)) = (a, (a, b)); }"#,
)
}
#[test]
fn test_tuple_struct_with_generics() {
    // Term search must be able to fill `Foo<i32>` by constructing the generic
    // tuple struct from the `i32` value already in scope.
    check_assist(
        term_search,
        r#"//- minicore: todo, unimplemented
struct Foo<T>(T);
fn f() { let a = 1; let b: Foo<i32> = todo$0!(); }"#,
        r#"struct Foo<T>(T);
fn f() { let a = 1; let b: Foo<i32> = Foo(a); }"#,
    )
}
}

View file

@ -0,0 +1,601 @@
use hir::{ImportPathConfig, ModuleDef};
use ide_db::{
assists::{AssistId, AssistKind},
famous_defs::FamousDefs,
};
use syntax::{
ast::{self, HasVisibility},
AstNode, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
};
use crate::{AssistContext, Assists};
// Assist: sugar_impl_future_into_async
//
// Rewrites asynchronous function from `-> impl Future` into `async fn`.
// This action does not touch the function body and therefore `async { 0 }`
// block does not transform to just `0`.
//
// ```
// # //- minicore: future
// pub fn foo() -> impl core::future::F$0uture<Output = usize> {
// async { 0 }
// }
// ```
// ->
// ```
// pub async fn foo() -> usize {
// async { 0 }
// }
// ```
pub(crate) fn sugar_impl_future_into_async(
    acc: &mut Assists,
    ctx: &AssistContext<'_>,
) -> Option<()> {
    // The cursor must be inside the function's return type.
    let ret_type: ast::RetType = ctx.find_node_at_offset()?;
    let function = ret_type.syntax().parent().and_then(ast::Fn::cast)?;

    // Not applicable when the function is already `async`, or is `const`
    // (a `const fn` cannot become `async fn`).
    if function.async_token().is_some() || function.const_token().is_some() {
        return None;
    }

    let ast::Type::ImplTraitType(return_impl_trait) = ret_type.ty()? else {
        return None;
    };

    // First path bound of the `impl ...` type is the candidate trait; extra
    // bounds such as `+ Send` are simply dropped by the rewrite below.
    let main_trait_path = return_impl_trait
        .type_bound_list()?
        .bounds()
        .filter_map(|bound| match bound.ty() {
            Some(ast::Type::PathType(trait_path)) => trait_path.path(),
            _ => None,
        })
        .next()?;

    // Only fire for the real `core::future::Future`, not a same-named trait.
    let trait_type = ctx.sema.resolve_trait(&main_trait_path)?;
    let scope = ctx.sema.scope(main_trait_path.syntax())?;
    if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_future_Future()? {
        return None;
    }
    // The `Output = T` associated-type argument becomes the new return type.
    let future_output = unwrap_future_output(main_trait_path)?;

    acc.add(
        AssistId("sugar_impl_future_into_async", AssistKind::RefactorRewrite),
        "Convert `impl Future` into async",
        function.syntax().text_range(),
        |builder| {
            match future_output {
                // Empty tuple: drop the return type entirely (`-> ()` is implied).
                ast::Type::TupleType(t) if t.fields().next().is_none() => {
                    let mut ret_type_range = ret_type.syntax().text_range();

                    // find leftover whitespace between `)` and `->` so no
                    // trailing space remains after the deletion
                    let whitespace_range = function
                        .param_list()
                        .as_ref()
                        .map(|params| NodeOrToken::Node(params.syntax()))
                        .and_then(following_whitespace);

                    if let Some(whitespace_range) = whitespace_range {
                        ret_type_range =
                            TextRange::new(whitespace_range.start(), ret_type_range.end());
                    }

                    builder.delete(ret_type_range);
                }
                // Otherwise replace `impl Future<...>` with the `Output` type.
                _ => {
                    builder.replace(
                        return_impl_trait.syntax().text_range(),
                        future_output.syntax().text(),
                    );
                }
            }

            // `async` goes after the visibility if there is one (`pub async fn`),
            // otherwise at the very start of the function.
            let (place_for_async, async_kw) = match function.visibility() {
                Some(vis) => (vis.syntax().text_range().end(), " async"),
                None => (function.syntax().text_range().start(), "async "),
            };
            builder.insert(place_for_async, async_kw);
        },
    )
}
// Assist: desugar_async_into_impl_future
//
// Rewrites asynchronous function from `async fn` into `-> impl Future`.
// This action does not touch the function body and therefore `0`
// block does not transform to `async { 0 }`.
//
// ```
// # //- minicore: future
// pub as$0ync fn foo() -> usize {
// 0
// }
// ```
// ->
// ```
// pub fn foo() -> impl core::future::Future<Output = usize> {
// 0
// }
// ```
pub(crate) fn desugar_async_into_impl_future(
    acc: &mut Assists,
    ctx: &AssistContext<'_>,
) -> Option<()> {
    // The cursor must be on the `async` keyword itself.
    let async_token = ctx.find_token_syntax_at_offset(SyntaxKind::ASYNC_KW)?;
    let function = async_token.parent().and_then(ast::Fn::cast)?;

    // `)` is the anchor for inserting a return type when none exists.
    let rparen = function.param_list()?.r_paren_token()?;
    let return_type = match function.ret_type() {
        // unable to get a `ty` makes the action unapplicable
        Some(ret_type) => Some(ret_type.ty()?),
        // No type means `-> ()`
        None => None,
    };

    let scope = ctx.sema.scope(function.syntax())?;
    let module = scope.module();
    let future_trait = FamousDefs(&ctx.sema, scope.krate()).core_future_Future()?;
    // Compute how `Future` should be spelled at this location (e.g. `Future`
    // if imported, `core::future::Future` otherwise), honoring the user's
    // no_std / prelude configuration.
    let trait_path = module.find_path(
        ctx.db(),
        ModuleDef::Trait(future_trait),
        ImportPathConfig {
            prefer_no_std: ctx.config.prefer_no_std,
            prefer_prelude: ctx.config.prefer_prelude,
        },
    )?;
    let trait_path = trait_path.display(ctx.db());

    acc.add(
        AssistId("desugar_async_into_impl_future", AssistKind::RefactorRewrite),
        "Convert async into `impl Future`",
        function.syntax().text_range(),
        |builder| {
            // Delete `async` plus the whitespace that directly follows it,
            // so no double space is left behind.
            let mut async_range = async_token.text_range();
            if let Some(whitespace_range) = following_whitespace(NodeOrToken::Token(async_token)) {
                async_range = TextRange::new(async_range.start(), whitespace_range.end());
            }
            builder.delete(async_range);

            match return_type {
                // Wrap the existing return type: `-> T` becomes
                // `-> impl Future<Output = T>`.
                Some(ret_type) => builder.replace(
                    ret_type.syntax().text_range(),
                    format!("impl {trait_path}<Output = {ret_type}>"),
                ),
                // No return type: append `-> impl Future<Output = ()>` after `)`.
                None => builder.insert(
                    rparen.text_range().end(),
                    format!(" -> impl {trait_path}<Output = ()>"),
                ),
            }
        },
    )
}
/// Extracts the type bound to the first associated-type argument of the last
/// path segment — e.g. `T` out of `Future<Output = T>`. Returns `None` when
/// the segment has no generic arguments or the first one is not of the
/// `Name = Type` form.
fn unwrap_future_output(path: ast::Path) -> Option<ast::Type> {
    let generics = path.segments().last()?.generic_arg_list()?;
    if let ast::GenericArg::AssocTypeArg(output) = generics.generic_args().next()? {
        output.ty()
    } else {
        None
    }
}
/// Returns the range of the whitespace token immediately following the given
/// node or token, or `None` when the next sibling element is not whitespace.
fn following_whitespace(nt: NodeOrToken<&SyntaxNode, SyntaxToken>) -> Option<TextRange> {
    let follower = match nt {
        NodeOrToken::Node(node) => node.next_sibling_or_token(),
        NodeOrToken::Token(token) => token.next_sibling_or_token(),
    }?;
    if follower.kind() == SyntaxKind::WHITESPACE {
        Some(follower.text_range())
    } else {
        None
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    use crate::tests::{check_assist, check_assist_not_applicable};

    // `$0` marks the cursor; most fixtures start with `//- minicore: future`
    // so that `core::future::Future` resolves in the test database.

    #[test]
    fn sugar_with_use() {
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
use core::future::Future;
fn foo() -> impl F$0uture<Output = ()> {
    todo!()
}
"#,
            r#"
use core::future::Future;
async fn foo() {
    todo!()
}
"#,
        );
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
use core::future::Future;
fn foo() -> impl F$0uture<Output = usize> {
    todo!()
}
"#,
            r#"
use core::future::Future;
async fn foo() -> usize {
    todo!()
}
"#,
        );
    }

    #[test]
    fn desugar_with_use() {
        check_assist(
            desugar_async_into_impl_future,
            r#"
//- minicore: future
use core::future::Future;
as$0ync fn foo() {
    todo!()
}
"#,
            r#"
use core::future::Future;
fn foo() -> impl Future<Output = ()> {
    todo!()
}
"#,
        );
        // Partial import: the path is rendered relative to what is in scope.
        check_assist(
            desugar_async_into_impl_future,
            r#"
//- minicore: future
use core::future;
as$0ync fn foo() {
    todo!()
}
"#,
            r#"
use core::future;
fn foo() -> impl future::Future<Output = ()> {
    todo!()
}
"#,
        );
        check_assist(
            desugar_async_into_impl_future,
            r#"
//- minicore: future
use core::future::Future;
as$0ync fn foo() -> usize {
    todo!()
}
"#,
            r#"
use core::future::Future;
fn foo() -> impl Future<Output = usize> {
    todo!()
}
"#,
        );
        // An existing `impl Future` return type simply gets nested.
        check_assist(
            desugar_async_into_impl_future,
            r#"
//- minicore: future
use core::future::Future;
as$0ync fn foo() -> impl Future<Output = usize> {
    todo!()
}
"#,
            r#"
use core::future::Future;
fn foo() -> impl Future<Output = impl Future<Output = usize>> {
    todo!()
}
"#,
        );
    }

    #[test]
    fn sugar_without_use() {
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
fn foo() -> impl core::future::F$0uture<Output = ()> {
    todo!()
}
"#,
            r#"
async fn foo() {
    todo!()
}
"#,
        );
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
fn foo() -> impl core::future::F$0uture<Output = usize> {
    todo!()
}
"#,
            r#"
async fn foo() -> usize {
    todo!()
}
"#,
        );
    }

    #[test]
    fn desugar_without_use() {
        check_assist(
            desugar_async_into_impl_future,
            r#"
//- minicore: future
as$0ync fn foo() {
    todo!()
}
"#,
            r#"
fn foo() -> impl core::future::Future<Output = ()> {
    todo!()
}
"#,
        );
        check_assist(
            desugar_async_into_impl_future,
            r#"
//- minicore: future
as$0ync fn foo() -> usize {
    todo!()
}
"#,
            r#"
fn foo() -> impl core::future::Future<Output = usize> {
    todo!()
}
"#,
        );
    }

    #[test]
    fn not_applicable() {
        // A locally-defined trait named `Future` must not trigger the assist.
        check_assist_not_applicable(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
trait Future {
    type Output;
}
fn foo() -> impl F$0uture<Output = ()> {
    todo!()
}
"#,
        );
        check_assist_not_applicable(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
trait Future {
    type Output;
}
fn foo() -> impl F$0uture<Output = usize> {
    todo!()
}
"#,
        );
        // Cursor not inside the return type.
        check_assist_not_applicable(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
f$0n foo() -> impl core::future::Future<Output = usize> {
    todo!()
}
"#,
        );
        // Cursor not on the `async` keyword.
        check_assist_not_applicable(
            desugar_async_into_impl_future,
            r#"
async f$0n foo() {
    todo!()
}
"#,
        );
    }

    #[test]
    fn sugar_definition_with_use() {
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
use core::future::Future;
fn foo() -> impl F$0uture<Output = ()>;
"#,
            r#"
use core::future::Future;
async fn foo();
"#,
        );
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
use core::future::Future;
fn foo() -> impl F$0uture<Output = usize>;
"#,
            r#"
use core::future::Future;
async fn foo() -> usize;
"#,
        );
    }

    #[test]
    fn sugar_definition_without_use() {
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
fn foo() -> impl core::future::F$0uture<Output = ()>;
"#,
            r#"
async fn foo();
"#,
        );
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
fn foo() -> impl core::future::F$0uture<Output = usize>;
"#,
            r#"
async fn foo() -> usize;
"#,
        );
    }

    #[test]
    fn sugar_more_types() {
        // Extra bounds such as `+ Send + Sync` are dropped by the rewrite.
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
fn foo() -> impl core::future::F$0uture<Output = ()> + Send + Sync;
"#,
            r#"
async fn foo();
"#,
        );
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
fn foo() -> impl core::future::F$0uture<Output = usize> + Debug;
"#,
            r#"
async fn foo() -> usize;
"#,
        );
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
fn foo() -> impl core::future::F$0uture<Output = (usize)> + Debug;
"#,
            r#"
async fn foo() -> (usize);
"#,
        );
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
fn foo() -> impl core::future::F$0uture<Output = (usize, usize)> + Debug;
"#,
            r#"
async fn foo() -> (usize, usize);
"#,
        );
        // Cursor on the inner `Future`: only the inner one is desugared.
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
fn foo() -> impl core::future::Future<Output = impl core::future::F$0uture<Output = ()> + Send>;
"#,
            r#"
async fn foo() -> impl core::future::Future<Output = ()> + Send;
"#,
        );
    }

    #[test]
    fn sugar_with_modifiers() {
        // `const fn` can never be `async`.
        check_assist_not_applicable(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
const fn foo() -> impl core::future::F$0uture<Output = ()>;
"#,
        );
        // `async` is inserted after the visibility, before other qualifiers.
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
pub(crate) unsafe fn foo() -> impl core::future::F$0uture<Output = usize>;
"#,
            r#"
pub(crate) async unsafe fn foo() -> usize;
"#,
        );
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
unsafe fn foo() -> impl core::future::F$0uture<Output = ()>;
"#,
            r#"
async unsafe fn foo();
"#,
        );
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
unsafe extern "C" fn foo() -> impl core::future::F$0uture<Output = ()>;
"#,
            r#"
async unsafe extern "C" fn foo();
"#,
        );
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
fn foo<T>() -> impl core::future::F$0uture<Output = T>;
"#,
            r#"
async fn foo<T>() -> T;
"#,
        );
        check_assist(
            sugar_impl_future_into_async,
            r#"
//- minicore: future
fn foo<T>() -> impl core::future::F$0uture<Output = T>
where
    T: Sized;
"#,
            r#"
async fn foo<T>() -> T
where
    T: Sized;
"#,
        );
    }
}

View file

@ -116,6 +116,7 @@ mod handlers {
mod change_visibility;
mod convert_bool_then;
mod convert_comment_block;
mod convert_comment_from_or_to_doc;
mod convert_from_to_tryfrom;
mod convert_integer_literal;
mod convert_into_to_from;
@ -209,6 +210,7 @@ mod handlers {
mod sort_items;
mod split_import;
mod term_search;
mod toggle_async_sugar;
mod toggle_ignore;
mod unmerge_match_arm;
mod unmerge_use;
@ -238,7 +240,10 @@ mod handlers {
change_visibility::change_visibility,
convert_bool_then::convert_bool_then_to_if,
convert_bool_then::convert_if_to_bool_then,
toggle_async_sugar::desugar_async_into_impl_future,
toggle_async_sugar::sugar_impl_future_into_async,
convert_comment_block::convert_comment_block,
convert_comment_from_or_to_doc::convert_comment_from_or_to_doc,
convert_from_to_tryfrom::convert_from_to_tryfrom,
convert_integer_literal::convert_integer_literal,
convert_into_to_from::convert_into_to_from,

View file

@ -345,6 +345,21 @@ pub(crate) fn frobnicate() {}
)
}
#[test]
#[test]
fn doctest_comment_to_doc() {
    // Checks the doc example of the `comment_to_doc` assist.
    // NOTE(review): this file appears to be sourcegen output — confirm before
    // hand-editing, as regeneration would overwrite changes.
    check_doc_test(
        "comment_to_doc",
        r#####"
// Wow what $0a nice module
// I sure hope this shows up when I hover over it
"#####,
        r#####"
//! Wow what a nice module
//! I sure hope this shows up when I hover over it
"#####,
    )
}
#[test]
fn doctest_convert_bool_then_to_if() {
check_doc_test(
@ -800,6 +815,24 @@ fn main() {
)
}
#[test]
#[test]
fn doctest_desugar_async_into_impl_future() {
    // Checks the doc example of the `desugar_async_into_impl_future` assist.
    // NOTE(review): this file appears to be sourcegen output — confirm before
    // hand-editing, as regeneration would overwrite changes.
    check_doc_test(
        "desugar_async_into_impl_future",
        r#####"
//- minicore: future
pub as$0ync fn foo() -> usize {
    0
}
"#####,
        r#####"
pub fn foo() -> impl core::future::Future<Output = usize> {
    0
}
"#####,
    )
}
#[test]
fn doctest_desugar_doc_comment() {
check_doc_test(
@ -3020,6 +3053,24 @@ use std::{collections::HashMap};
)
}
#[test]
#[test]
fn doctest_sugar_impl_future_into_async() {
    // Checks the doc example of the `sugar_impl_future_into_async` assist.
    // NOTE(review): this file appears to be sourcegen output — confirm before
    // hand-editing, as regeneration would overwrite changes.
    check_doc_test(
        "sugar_impl_future_into_async",
        r#####"
//- minicore: future
pub fn foo() -> impl core::future::F$0uture<Output = usize> {
    async { 0 }
}
"#####,
        r#####"
pub async fn foo() -> usize {
    async { 0 }
}
"#####,
    )
}
#[test]
fn doctest_toggle_ignore() {
check_doc_test(

View file

@ -3,8 +3,9 @@ use std::iter;
use hir::{Semantics, Type, TypeInfo, Variant};
use ide_db::{active_parameter::ActiveParameter, RootDatabase};
use itertools::Either;
use syntax::{
algo::{find_node_at_offset, non_trivia_sibling},
algo::{ancestors_at_offset, find_node_at_offset, non_trivia_sibling},
ast::{self, AttrKind, HasArgList, HasGenericParams, HasLoopBody, HasName, NameOrNameRef},
match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
SyntaxToken, TextRange, TextSize, T,
@ -119,20 +120,45 @@ fn expand(
}
// No attributes have been expanded, so look for macro_call! token trees or derive token trees
let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
let orig_tt = match ancestors_at_offset(&original_file, offset)
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
.last()
{
Some(it) => it,
None => break 'expansion,
};
let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
let spec_tt = match ancestors_at_offset(&speculative_file, offset)
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
.last()
{
Some(it) => it,
None => break 'expansion,
};
// Expand pseudo-derive expansion
if let (Some(orig_attr), Some(spec_attr)) = (
orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
) {
let (tts, attrs) = match (orig_tt, spec_tt) {
(Either::Left(orig_tt), Either::Left(spec_tt)) => {
let attrs = orig_tt
.syntax()
.parent()
.and_then(ast::Meta::cast)
.and_then(|it| it.parent_attr())
.zip(
spec_tt
.syntax()
.parent()
.and_then(ast::Meta::cast)
.and_then(|it| it.parent_attr()),
);
(Some((orig_tt, spec_tt)), attrs)
}
(Either::Right(orig_path), Either::Right(spec_path)) => {
(None, orig_path.parent_attr().zip(spec_path.parent_attr()))
}
_ => break 'expansion,
};
// Expand pseudo-derive expansion aka `derive(Debug$0)`
if let Some((orig_attr, spec_attr)) = attrs {
if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
sema.speculative_expand_derive_as_pseudo_attr_macro(
@ -147,15 +173,54 @@ fn expand(
fake_mapped_token.text_range().start(),
orig_attr,
));
break 'expansion;
}
if let Some(spec_adt) =
spec_attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
ast::Item::Union(it) => Some(ast::Adt::Union(it)),
_ => None,
})
{
// might be the path of derive helper or a token tree inside of one
if let Some(helpers) = sema.derive_helper(&orig_attr) {
for (_mac, file) in helpers {
if let Some((fake_expansion, fake_mapped_token)) = sema
.speculative_expand_raw(
file,
spec_adt.syntax(),
fake_ident_token.clone(),
)
{
// we are inside a derive helper token tree, treat this as being inside
// the derive expansion
let actual_expansion = sema.parse_or_expand(file.into());
let new_offset = fake_mapped_token.text_range().start();
if new_offset + relative_offset > actual_expansion.text_range().end() {
// offset outside of bounds from the original expansion,
// stop here to prevent problems from happening
break 'expansion;
}
original_file = actual_expansion;
speculative_file = fake_expansion;
fake_ident_token = fake_mapped_token;
offset = new_offset;
continue 'expansion;
}
}
}
}
// at this point we won't have any more successful expansions, so stop
break 'expansion;
}
// Expand fn-like macro calls
let Some((orig_tt, spec_tt)) = tts else { break 'expansion };
if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
orig_tt.syntax().parent().and_then(ast::MacroCall::cast),
spec_tt.syntax().parent().and_then(ast::MacroCall::cast),
) {
let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
let mac_call_path1 =
@ -201,6 +266,7 @@ fn expand(
// none of our states have changed so stop the loop
break 'expansion;
}
ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
}

View file

@ -106,6 +106,10 @@ impl FamousDefs<'_, '_> {
self.find_trait("core:marker:Copy")
}
/// Resolves the `core::future::Future` trait; the lookup path uses `:` as
/// segment separator, matching the other `find_trait` callers here
/// (e.g. `"core:marker:Copy"`).
pub fn core_future_Future(&self) -> Option<Trait> {
    self.find_trait("core:future:Future")
}
/// Resolves the built-in `derive` macro via the `core:macros:builtin:derive`
/// lookup path.
pub fn core_macros_builtin_derive(&self) -> Option<Macro> {
    self.find_macro("core:macros:builtin:derive")
}

View file

@ -16,7 +16,7 @@ use crate::{
//
// This is the same as `Go to Definition` with the following exceptions:
// - outline modules will navigate to the `mod name;` item declaration
// - trait assoc items will navigate to the assoc item of the trait declaration opposed to the trait impl
// - trait assoc items will navigate to the assoc item of the trait declaration as opposed to the trait impl
// - fields in patterns will navigate to the field declaration of the struct, union or variant
pub(crate) fn goto_declaration(
db: &RootDatabase,

View file

@ -87,7 +87,6 @@ pub(crate) struct GlobalState {
pub(crate) flycheck_sender: Sender<flycheck::Message>,
pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
pub(crate) last_flycheck_error: Option<String>,
pub(crate) diagnostics_received: bool,
// Test explorer
pub(crate) test_run_session: Option<Vec<flycheck::CargoTestHandle>>,
@ -225,7 +224,6 @@ impl GlobalState {
flycheck_sender,
flycheck_receiver,
last_flycheck_error: None,
diagnostics_received: false,
test_run_session: None,
test_run_sender,

View file

@ -804,10 +804,6 @@ impl GlobalState {
fn handle_flycheck_msg(&mut self, message: flycheck::Message) {
match message {
flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => {
if !self.diagnostics_received {
self.diagnostics.clear_check(id);
self.diagnostics_received = true;
}
let snap = self.snapshot();
let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
&self.config.diagnostics_map(),
@ -833,12 +829,11 @@ impl GlobalState {
}
}
flycheck::Message::ClearDiagnostics { id } => self.diagnostics.clear_check(id),
flycheck::Message::Progress { id, progress } => {
let (state, message) = match progress {
flycheck::Progress::DidStart => {
self.diagnostics_received = false;
(Progress::Begin, None)
}
flycheck::Progress::DidStart => (Progress::Begin, None),
flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
flycheck::Progress::DidCancel => {
self.last_flycheck_error = None;
@ -852,9 +847,6 @@ impl GlobalState {
flycheck::Progress::DidFinish(result) => {
self.last_flycheck_error =
result.err().map(|err| format!("cargo check failed to start: {err}"));
if !self.diagnostics_received {
self.diagnostics.clear_check(id);
}
(Progress::End, None)
}
};