Auto merge of #16451 - Urhengulas:satisfy-clippy, r=Veykril

internal: Work through temporarily allowed clippy lints, part 2

Another follow-up to https://github.com/rust-lang/rust-analyzer/pull/16401.
bors 2024-02-01 14:23:18 +00:00
commit 850ba2fb63
39 changed files with 136 additions and 149 deletions

View file

@@ -167,29 +167,14 @@ new_ret_no_self = "allow"
 ## Following lints should be tackled at some point
 borrowed_box = "allow"
-borrow_deref_ref = "allow"
-derivable_impls = "allow"
 derived_hash_with_manual_eq = "allow"
-field_reassign_with_default = "allow"
 forget_non_drop = "allow"
-format_collect = "allow"
-large_enum_variant = "allow"
 needless_doctest_main = "allow"
-new_without_default = "allow"
 non_canonical_clone_impl = "allow"
 non_canonical_partial_ord_impl = "allow"
 self_named_constructors = "allow"
-skip_while_next = "allow"
 too_many_arguments = "allow"
-toplevel_ref_arg = "allow"
 type_complexity = "allow"
-unnecessary_cast = "allow"
-unnecessary_filter_map = "allow"
-unnecessary_lazy_evaluations = "allow"
-unnecessary_mut_passed = "allow"
-useless_conversion = "allow"
-useless_format = "allow"
-wildcard_in_or_patterns = "allow"
 wrong_self_convention = "allow"
 ## warn at following lints
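
The entries dropped above are lints the codebase now passes, so clippy enforces them again from here on. As a reference point, here is a minimal, self-contained sketch (hypothetical values, not rust-analyzer code) of what two of the re-enabled lints flag and the shape of the suggested fix:

    // useless_format and unnecessary_lazy_evaluations, illustrated on toy values.
    fn examples(flag: bool, fallback: Option<u32>) -> (String, Option<u32>, u32) {
        // useless_format: format!() without interpolation just allocates.
        // let s = format!("hello");              // flagged
        let s = "hello".to_string();              // suggested

        // unnecessary_lazy_evaluations: a closure around a cheap value is noise.
        // let t = flag.then(|| 1);               // flagged
        let t = flag.then_some(1);                // suggested
        // let u = fallback.unwrap_or_else(|| 0); // flagged
        let u = fallback.unwrap_or(0);            // suggested

        (s, t, u)
    }

    fn main() {
        assert_eq!(examples(true, None), ("hello".to_string(), Some(1), 0));
    }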

View file

@@ -493,7 +493,9 @@ impl CargoActor {
             // Skip certain kinds of messages to only spend time on what's useful
             JsonMessage::Cargo(message) => match message {
                 cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
-                    self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
+                    self.sender
+                        .send(CargoMessage::CompilerArtifact(Box::new(artifact)))
+                        .unwrap();
                 }
                 cargo_metadata::Message::CompilerMessage(msg) => {
                     self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
@@ -538,7 +540,7 @@ impl CargoActor {
 }
 enum CargoMessage {
-    CompilerArtifact(cargo_metadata::Artifact),
+    CompilerArtifact(Box<cargo_metadata::Artifact>),
     Diagnostic(Diagnostic),
 }
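
For context, `large_enum_variant` fires because an enum is as large as its biggest variant; `cargo_metadata::Artifact` is a fairly large struct, so every `CargoMessage` on the channel paid for it. Boxing the payload keeps the enum small. A stand-alone sketch of the idea, using a made-up `Big` struct rather than the real `Artifact`:

    use std::mem::size_of;

    #[allow(dead_code)] // illustration only, never constructed
    struct Big([u8; 1024]); // stand-in for a large payload such as cargo_metadata::Artifact

    #[allow(dead_code)]
    enum Unboxed {
        Large(Big),
        Small(u8),
    }

    #[allow(dead_code)]
    enum Boxed {
        Large(Box<Big>), // heap-allocate the large payload
        Small(u8),
    }

    fn main() {
        // Every Unboxed value reserves space for the largest variant...
        assert!(size_of::<Unboxed>() >= 1024);
        // ...while Boxed stays around pointer size plus a discriminant.
        assert!(size_of::<Boxed>() <= 16);
        println!("unboxed: {}, boxed: {}", size_of::<Unboxed>(), size_of::<Boxed>());
    }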

View file

@@ -33,7 +33,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
                 }
             )
         }),
-        DefWithBodyId::InTypeConstId(_) => format!("In type const = "),
+        DefWithBodyId::InTypeConstId(_) => "In type const = ".to_string(),
         DefWithBodyId::VariantId(it) => {
             let loc = it.lookup(db);
             let enum_loc = loc.parent.lookup(db);

View file

@@ -256,7 +256,7 @@ impl SsrError {
         "##,
     );
-    assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]);
+    assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]);
     expect![[r#"
         fn main() {
             _ = $crate::error::SsrError::new(
@@ -309,7 +309,7 @@ fn f() {
         "#,
     );
-    let (_, source_map) = db.body_with_source_map(def.into());
+    let (_, source_map) = db.body_with_source_map(def);
     assert_eq!(source_map.diagnostics(), &[]);
     for (_, def_map) in body.blocks(&db) {

View file

@@ -782,7 +782,7 @@ impl<'a> AssocItemCollector<'a> {
                 self.diagnostics.push(DefDiagnostic::macro_expansion_parse_error(
                     self.module_id.local_id,
                     error_call_kind(),
-                    errors.into(),
+                    errors,
                 ));
             }

View file

@@ -166,6 +166,7 @@ enum PositionUsedAs {
 }
 use PositionUsedAs::*;
+#[allow(clippy::unnecessary_lazy_evaluations)]
 pub(crate) fn parse(
     s: &ast::String,
     fmt_snippet: Option<String>,
@@ -177,9 +178,9 @@ pub(crate) fn parse(
     let text = s.text_without_quotes();
     let str_style = match s.quote_offsets() {
         Some(offsets) => {
-            let raw = u32::from(offsets.quotes.0.len()) - 1;
+            let raw = usize::from(offsets.quotes.0.len()) - 1;
             // subtract 1 for the `r` prefix
-            (raw != 0).then(|| raw as usize - 1)
+            (raw != 0).then(|| raw - 1)
         }
         None => None,
     };
@@ -432,7 +433,7 @@ pub(crate) fn parse(
     }
 }
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
 pub struct FormatArgumentsCollector {
     arguments: Vec<FormatArgument>,
     num_unnamed_args: usize,
@@ -451,7 +452,7 @@ impl FormatArgumentsCollector {
     }
     pub fn new() -> Self {
-        Self { arguments: vec![], names: vec![], num_unnamed_args: 0, num_explicit_args: 0 }
+        Default::default()
     }
     pub fn add(&mut self, arg: FormatArgument) -> usize {
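
The function-level `#[allow(clippy::unnecessary_lazy_evaluations)]` in this hunk is presumably there because the lint's suggested rewrite would be wrong here: `then_some` evaluates its argument eagerly, so `(raw != 0).then_some(raw - 1)` would compute `raw - 1` even when `raw == 0` and underflow. A small stand-alone sketch of that distinction:

    fn subtract_prefix(raw: usize) -> Option<usize> {
        // `then` takes a closure, so `raw - 1` only runs when the guard holds.
        // `(raw != 0).then_some(raw - 1)` would evaluate `raw - 1` unconditionally
        // and panic on underflow (in debug builds) when raw == 0.
        (raw != 0).then(|| raw - 1)
    }

    fn main() {
        assert_eq!(subtract_prefix(0), None);
        assert_eq!(subtract_prefix(2), Some(1));
    }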

View file

@@ -297,7 +297,7 @@ impl SearchMode {
             SearchMode::Exact => candidate.eq_ignore_ascii_case(query),
             SearchMode::Prefix => {
                 query.len() <= candidate.len() && {
-                    let prefix = &candidate[..query.len() as usize];
+                    let prefix = &candidate[..query.len()];
                     if case_sensitive {
                         prefix == query
                     } else {
@@ -396,7 +396,7 @@ impl Query {
 pub fn search_dependencies(
     db: &dyn DefDatabase,
     krate: CrateId,
-    ref query: Query,
+    query: &Query,
 ) -> FxHashSet<ItemInNs> {
     let _p = tracing::span!(tracing::Level::INFO, "search_dependencies", ?query).entered();
@@ -446,7 +446,7 @@ fn search_maps(
         let end = (value & 0xFFFF_FFFF) as usize;
         let start = (value >> 32) as usize;
         let ImportMap { item_to_info_map, importables, .. } = &*import_maps[import_map_idx];
-        let importables = &importables[start as usize..end];
+        let importables = &importables[start..end];
         let iter = importables
             .iter()
@@ -516,7 +516,7 @@ mod tests {
             })
             .expect("could not find crate");
-        let actual = search_dependencies(db.upcast(), krate, query)
+        let actual = search_dependencies(db.upcast(), krate, &query)
             .into_iter()
             .filter_map(|dependency| {
                 let dependency_krate = dependency.krate(db.upcast())?;

View file

@@ -25,7 +25,7 @@ use hir_expand::{
     InFile, MacroFileId, MacroFileIdExt,
 };
 use span::Span;
-use stdx::format_to;
+use stdx::{format_to, format_to_acc};
 use syntax::{
     ast::{self, edit::IndentLevel},
     AstNode,
@@ -149,8 +149,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
     if tree {
         let tree = format!("{:#?}", parse.syntax_node())
             .split_inclusive('\n')
-            .map(|line| format!("// {line}"))
-            .collect::<String>();
+            .fold(String::new(), |mut acc, line| format_to_acc!(acc, "// {line}"));
         format_to!(expn_text, "\n{}", tree)
     }
     let range = call.syntax().text_range();

View file

@@ -1924,7 +1924,7 @@ impl ModCollector<'_, '_> {
                     item_tree: self.item_tree,
                     mod_dir,
                 }
-                .collect_in_top_module(&*items);
+                .collect_in_top_module(items);
                 if is_macro_use {
                     self.import_all_legacy_macros(module_id);
                 }

View file

@@ -475,7 +475,7 @@ impl DefMap {
         let macro_use_prelude = || {
             self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, _extern_crate)| {
                 PerNs::macros(
-                    it.into(),
+                    it,
                     Visibility::Public,
                     // FIXME?
                     None, // extern_crate.map(ImportOrExternCrate::ExternCrate),

View file

@@ -16,19 +16,13 @@ pub enum Namespace {
     Macros,
 }
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
 pub struct PerNs {
     pub types: Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)>,
     pub values: Option<(ModuleDefId, Visibility, Option<ImportId>)>,
     pub macros: Option<(MacroId, Visibility, Option<ImportId>)>,
 }
-impl Default for PerNs {
-    fn default() -> Self {
-        PerNs { types: None, values: None, macros: None }
-    }
-}
 impl PerNs {
     pub fn none() -> PerNs {
         PerNs { types: None, values: None, macros: None }
@@ -131,13 +125,11 @@ impl PerNs {
             .into_iter()
             .chain(
                 self.values
-                    .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import)))
-                    .into_iter(),
+                    .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import))),
             )
             .chain(
                 self.macros
-                    .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import)))
-                    .into_iter(),
+                    .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import))),
             )
     }
 }
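
The removed manual `Default` impl only set every field to `None`, which is exactly what `#[derive(Default)]` produces, so `derivable_impls` asks for the derive instead. A minimal sketch of the before/after shape, with simplified field types rather than the real `PerNs`:

    // Hand-rolled impl that clippy::derivable_impls flags: it matches the derive exactly.
    struct Manual {
        types: Option<u32>,
        values: Option<u32>,
    }

    impl Default for Manual {
        fn default() -> Self {
            Manual { types: None, values: None }
        }
    }

    // The derive generates the same impl with less code to keep in sync.
    #[derive(Default)]
    struct Derived {
        types: Option<u32>,
        values: Option<u32>,
    }

    fn main() {
        let (m, d) = (Manual::default(), Derived::default());
        assert_eq!((m.types, m.values), (d.types, d.values));
    }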

View file

@@ -41,13 +41,13 @@ impl Default for TestDB {
 impl Upcast<dyn ExpandDatabase> for TestDB {
     fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
-        &*self
+        self
     }
 }
 impl Upcast<dyn DefDatabase> for TestDB {
     fn upcast(&self) -> &(dyn DefDatabase + 'static) {
-        &*self
+        self
     }
 }

View file

@@ -230,7 +230,7 @@ impl Attr {
                 )
             )
         })
-        .unwrap_or_else(|| tt.len());
+        .unwrap_or(tt.len());
         let (path, input) = tt.split_at(path_end);
         let path = Interned::new(ModPath::from_tt(db, path)?);

View file

@@ -84,8 +84,7 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
                 Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
                 Some(1..) => item_ty.super_visit_with(self, outer_binder),
             },
-            TyKind::Ref(..) | _ => CONTINUE_OPAQUELY_INHABITED,
+            _ => CONTINUE_OPAQUELY_INHABITED,
         };
         self.recursive_ty.remove(ty);
         self.max_depth += 1;

View file

@@ -114,7 +114,7 @@ impl MirLowerCtx<'_> {
                     index: i as u32,
                 }))
             }),
-            &mut cond_place,
+            &cond_place,
             mode,
         )?
     }

View file

@@ -440,7 +440,7 @@ pub(crate) fn detect_variant_from_bytes<'a>(
             (db.enum_data(e).variants[index.0].0, layout)
         }
         hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => {
-            let size = tag.size(&*target_data_layout).bytes_usize();
+            let size = tag.size(target_data_layout).bytes_usize();
             let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
             let tag = i128::from_le_bytes(pad16(&b[offset..offset + size], false));
             match tag_encoding {

View file

@@ -239,10 +239,9 @@ fn resolve_impl_trait_item(
 ) -> Option<DocLinkDef> {
     let canonical = ty.canonical();
     let krate = ty.krate(db);
-    let environment = resolver.generic_def().map_or_else(
-        || crate::TraitEnvironment::empty(krate.id).into(),
-        |d| db.trait_environment(d),
-    );
+    let environment = resolver
+        .generic_def()
+        .map_or_else(|| crate::TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
     let traits_in_scope = resolver.traits_in_scope(db.upcast());
     let mut result = None;
@@ -297,7 +296,7 @@ fn as_module_def_if_namespace_matches(
         AssocItem::TypeAlias(it) => (ModuleDef::TypeAlias(it), Namespace::Types),
     };
-    (ns.unwrap_or(expected_ns) == expected_ns).then(|| DocLinkDef::ModuleDef(def))
+    (ns.unwrap_or(expected_ns) == expected_ns).then_some(DocLinkDef::ModuleDef(def))
 }
 fn modpath_from_str(link: &str) -> Option<ModPath> {

View file

@@ -236,7 +236,7 @@ impl Crate {
         query: import_map::Query,
     ) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
         let _p = tracing::span!(tracing::Level::INFO, "query_external_importables");
-        import_map::search_dependencies(db, self.into(), query).into_iter().map(|item| {
+        import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| {
             match ItemInNs::from(item) {
                 ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
                 ItemInNs::Macros(mac_id) => Either::Right(mac_id),
@@ -903,7 +903,7 @@ fn emit_def_diagnostic_(
         }
         DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
             let node = ast.to_node(db.upcast());
-            let derive = node.attrs().nth(*id as usize);
+            let derive = node.attrs().nth(*id);
             match derive {
                 Some(derive) => {
                     acc.push(
@@ -918,7 +918,7 @@ fn emit_def_diagnostic_(
         }
         DefDiagnosticKind::MalformedDerive { ast, id } => {
             let node = ast.to_node(db.upcast());
-            let derive = node.attrs().nth(*id as usize);
+            let derive = node.attrs().nth(*id);
             match derive {
                 Some(derive) => {
                     acc.push(

View file

@@ -2,6 +2,7 @@
 use std::{fmt, fs, path::Path};
+use stdx::format_to_acc;
 use test_utils::project_root;
 #[test]
@@ -172,8 +173,7 @@ impl fmt::Display for Assist {
 fn hide_hash_comments(text: &str) -> String {
     text.split('\n') // want final newline
         .filter(|&it| !(it.starts_with("# ") || it == "#"))
-        .map(|it| format!("{it}\n"))
-        .collect()
+        .fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n"))
 }
 fn reveal_hash_comments(text: &str) -> String {
@@ -187,6 +187,5 @@ fn reveal_hash_comments(text: &str) -> String {
             it
         }
     })
-        .map(|it| format!("{it}\n"))
-        .collect()
+        .fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n"))
 }

View file

@@ -1267,8 +1267,7 @@ fn pattern_context_for(
     pat
         .syntax()
         .ancestors()
-        .skip_while(|it| ast::Pat::can_cast(it.kind()))
-        .next()
+        .find(|it| !ast::Pat::can_cast(it.kind()))
         .map_or((PatternRefutability::Irrefutable, false), |node| {
             let refutability = match_ast! {
                 match node {
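
`skip_while(p).next()` and `find(|x| !p(x))` return the same element; clippy's `skip_while_next` prefers the latter because it names the intent directly. A tiny stand-alone sketch of the equivalence, on made-up token data:

    fn main() {
        let tokens = ["(", "(", "ident", ")"];

        // Flagged: skip_while followed by next.
        let via_skip_while = tokens.iter().skip_while(|t| **t == "(").next();
        // Suggested: find with the negated predicate.
        let via_find = tokens.iter().find(|t| **t != "(");

        assert_eq!(via_skip_while, via_find);
        assert_eq!(via_find, Some(&"ident"));
    }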

View file

@@ -210,23 +210,14 @@ pub(crate) fn check_edit_with_config(
     let mut combined_edit = completion.text_edit.clone();
-    resolve_completion_edits(
-        &db,
-        &config,
-        position,
-        completion
-            .import_to_add
-            .iter()
-            .cloned()
-            .filter_map(|(import_path, import_name)| Some((import_path, import_name))),
-    )
-    .into_iter()
-    .flatten()
-    .for_each(|text_edit| {
-        combined_edit.union(text_edit).expect(
-            "Failed to apply completion resolve changes: change ranges overlap, but should not",
-        )
-    });
+    resolve_completion_edits(&db, &config, position, completion.import_to_add.iter().cloned())
+        .into_iter()
+        .flatten()
+        .for_each(|text_edit| {
+            combined_edit.union(text_edit).expect(
+                "Failed to apply completion resolve changes: change ranges overlap, but should not",
+            )
+        });
     combined_edit.apply(&mut actual);
     assert_eq_text!(&ra_fixture_after, &actual)

View file

@@ -467,8 +467,7 @@ fn recursive_normalize(use_tree: &ast::UseTree, style: NormalizationStyle) -> Op
         }
         ted::replace_all(start..=end, elements);
     } else {
-        let new_use_tree_list =
-            make::use_tree_list(subtrees.into_iter()).clone_for_update();
+        let new_use_tree_list = make::use_tree_list(subtrees).clone_for_update();
         ted::replace(use_tree_list.syntax(), new_use_tree_list.syntax());
     }
     modified = true;

View file

@@ -120,6 +120,7 @@ pub(crate) fn hover(
     Some(res)
 }
+#[allow(clippy::field_reassign_with_default)]
 fn hover_simple(
     sema: &Semantics<'_, RootDatabase>,
     FilePosition { file_id, offset }: FilePosition,

View file

@@ -184,7 +184,7 @@ impl ProcMacro {
             .process
             .lock()
             .unwrap_or_else(|e| e.into_inner())
-            .send_task(msg::Request::ExpandMacro(task))?;
+            .send_task(msg::Request::ExpandMacro(Box::new(task)))?;
         match response {
             msg::Response::ExpandMacro(it) => {

View file

@@ -29,7 +29,7 @@ pub enum Request {
     /// Since [`NO_VERSION_CHECK_VERSION`]
     ListMacros { dylib_path: PathBuf },
     /// Since [`NO_VERSION_CHECK_VERSION`]
-    ExpandMacro(ExpandMacro),
+    ExpandMacro(Box<ExpandMacro>),
     /// Since [`VERSION_CHECK_VERSION`]
     ApiVersionCheck {},
     /// Since [`RUST_ANALYZER_SPAN_SUPPORT`]

View file

@@ -45,9 +45,11 @@ fn run() -> io::Result<()> {
                 msg::Response::ListMacros(srv.list_macros(&dylib_path))
             }
             msg::Request::ExpandMacro(task) => match srv.span_mode() {
-                msg::SpanMode::Id => msg::Response::ExpandMacro(srv.expand(task).map(|(it, _)| it)),
+                msg::SpanMode::Id => {
+                    msg::Response::ExpandMacro(srv.expand(*task).map(|(it, _)| it))
+                }
                 msg::SpanMode::RustAnalyzer => msg::Response::ExpandMacroExtended(
-                    srv.expand(task).map(|(tree, span_data_table)| msg::ExpandMacroExtended {
+                    srv.expand(*task).map(|(tree, span_data_table)| msg::ExpandMacroExtended {
                         tree,
                         span_data_table,
                     }),

View file

@@ -60,7 +60,7 @@ pub enum ProjectWorkspace {
         cargo: CargoWorkspace,
         build_scripts: WorkspaceBuildScripts,
         sysroot: Result<Sysroot, Option<String>>,
-        rustc: Result<(CargoWorkspace, WorkspaceBuildScripts), Option<String>>,
+        rustc: Result<Box<(CargoWorkspace, WorkspaceBuildScripts)>, Option<String>>,
         /// Holds cfg flags for the current target. We get those by running
         /// `rustc --print cfg`.
         ///
@@ -119,7 +119,7 @@ impl fmt::Debug for ProjectWorkspace {
                 .field("sysroot", &sysroot.is_ok())
                 .field(
                     "n_rustc_compiler_crates",
-                    &rustc.as_ref().map_or(0, |(rc, _)| rc.packages().len()),
+                    &rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(rc, _)| rc.packages().len()),
                 )
                 .field("n_rustc_cfg", &rustc_cfg.len())
                 .field("n_cfg_overrides", &cfg_overrides.len())
@@ -265,7 +265,7 @@ impl ProjectWorkspace {
                         cargo_toml.parent(),
                         &config.extra_env,
                     );
-                    Ok((workspace, buildscripts))
+                    Ok(Box::new((workspace, buildscripts)))
                 }
                 Err(e) => {
                     tracing::error!(
@@ -603,7 +603,7 @@ impl ProjectWorkspace {
                     PackageRoot { is_local, include, exclude }
                 })
                 .chain(mk_sysroot(sysroot.as_ref(), Some(cargo.workspace_root())))
-                .chain(rustc.iter().flat_map(|(rustc, _)| {
+                .chain(rustc.iter().map(|a| a.as_ref()).flat_map(|(rustc, _)| {
                     rustc.packages().map(move |krate| PackageRoot {
                         is_local: false,
                         include: vec![rustc[krate].manifest.parent().to_path_buf()],
@@ -631,7 +631,8 @@ impl ProjectWorkspace {
                 sysroot_package_len + project.n_crates()
             }
             ProjectWorkspace::Cargo { cargo, sysroot, rustc, .. } => {
-                let rustc_package_len = rustc.as_ref().map_or(0, |(it, _)| it.packages().len());
+                let rustc_package_len =
+                    rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(it, _)| it.packages().len());
                 let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.num_packages());
                 cargo.packages().len() + sysroot_package_len + rustc_package_len
             }
@@ -672,7 +673,7 @@ impl ProjectWorkspace {
                 target_layout,
             } => cargo_to_crate_graph(
                 load,
-                rustc.as_ref().ok(),
+                rustc.as_ref().map(|a| a.as_ref()).ok(),
                 cargo,
                 sysroot.as_ref().ok(),
                 rustc_cfg.clone(),

View file

@@ -58,12 +58,14 @@ impl flags::AnalysisStats {
             Rand32::new(seed)
         };
-        let mut cargo_config = CargoConfig::default();
-        cargo_config.sysroot = match self.no_sysroot {
+        let cargo_config = CargoConfig {
+            sysroot: match self.no_sysroot {
                 true => None,
                 false => Some(RustLibSource::Discover),
+            },
+            sysroot_query_metadata: self.query_sysroot_metadata,
+            ..Default::default()
         };
-        cargo_config.sysroot_query_metadata = self.query_sysroot_metadata;
         let no_progress = &|_| ();
         let mut db_load_sw = self.stop_watch();
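
This hunk and the ones below address `field_reassign_with_default`, which fires when a value is built with `Default::default()` and then immediately mutated field by field; functional struct-update syntax expresses the same thing without the `mut` binding. A minimal sketch with a stand-in config type, not the real `CargoConfig`:

    #[derive(Default)]
    struct Config {
        sysroot: Option<String>,
        query_metadata: bool,
        jobs: usize,
    }

    fn main() {
        // Flagged: default, then reassign fields one by one.
        let mut before = Config::default();
        before.sysroot = Some("discover".to_string());
        before.query_metadata = true;

        // Suggested: set the interesting fields, take the rest from Default.
        let after = Config {
            sysroot: Some("discover".to_string()),
            query_metadata: true,
            ..Default::default()
        };

        assert_eq!(before.sysroot, after.sysroot);
        assert_eq!(before.query_metadata, after.query_metadata);
        assert_eq!(before.jobs, after.jobs);
    }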

View file

@@ -13,8 +13,8 @@ use crate::cli::flags;
 impl flags::Diagnostics {
     pub fn run(self) -> anyhow::Result<()> {
-        let mut cargo_config = CargoConfig::default();
-        cargo_config.sysroot = Some(RustLibSource::Discover);
+        let cargo_config =
+            CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
         let with_proc_macro_server = if let Some(p) = &self.proc_macro_srv {
             let path = vfs::AbsPathBuf::assert(std::env::current_dir()?.join(p));
             ProcMacroServerChoice::Explicit(path)

View file

@@ -287,8 +287,8 @@ impl flags::Lsif {
     pub fn run(self) -> anyhow::Result<()> {
         eprintln!("Generating LSIF started...");
         let now = Instant::now();
-        let mut cargo_config = CargoConfig::default();
-        cargo_config.sysroot = Some(RustLibSource::Discover);
+        let cargo_config =
+            CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
         let no_progress = &|_| ();
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: true,

View file

@@ -13,8 +13,8 @@ use crate::cli::{flags, full_name_of_item, Result};
 impl flags::RunTests {
     pub fn run(self) -> Result<()> {
-        let mut cargo_config = CargoConfig::default();
-        cargo_config.sysroot = Some(RustLibSource::Discover);
+        let cargo_config =
+            CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: true,
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,

View file

@@ -59,8 +59,8 @@ impl Tester {
         path.push("ra-rustc-test.rs");
         let tmp_file = AbsPathBuf::try_from(path).unwrap();
         std::fs::write(&tmp_file, "")?;
-        let mut cargo_config = CargoConfig::default();
-        cargo_config.sysroot = Some(RustLibSource::Discover);
+        let cargo_config =
+            CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
         let workspace = ProjectWorkspace::DetachedFiles {
             files: vec![tmp_file.clone()],
             sysroot: Ok(Sysroot::discover(

View file

@@ -10,8 +10,8 @@ use crate::cli::flags;
 impl flags::Ssr {
     pub fn run(self) -> anyhow::Result<()> {
         use ide_db::base_db::SourceDatabaseExt;
-        let mut cargo_config = CargoConfig::default();
-        cargo_config.sysroot = Some(RustLibSource::Discover);
+        let cargo_config =
+            CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: true,
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,

View file

@@ -32,6 +32,7 @@ use project_model::{
 };
 use rustc_hash::{FxHashMap, FxHashSet};
 use serde::{de::DeserializeOwned, Deserialize};
+use stdx::format_to_acc;
 use vfs::{AbsPath, AbsPathBuf};
 use crate::{
@@ -1744,7 +1745,7 @@ impl Config {
     }
     pub fn main_loop_num_threads(&self) -> usize {
-        self.data.numThreads.unwrap_or(num_cpus::get_physical().try_into().unwrap_or(1))
+        self.data.numThreads.unwrap_or(num_cpus::get_physical())
     }
     pub fn typing_autoclose_angle(&self) -> bool {
@@ -2563,14 +2564,13 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
 #[cfg(test)]
 fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
-    fields
-        .iter()
-        .map(|(field, _ty, doc, default)| {
-            let name = format!("rust-analyzer.{}", field.replace('_', "."));
-            let doc = doc_comment_to_string(doc);
-            if default.contains('\n') {
-                format!(
-                    r#"[[{name}]]{name}::
+    fields.iter().fold(String::new(), |mut acc, (field, _ty, doc, default)| {
+        let name = format!("rust-analyzer.{}", field.replace('_', "."));
+        let doc = doc_comment_to_string(doc);
+        if default.contains('\n') {
+            format_to_acc!(
+                acc,
+                r#"[[{name}]]{name}::
 +
 --
 Default:
@@ -2580,16 +2580,17 @@ Default:
 {doc}
 --
 "#
             )
         } else {
-            format!("[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n")
+            format_to_acc!(acc, "[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n")
         }
     })
-        .collect::<String>()
 }
 fn doc_comment_to_string(doc: &[&str]) -> String {
-    doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{it}\n")).collect()
+    doc.iter()
+        .map(|it| it.strip_prefix(' ').unwrap_or(it))
+        .fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n"))
 }
 #[cfg(test)]

View file

@@ -312,16 +312,14 @@ fn completion_item(
     set_score(&mut lsp_item, max_relevance, item.relevance);
     if config.completion().enable_imports_on_the_fly && !item.import_to_add.is_empty() {
-        let imports: Vec<_> = item
+        let imports = item
             .import_to_add
             .into_iter()
-            .filter_map(|(import_path, import_name)| {
-                Some(lsp_ext::CompletionImport {
-                    full_import_path: import_path,
-                    imported_name: import_name,
-                })
+            .map(|(import_path, import_name)| lsp_ext::CompletionImport {
+                full_import_path: import_path,
+                imported_name: import_name,
             })
-            .collect();
+            .collect::<Vec<_>>();
         if !imports.is_empty() {
             let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports };
             lsp_item.data = Some(to_value(data).unwrap());
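
A `filter_map` whose closure always returns `Some` never filters anything, which is what `unnecessary_filter_map` points out; plain `map` says the same thing without the `Option` wrapping. A small self-contained sketch of the same rewrite, on a made-up `Import` type rather than `lsp_ext::CompletionImport`:

    struct Import {
        path: String,
        name: String,
    }

    fn main() {
        let pairs = vec![("std::fmt".to_string(), "fmt".to_string())];

        // Flagged: the closure always returns Some, so nothing is ever filtered out.
        let flagged: Vec<Import> = pairs
            .iter()
            .cloned()
            .filter_map(|(path, name)| Some(Import { path, name }))
            .collect();

        // Suggested: map states the intent directly.
        let suggested: Vec<Import> = pairs
            .into_iter()
            .map(|(path, name)| Import { path, name })
            .collect();

        assert_eq!(flagged.len(), suggested.len());
        assert_eq!(suggested[0].path, "std::fmt");
        assert_eq!(flagged[0].name, suggested[0].name);
    }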

View file

@@ -31,6 +31,7 @@ use lsp_types::{
 };
 use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams};
 use serde_json::json;
+use stdx::format_to_acc;
 use test_utils::skip_slow_tests;
 use crate::{
@@ -591,8 +592,10 @@ fn diagnostics_dont_block_typing() {
         return;
     }
-    let librs: String = (0..10).map(|i| format!("mod m{i};")).collect();
-    let libs: String = (0..10).map(|i| format!("//- /src/m{i}.rs\nfn foo() {{}}\n\n")).collect();
+    let librs: String = (0..10).fold(String::new(), |mut acc, i| format_to_acc!(acc, "mod m{i};"));
+    let libs: String = (0..10).fold(String::new(), |mut acc, i| {
+        format_to_acc!(acc, "//- /src/m{i}.rs\nfn foo() {{}}\n\n")
+    });
     let server = Project::with_fixture(&format!(
         r#"
 //- /Cargo.toml

View file

@@ -14,6 +14,22 @@ macro_rules! format_to {
     };
 }
+/// Appends formatted string to a `String` and returns the `String`.
+///
+/// Useful for folding iterators into a `String`.
+#[macro_export]
+macro_rules! format_to_acc {
+    ($buf:expr, $lit:literal $($arg:tt)*) => {
+        {
+            use ::std::fmt::Write as _;
+            // We can't do ::std::fmt::Write::write_fmt($buf, format_args!($lit $($arg)*))
+            // unfortunately, as that loses out on autoref behavior.
+            _ = $buf.write_fmt(format_args!($lit $($arg)*));
+            $buf
+        }
+    };
+}
 /// Generates `From` impls for `Enum E { Foo(Foo), Bar(Bar) }` enums
 ///
 /// # Example
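
This new macro is what the `format_collect` fixes throughout the commit fold over: it writes into the accumulator and hands it back, so it slots directly into `Iterator::fold` in place of `.map(|x| format!(..)).collect::<String>()`. A small usage sketch; the macro is copied locally here so the snippet runs on its own, whereas in the workspace you would `use stdx::format_to_acc;`:

    // Local copy of the macro introduced above, for a self-contained example.
    macro_rules! format_to_acc {
        ($buf:expr, $lit:literal $($arg:tt)*) => {{
            use ::std::fmt::Write as _;
            _ = $buf.write_fmt(format_args!($lit $($arg)*));
            $buf
        }};
    }

    fn main() {
        // The accumulator is threaded through `fold` and returned by the macro,
        // avoiding one intermediate String allocation per element.
        let listing = (0..3).fold(String::new(), |mut acc, i| format_to_acc!(acc, "mod m{i};\n"));
        assert_eq!(listing, "mod m0;\nmod m1;\nmod m2;\n");
    }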

View file

@@ -9,10 +9,11 @@
 //! API should require to assemble every node piecewise. The trick of
 //! `parse(format!())` we use internally is an implementation detail -- long
 //! term, it will be replaced with direct tree manipulation.
 use itertools::Itertools;
 use parser::T;
 use rowan::NodeOrToken;
-use stdx::{format_to, never};
+use stdx::{format_to, format_to_acc, never};
 use crate::{ast, utils::is_raw_identifier, AstNode, SourceFile, SyntaxKind, SyntaxToken};
@@ -759,15 +760,12 @@ pub fn match_arm_with_guard(
 }
 pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
-    let arms_str = arms
-        .into_iter()
-        .map(|arm| {
-            let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
-            let comma = if needs_comma { "," } else { "" };
-            let arm = arm.syntax();
-            format!(" {arm}{comma}\n")
-        })
-        .collect::<String>();
+    let arms_str = arms.into_iter().fold(String::new(), |mut acc, arm| {
+        let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
+        let comma = if needs_comma { "," } else { "" };
+        let arm = arm.syntax();
+        format_to_acc!(acc, " {arm}{comma}\n")
+    });
     return from_text(&arms_str);
     fn from_text(text: &str) -> ast::MatchArmList {

View file

@@ -11,6 +11,7 @@ use std::{
 use ast::HasName;
 use expect_test::expect_file;
 use rayon::prelude::*;
+use stdx::format_to_acc;
 use test_utils::{bench, bench_fixture, project_root};
 use crate::{ast, fuzz, AstNode, SourceFile, SyntaxError};
@@ -104,10 +105,9 @@ fn self_hosting_parsing() {
         .collect::<Vec<_>>();
     if !errors.is_empty() {
-        let errors = errors
-            .into_iter()
-            .map(|(path, err)| format!("{}: {:?}\n", path.display(), err[0]))
-            .collect::<String>();
+        let errors = errors.into_iter().fold(String::new(), |mut acc, (path, err)| {
+            format_to_acc!(acc, "{}: {:?}\n", path.display(), err[0])
+        });
         panic!("Parsing errors:\n{errors}\n");
     }
 }