Run cargo fmt

Amos Wenger 2022-07-20 15:06:15 +02:00
parent 8318035726
commit 7e285e1ef5
35 changed files with 190 additions and 45 deletions
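
The reflowed signatures below are what rustfmt emits when a function header exceeds its default max_width of 100 columns: each parameter moves onto its own line with a trailing comma, and the return type stays on the line with the closing parenthesis. A minimal sketch for reproducing or checking this kind of change locally, assuming a plain Cargo workspace with no custom rustfmt.toml:

    cargo fmt --all              # rewrite every workspace crate in place
    cargo fmt --all -- --check   # dry run; exits non-zero if any file would change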

@@ -188,7 +188,10 @@ impl<'a> PathSegments<'a> {
 }
 impl GenericArgs {
-    pub(crate) fn from_ast(lower_ctx: &LowerCtx<'_>, node: ast::GenericArgList) -> Option<GenericArgs> {
+    pub(crate) fn from_ast(
+        lower_ctx: &LowerCtx<'_>,
+        node: ast::GenericArgList,
+    ) -> Option<GenericArgs> {
         lower::lower_generic_args(lower_ctx, node)
     }

@@ -70,7 +70,10 @@ impl Iterator for Autoderef<'_, '_> {
     }
 }
-pub(crate) fn autoderef_step(table: &mut InferenceTable<'_>, ty: Ty) -> Option<(AutoderefKind, Ty)> {
+pub(crate) fn autoderef_step(
+    table: &mut InferenceTable<'_>,
+    ty: Ty,
+) -> Option<(AutoderefKind, Ty)> {
     if let Some(derefed) = builtin_deref(&ty) {
         Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed)))
     } else {

@@ -952,7 +952,11 @@ fn write_bounds_like_dyn_trait(
     Ok(())
 }
-fn fmt_trait_ref(tr: &TraitRef, f: &mut HirFormatter<'_>, use_as: bool) -> Result<(), HirDisplayError> {
+fn fmt_trait_ref(
+    tr: &TraitRef,
+    f: &mut HirFormatter<'_>,
+    use_as: bool,
+) -> Result<(), HirDisplayError> {
     if f.should_truncate() {
         return write!(f, "{}", TYPE_HINT_TRUNCATION);
     }

@@ -140,7 +140,12 @@ trait PatLike: Into<ExprOrPatId> + Copy {
 impl PatLike for ExprId {
     type BindingMode = ();
-    fn infer(this: &mut InferenceContext<'_>, id: Self, expected_ty: &Ty, _: Self::BindingMode) -> Ty {
+    fn infer(
+        this: &mut InferenceContext<'_>,
+        id: Self,
+        expected_ty: &Ty,
+        _: Self::BindingMode,
+    ) -> Ty {
         this.infer_assignee_expr(id, expected_ty)
     }
 }

@@ -289,7 +289,10 @@ impl HirDisplay for ConstParam {
     }
 }
-fn write_generic_params(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+fn write_generic_params(
+    def: GenericDefId,
+    f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
     let params = f.db.generic_params(def);
     if params.lifetimes.is_empty()
         && params.type_or_consts.iter().all(|x| x.1.const_param().is_none())
@@ -381,8 +384,9 @@ fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(),
         let prev_pred =
             if pred_idx == 0 { None } else { Some(&params.where_predicates[pred_idx - 1]) };
-        let new_predicate =
-            |f: &mut HirFormatter<'_>| f.write_str(if pred_idx == 0 { "\n " } else { ",\n " });
+        let new_predicate = |f: &mut HirFormatter<'_>| {
+            f.write_str(if pred_idx == 0 { "\n " } else { ",\n " })
+        };
         match pred {
             WherePredicate::TypeBound { target, .. } if is_unnamed_type_target(target) => {}

@@ -85,7 +85,10 @@ pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext<'_
 // $0fn bar(&self) {}
 // }
 // ```
-pub(crate) fn add_missing_default_members(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+pub(crate) fn add_missing_default_members(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+) -> Option<()> {
     add_missing_impl_members_inner(
         acc,
         ctx,

@@ -142,7 +142,9 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
     Some(())
 }
-pub(super) fn find_importable_node(ctx: &AssistContext<'_>) -> Option<(ImportAssets, SyntaxElement)> {
+pub(super) fn find_importable_node(
+    ctx: &AssistContext<'_>,
+) -> Option<(ImportAssets, SyntaxElement)> {
     if let Some(path_under_caret) = ctx.find_node_at_offset_with_descend::<ast::Path>() {
         ImportAssets::for_exact_path(&path_under_caret, &ctx.sema)
             .zip(Some(path_under_caret.syntax().clone().into()))

@@ -32,7 +32,10 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 // }
 // }
 // ```
-pub(crate) fn convert_iter_for_each_to_for(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+pub(crate) fn convert_iter_for_each_to_for(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+) -> Option<()> {
     let method = ctx.find_node_at_offset::<ast::MethodCallExpr>()?;
     let closure = match method.arg_list()?.args().next()? {
@@ -91,7 +94,10 @@ pub(crate) fn convert_iter_for_each_to_for(acc: &mut Assists, ctx: &AssistContex
 // });
 // }
 // ```
-pub(crate) fn convert_for_loop_with_for_each(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+pub(crate) fn convert_for_loop_with_for_each(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+) -> Option<()> {
     let for_loop = ctx.find_node_at_offset::<ast::ForExpr>()?;
     let iterable = for_loop.iterable()?;
     let pat = for_loop.pat()?;

@@ -1042,7 +1042,11 @@ fn generic_parents(parent: &SyntaxNode) -> Vec<GenericParent> {
 }
 /// checks if relevant var is used with `&mut` access inside body
-fn has_exclusive_usages(ctx: &AssistContext<'_>, usages: &LocalUsages, body: &FunctionBody) -> bool {
+fn has_exclusive_usages(
+    ctx: &AssistContext<'_>,
+    usages: &LocalUsages,
+    body: &FunctionBody,
+) -> bool {
     usages
         .iter()
         .filter(|reference| body.contains_range(reference.range))

@@ -36,7 +36,10 @@ use crate::{
 // }
 // }
 // ```
-pub(crate) fn generate_enum_try_into_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+pub(crate) fn generate_enum_try_into_method(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+) -> Option<()> {
     generate_enum_projection_method(
         acc,
         ctx,

@@ -20,7 +20,10 @@ use crate::{utils::generate_trait_impl_text, AssistContext, AssistId, AssistKind
 // }
 // }
 // ```
-pub(crate) fn generate_from_impl_for_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+pub(crate) fn generate_from_impl_for_enum(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+) -> Option<()> {
     let variant = ctx.find_node_at_offset::<ast::Variant>()?;
     let variant_name = variant.name()?;
     let enum_ = ast::Adt::Enum(variant.parent_enum());

@@ -20,7 +20,10 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 // f(x)
 // }
 // ```
-pub(crate) fn move_bounds_to_where_clause(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+pub(crate) fn move_bounds_to_where_clause(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+) -> Option<()> {
     let type_param_list = ctx.find_node_at_offset::<ast::GenericParamList>()?;
     let mut type_params = type_param_list.type_or_const_params();

@@ -91,7 +91,10 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>)
 // }
 // }
 // ```
-pub(crate) fn move_arm_cond_to_match_guard(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+pub(crate) fn move_arm_cond_to_match_guard(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+) -> Option<()> {
     let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?;
     let match_pat = match_arm.pat()?;
     let arm_body = match_arm.expr()?;

@@ -86,7 +86,10 @@ fn replace<T: AstNode + PartialEq>(
     });
 }
-fn compute_fields_ranks(path: &ast::Path, ctx: &AssistContext<'_>) -> Option<FxHashMap<String, usize>> {
+fn compute_fields_ranks(
+    path: &ast::Path,
+    ctx: &AssistContext<'_>,
+) -> Option<FxHashMap<String, usize>> {
     let strukt = match ctx.sema.resolve_path(path) {
         Some(hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Struct(it)))) => it,
         _ => return None,

@@ -93,7 +93,10 @@ pub(crate) fn reorder_impl_items(acc: &mut Assists, ctx: &AssistContext<'_>) ->
     )
 }
-fn compute_item_ranks(path: &ast::Path, ctx: &AssistContext<'_>) -> Option<FxHashMap<String, usize>> {
+fn compute_item_ranks(
+    path: &ast::Path,
+    ctx: &AssistContext<'_>,
+) -> Option<FxHashMap<String, usize>> {
     let td = trait_definition(path, &ctx.sema)?;
     Some(

@@ -34,7 +34,10 @@ use crate::assist_context::{AssistContext, Assists};
 // };
 // }
 // ```
-pub(crate) fn replace_try_expr_with_match(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+pub(crate) fn replace_try_expr_with_match(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+) -> Option<()> {
     let qm_kw = ctx.find_token_syntax_at_offset(T![?])?;
     let qm_kw_parent = qm_kw.parent().and_then(ast::TryExpr::cast)?;

@@ -144,7 +144,12 @@ impl Completions {
         item.add_to(self);
     }
-    pub(crate) fn add_keyword_snippet(&mut self, ctx: &CompletionContext<'_>, kw: &str, snippet: &str) {
+    pub(crate) fn add_keyword_snippet(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        kw: &str,
+        snippet: &str,
+    ) {
         let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw);
         match ctx.config.snippet_cap {
@@ -348,7 +353,11 @@ impl Completions {
         ));
     }
-    pub(crate) fn add_type_alias(&mut self, ctx: &CompletionContext<'_>, type_alias: hir::TypeAlias) {
+    pub(crate) fn add_type_alias(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        type_alias: hir::TypeAlias,
+    ) {
         let is_private_editable = match ctx.is_visible(&type_alias) {
             Visible::Yes => false,
             Visible::Editable => true,
@@ -661,7 +670,11 @@ pub(super) fn complete_name_ref(
     }
 }
-fn complete_patterns(acc: &mut Completions, ctx: &CompletionContext<'_>, pattern_ctx: &PatternContext) {
+fn complete_patterns(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    pattern_ctx: &PatternContext,
+) {
     flyimport::import_on_the_fly_pat(acc, ctx, pattern_ctx);
     fn_param::complete_fn_param(acc, ctx, pattern_ctx);
     pattern::complete_pattern(acc, ctx, pattern_ctx);

@@ -5,7 +5,11 @@ use syntax::ast;
 use crate::{context::CompletionContext, item::CompletionItem, Completions};
-pub(super) fn complete_repr(acc: &mut Completions, ctx: &CompletionContext<'_>, input: ast::TokenTree) {
+pub(super) fn complete_repr(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    input: ast::TokenTree,
+) {
     if let Some(existing_reprs) = super::parse_comma_sep_expr(input) {
         for &ReprCompletion { label, snippet, lookup, collides } in REPR_COMPLETIONS {
             let repr_already_annotated = existing_reprs

@@ -8,7 +8,11 @@ use crate::{
 };
 /// Complete dot accesses, i.e. fields or methods.
-pub(crate) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext<'_>, dot_access: &DotAccess) {
+pub(crate) fn complete_dot(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    dot_access: &DotAccess,
+) {
     let receiver_ty = match dot_access {
         DotAccess { receiver_ty: Some(receiver_ty), .. } => &receiver_ty.original,
         _ => return,

@@ -30,7 +30,10 @@ pub(crate) fn complete_field_list_tuple_variant(
     }
 }
-pub(crate) fn complete_field_list_record_variant(acc: &mut Completions, ctx: &CompletionContext<'_>) {
+pub(crate) fn complete_field_list_record_variant(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+) {
     if ctx.qualifier_ctx.vis_node.is_none() {
         let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
         add_keyword("pub(crate)", "pub(crate)");

@@ -158,7 +158,10 @@ pub(crate) fn render_tuple_field(
     item.build()
 }
-pub(crate) fn render_type_inference(ty_string: String, ctx: &CompletionContext<'_>) -> CompletionItem {
+pub(crate) fn render_type_inference(
+    ty_string: String,
+    ctx: &CompletionContext<'_>,
+) -> CompletionItem {
     let mut builder =
         CompletionItem::new(CompletionItemKind::InferredType, ctx.source_range(), ty_string);
     builder.set_relevance(CompletionRelevance { is_definite: true, ..Default::default() });

@@ -130,7 +130,10 @@ pub enum IdentClass {
 }
 impl IdentClass {
-    pub fn classify_node(sema: &Semantics<'_, RootDatabase>, node: &SyntaxNode) -> Option<IdentClass> {
+    pub fn classify_node(
+        sema: &Semantics<'_, RootDatabase>,
+        node: &SyntaxNode,
+    ) -> Option<IdentClass> {
         match_ast! {
             match node {
                 ast::Name(name) => NameClass::classify(sema, &name).map(IdentClass::NameClass),
@@ -238,7 +241,10 @@ impl NameClass {
         };
         return Some(NameClass::Definition(definition));
-        fn classify_item(sema: &Semantics<'_, RootDatabase>, item: ast::Item) -> Option<Definition> {
+        fn classify_item(
+            sema: &Semantics<'_, RootDatabase>,
+            item: ast::Item,
+        ) -> Option<Definition> {
             let definition = match item {
                 ast::Item::MacroRules(it) => {
                     Definition::Macro(sema.to_def(&ast::Macro::MacroRules(it))?)

@@ -218,7 +218,10 @@ impl ImportAssets {
     }
     /// This may return non-absolute paths if a part of the returned path is already imported into scope.
-    pub fn search_for_relative_paths(&self, sema: &Semantics<'_, RootDatabase>) -> Vec<LocatedImport> {
+    pub fn search_for_relative_paths(
+        &self,
+        sema: &Semantics<'_, RootDatabase>,
+    ) -> Vec<LocatedImport> {
         let _p = profile::span("import_assets::search_for_relative_paths");
         self.search_for(sema, None)
     }

@@ -66,7 +66,11 @@ macro_rules! _bail {
 pub use _bail as bail;
 impl Definition {
-    pub fn rename(&self, sema: &Semantics<'_, RootDatabase>, new_name: &str) -> Result<SourceChange> {
+    pub fn rename(
+        &self,
+        sema: &Semantics<'_, RootDatabase>,
+        new_name: &str,
+    ) -> Result<SourceChange> {
         match *self {
             Definition::Module(module) => rename_mod(sema, module, new_name),
             Definition::BuiltinType(_) => {

@@ -18,7 +18,11 @@ use crate::{fix, Assist, Diagnostic, DiagnosticsContext, Severity};
 //
 // This diagnostic is shown for files that are not included in any crate, or files that are part of
 // crates rust-analyzer failed to discover. The file will not have IDE features available.
-pub(crate) fn unlinked_file(ctx: &DiagnosticsContext<'_>, acc: &mut Vec<Diagnostic>, file_id: FileId) {
+pub(crate) fn unlinked_file(
+    ctx: &DiagnosticsContext<'_>,
+    acc: &mut Vec<Diagnostic>,
+    file_id: FileId,
+) {
     // Limit diagnostic to the first few characters in the file. This matches how VS Code
     // renders it with the full span, but on other editors, and is less invasive.
     let range = ctx.sema.db.parse(file_id).syntax_node().text_range();

@@ -15,7 +15,10 @@ use crate::MatchFinder;
 /// Attempts to build an SSR MatchFinder from a comment at the given file
 /// range. If successful, returns the MatchFinder and a TextRange covering
 /// comment.
-pub fn ssr_from_comment(db: &RootDatabase, frange: FileRange) -> Option<(MatchFinder<'_>, TextRange)> {
+pub fn ssr_from_comment(
+    db: &RootDatabase,
+    frange: FileRange,
+) -> Option<(MatchFinder<'_>, TextRange)> {
     let comment = {
         let file = db.parse(frange.file_id);
         file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)

@@ -54,7 +54,11 @@ impl MatchCollector {
 }
 /// Attempts to add `m` as a sub-match of `existing`.
-fn try_add_sub_match(m: Match, existing: &mut Match, sema: &hir::Semantics<'_, ide_db::RootDatabase>) {
+fn try_add_sub_match(
+    m: Match,
+    existing: &mut Match,
+    sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+) {
     for p in existing.placeholder_values.values_mut() {
         // Note, no need to check if p.range.file is equal to m.range.file, since we
         // already know we're within `existing`.

@@ -107,7 +107,10 @@ fn expand_macro_recur(
     expand(sema, expanded, ast::MacroCall::cast, expand_macro_recur)
 }
-fn expand_attr_macro_recur(sema: &Semantics<'_, RootDatabase>, item: &ast::Item) -> Option<SyntaxNode> {
+fn expand_attr_macro_recur(
+    sema: &Semantics<'_, RootDatabase>,
+    item: &ast::Item,
+) -> Option<SyntaxNode> {
     let expanded = sema.expand_attr_macro(item)?.clone_for_update();
     expand(sema, expanded, ast::Item::cast, expand_attr_macro_recur)
 }

@@ -90,7 +90,10 @@ fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type) -> Vec<Naviga
     Impl::all_for_type(sema.db, ty).into_iter().filter_map(|imp| imp.try_to_nav(sema.db)).collect()
 }
-fn impls_for_trait(sema: &Semantics<'_, RootDatabase>, trait_: hir::Trait) -> Vec<NavigationTarget> {
+fn impls_for_trait(
+    sema: &Semantics<'_, RootDatabase>,
+    trait_: hir::Trait,
+) -> Vec<NavigationTarget> {
     Impl::all_for_trait(sema.db, trait_)
         .into_iter()
         .filter_map(|imp| imp.try_to_nav(sema.db))

@@ -201,7 +201,10 @@ fn find_definitions(
     }
 }
-fn rename_to_self(sema: &Semantics<'_, RootDatabase>, local: hir::Local) -> RenameResult<SourceChange> {
+fn rename_to_self(
+    sema: &Semantics<'_, RootDatabase>,
+    local: hir::Local,
+) -> RenameResult<SourceChange> {
     if never!(local.is_self(sema.db)) {
         bail!("rename_to_self invoked on self");
     }

@@ -304,7 +304,10 @@ fn parent_test_module(sema: &Semantics<'_, RootDatabase>, fn_def: &ast::Fn) -> O
     })
 }
-pub(crate) fn runnable_fn(sema: &Semantics<'_, RootDatabase>, def: hir::Function) -> Option<Runnable> {
+pub(crate) fn runnable_fn(
+    sema: &Semantics<'_, RootDatabase>,
+    def: hir::Function,
+) -> Option<Runnable> {
     let func = def.source(sema.db)?;
     let name = def.name(sema.db).to_smol_str();
@@ -340,7 +343,10 @@ pub(crate) fn runnable_fn(sema: &Semantics<'_, RootDatabase>, def: hir::Function
     Some(Runnable { use_name_in_title: false, nav, kind, cfg })
 }
-pub(crate) fn runnable_mod(sema: &Semantics<'_, RootDatabase>, def: hir::Module) -> Option<Runnable> {
+pub(crate) fn runnable_mod(
+    sema: &Semantics<'_, RootDatabase>,
+    def: hir::Module,
+) -> Option<Runnable> {
     if !has_test_function_or_multiple_test_submodules(sema, &def) {
         return None;
     }
@@ -353,7 +359,10 @@ pub(crate) fn runnable_mod(sema: &Semantics<'_, RootDatabase>, def: hir::Module)
     Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::TestMod { path }, cfg })
 }
-pub(crate) fn runnable_impl(sema: &Semantics<'_, RootDatabase>, def: &hir::Impl) -> Option<Runnable> {
+pub(crate) fn runnable_impl(
+    sema: &Semantics<'_, RootDatabase>,
+    def: &hir::Impl,
+) -> Option<Runnable> {
     let attrs = def.attrs(sema.db);
     if !has_runnable_doc_test(&attrs) {
         return None;

@@ -79,7 +79,11 @@ pub(super) fn name_like(
     Some((highlight, binding_hash))
 }
-fn punctuation(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, kind: SyntaxKind) -> Highlight {
+fn punctuation(
+    sema: &Semantics<'_, RootDatabase>,
+    token: SyntaxToken,
+    kind: SyntaxKind,
+) -> Highlight {
     let parent = token.parent();
     let parent_kind = parent.as_ref().map_or(EOF, SyntaxNode::kind);
     match (kind, parent_kind) {
@@ -321,7 +325,11 @@ fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 {
     hash((name, shadow_count))
 }
-fn highlight_def(sema: &Semantics<'_, RootDatabase>, krate: hir::Crate, def: Definition) -> Highlight {
+fn highlight_def(
+    sema: &Semantics<'_, RootDatabase>,
+    krate: hir::Crate,
+    def: Definition,
+) -> Highlight {
     let db = sema.db;
     let mut h = match def {
         Definition::Macro(m) => Highlight::new(HlTag::Symbol(m.kind(sema.db).into())),

@@ -19,7 +19,10 @@ pub(super) fn expr(p: &mut Parser<'_>) -> bool {
     expr_bp(p, None, r, 1).is_some()
 }
-pub(super) fn expr_stmt(p: &mut Parser<'_>, m: Option<Marker>) -> Option<(CompletedMarker, BlockLike)> {
+pub(super) fn expr_stmt(
+    p: &mut Parser<'_>,
+    m: Option<Marker>,
+) -> Option<(CompletedMarker, BlockLike)> {
     let r = Restrictions { forbid_structs: false, prefer_stmt: true };
     expr_bp(p, m, r, 1)
 }

@@ -60,7 +60,10 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
 const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![let]]);
-pub(super) fn atom_expr(p: &mut Parser<'_>, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
+pub(super) fn atom_expr(
+    p: &mut Parser<'_>,
+    r: Restrictions,
+) -> Option<(CompletedMarker, BlockLike)> {
     if let Some(m) = literal(p) {
         return Some((m, BlockLike::NotBlock));
     }

@@ -27,7 +27,10 @@ pub(super) fn expr_path(p: &mut Parser<'_>) {
     path(p, Mode::Expr);
 }
-pub(crate) fn type_path_for_qualifier(p: &mut Parser<'_>, qual: CompletedMarker) -> CompletedMarker {
+pub(crate) fn type_path_for_qualifier(
+    p: &mut Parser<'_>,
+    qual: CompletedMarker,
+) -> CompletedMarker {
     path_for_qualifier(p, Mode::Type, qual)
 }
@@ -45,7 +48,11 @@ fn path(p: &mut Parser<'_>, mode: Mode) {
     path_for_qualifier(p, mode, qual);
 }
-fn path_for_qualifier(p: &mut Parser<'_>, mode: Mode, mut qual: CompletedMarker) -> CompletedMarker {
+fn path_for_qualifier(
+    p: &mut Parser<'_>,
+    mode: Mode,
+    mut qual: CompletedMarker,
+) -> CompletedMarker {
     loop {
         let use_tree = matches!(p.nth(2), T![*] | T!['{']);
         if p.at(T![::]) && !use_tree {