mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-25 12:33:33 +00:00)

commit 7912e33ed6 (parent ff7e057dca)

fix clippy::needless_borrow

36 changed files with 74 additions and 77 deletions
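
For context, clippy's `needless_borrow` lint fires when a call site borrows an expression that is already a reference, so the extra `&` is immediately undone by auto-deref. A minimal, self-contained sketch of the pattern (hypothetical names, not code from this commit):

// Illustrative only: a function that takes a string slice by reference.
fn print_len(s: &str) {
    println!("{} is {} bytes", s, s.len());
}

fn main() {
    let name = String::from("rust-analyzer");
    let name_ref: &String = &name;

    // `name_ref` is already a reference, so the `&` below only adds a layer
    // (`&&String`) that the compiler strips again to match `&str`.
    print_len(&name_ref); // warns under clippy::needless_borrow

    // The form this commit converts call sites to:
    print_len(name_ref);
}

Each hunk below makes the same kind of change: drop the redundant borrow at the call site. Fixes like these are usually machine-applicable, e.g. via cargo clippy --fix.
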
@@ -398,7 +398,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     }
 
     pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        self.imp.scope_at_offset(&node, offset)
+        self.imp.scope_at_offset(node, offset)
     }
 
     pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -603,7 +603,7 @@ fn resolve_hir_path_(
     // within the trait's associated types.
     if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
         if let Some(type_alias_id) =
-            db.trait_data(trait_id).associated_type_by_name(&unresolved.name)
+            db.trait_data(trait_id).associated_type_by_name(unresolved.name)
         {
             return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
         }
@@ -667,7 +667,7 @@ impl DocsRangeMap {
         let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
         match source {
             Either::Left(attr) => {
-                let string = get_doc_string_in_attr(&attr)?;
+                let string = get_doc_string_in_attr(attr)?;
                 let text_range = string.open_quote_text_range()?;
                 let range = TextRange::at(
                     text_range.end() + original_line_src_range.start() + relative_range.start(),
@@ -72,7 +72,7 @@ impl TypeOrConstParamData {
 
     pub fn type_param(&self) -> Option<&TypeParamData> {
         match self {
-            TypeOrConstParamData::TypeParamData(x) => Some(&x),
+            TypeOrConstParamData::TypeParamData(x) => Some(x),
             TypeOrConstParamData::ConstParamData(_) => None,
         }
     }
@@ -500,7 +500,7 @@ impl<'a> Printer<'a> {
                 if i != 0 {
                     w!(self, ", ");
                 }
-                self.print_type_ref(&typeref);
+                self.print_type_ref(typeref);
             }
             if *varargs {
                 if !args.is_empty() {
@@ -509,7 +509,7 @@ impl<'a> Printer<'a> {
                     w!(self, "...");
                 }
                 w!(self, ") -> ");
-                self.print_type_ref(&return_type);
+                self.print_type_ref(return_type);
             }
             TypeRef::Macro(_ast_id) => {
                 w!(self, "<macro>");
@@ -191,7 +191,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
     if let Some((tree, map, _)) = arg.as_deref() {
         let tt_range = call.token_tree().unwrap().syntax().text_range();
         let mut ranges = Vec::new();
-        extract_id_ranges(&mut ranges, &map, &tree);
+        extract_id_ranges(&mut ranges, map, tree);
         for (range, id) in ranges {
            let idx = (tt_range.start() + range.end()).into();
            text_edits.push((idx..idx, format!("#{}", id.0)));
@@ -269,7 +269,7 @@ fn reindent(indent: IndentLevel, pp: String) -> String {
     let mut res = lines.next().unwrap().to_string();
     for line in lines {
         if line.trim().is_empty() {
-            res.push_str(&line)
+            res.push_str(line)
         } else {
             format_to!(res, "{}{}", indent, line)
         }
@@ -245,7 +245,7 @@ impl TypeRef {
         f(type_ref);
         match type_ref {
             TypeRef::Fn(params, _) => {
-                params.iter().for_each(|(_, param_type)| go(&param_type, f))
+                params.iter().for_each(|(_, param_type)| go(param_type, f))
             }
             TypeRef::Tuple(types) => types.iter().for_each(|t| go(t, f)),
             TypeRef::RawPtr(type_ref, _)
@@ -149,11 +149,11 @@ pub fn expand_speculative(
     let token_range = token_to_map.text_range();
 
     // Build the subtree and token mapping for the speculative args
-    let censor = censor_for_macro_input(&loc, &speculative_args);
-    let mut fixups = fixup::fixup_syntax(&speculative_args);
+    let censor = censor_for_macro_input(&loc, speculative_args);
+    let mut fixups = fixup::fixup_syntax(speculative_args);
     fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
     let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
-        &speculative_args,
+        speculative_args,
         fixups.token_map,
         fixups.next_id,
         fixups.replace,
@@ -207,7 +207,7 @@ fn eager_macro_recur(
 
     // Collect replacement
     for child in children {
-        let def = match child.path().and_then(|path| ModPath::from_src(db, path, &hygiene)) {
+        let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
            Some(path) => macro_resolver(path.clone()).ok_or_else(|| UnresolvedMacro { path })?,
            None => {
                diagnostic_sink(ExpandError::Other("malformed macro invocation".into()));
@@ -293,7 +293,7 @@ pub fn eval_const(expr: &Expr, ctx: &mut ConstEvalCtx<'_>) -> Result<ComputedExp
 
 pub fn eval_usize(expr: Idx<Expr>, mut ctx: ConstEvalCtx<'_>) -> Option<u64> {
     let expr = &ctx.exprs[expr];
-    if let Ok(ce) = eval_const(&expr, &mut ctx) {
+    if let Ok(ce) = eval_const(expr, &mut ctx) {
         match ce {
             ComputedExpr::Literal(Literal::Int(x, _)) => return x.try_into().ok(),
             ComputedExpr::Literal(Literal::Uint(x, _)) => return x.try_into().ok(),
@@ -693,7 +693,7 @@ fn iterate_method_candidates_with_autoref(
     iterate_method_candidates_by_receiver(
         receiver_ty,
         first_adjustment.clone(),
-        &rest,
+        rest,
         db,
         env.clone(),
         traits_in_scope,
@@ -973,7 +973,7 @@ fn iterate_inherent_methods(
             // already happens in `is_valid_candidate` above; if not, we
             // check it here
             if receiver_ty.is_none()
-                && inherent_impl_substs(db, env.clone(), impl_def, &self_ty).is_none()
+                && inherent_impl_substs(db, env.clone(), impl_def, self_ty).is_none()
             {
                 cov_mark::hit!(impl_self_type_match_without_receiver);
                 continue;
@@ -1152,7 +1152,7 @@ pub fn implements_trait(
     env: Arc<TraitEnvironment>,
     trait_: TraitId,
 ) -> bool {
-    let goal = generic_implements_goal(db, env.clone(), trait_, &ty);
+    let goal = generic_implements_goal(db, env.clone(), trait_, ty);
     let solution = db.trait_solve(env.krate, goal.cast(Interner));
 
     solution.is_some()
@@ -1164,7 +1164,7 @@ pub fn implements_trait_unique(
     env: Arc<TraitEnvironment>,
     trait_: TraitId,
 ) -> bool {
-    let goal = generic_implements_goal(db, env.clone(), trait_, &ty);
+    let goal = generic_implements_goal(db, env.clone(), trait_, ty);
     let solution = db.trait_solve(env.krate, goal.cast(Interner));
 
     matches!(solution, Some(crate::Solution::Unique(_)))
@@ -145,7 +145,7 @@ pub(crate) fn hover(
     if result.is_none() {
         // fallbacks, show keywords or types
 
-        let res = descended.iter().find_map(|token| render::keyword(sema, config, &token));
+        let res = descended.iter().find_map(|token| render::keyword(sema, config, token));
         if let Some(res) = res {
             return Some(RangeInfo::new(original_token.text_range(), res));
         }
@@ -103,7 +103,7 @@ pub(super) fn try_expr(
 
     let adts = inner_ty.as_adt().zip(body_ty.as_adt());
     if let Some((hir::Adt::Enum(inner), hir::Adt::Enum(body))) = adts {
-        let famous_defs = FamousDefs(sema, sema.scope(&try_expr.syntax()).krate());
+        let famous_defs = FamousDefs(sema, sema.scope(try_expr.syntax()).krate());
         // special case for two options, there is no value in showing them
         if let Some(option_enum) = famous_defs.core_option_Option() {
             if inner == option_enum && body == option_enum {
@@ -41,7 +41,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) ->
     let match_arm_list = match_expr.match_arm_list()?;
     let target_range = ctx.sema.original_range(match_expr.syntax()).range;
 
-    if let None = cursor_at_trivial_match_arm_list(&ctx, &match_expr, &match_arm_list) {
+    if let None = cursor_at_trivial_match_arm_list(ctx, &match_expr, &match_arm_list) {
         let arm_list_range = ctx.sema.original_range(match_arm_list.syntax()).range;
         let cursor_in_range = arm_list_range.contains_range(ctx.selection_trimmed());
         if cursor_in_range {
@@ -1448,7 +1448,7 @@ fn make_body(
         .filter(|it| text_range.contains_range(it.text_range()))
         .map(|it| match &it {
             syntax::NodeOrToken::Node(n) => syntax::NodeOrToken::Node(
-                rewrite_body_segment(ctx, &fun.params, &handler, &n),
+                rewrite_body_segment(ctx, &fun.params, &handler, n),
             ),
             _ => it,
         })
@@ -103,7 +103,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext) -> Option<(
     //for change_visibility and usages for first point mentioned above in the process
     let (usages_to_be_processed, record_fields) = module.get_usages_and_record_fields(ctx);
 
-    let import_paths_to_be_removed = module.resolve_imports(curr_parent_module, &ctx);
+    let import_paths_to_be_removed = module.resolve_imports(curr_parent_module, ctx);
     module.body_items = module.change_visibility(record_fields)?;
     if module.body_items.len() == 0 {
         return None;
@@ -203,7 +203,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext) -> Option<(
 
                 builder.delete(node_to_be_removed.text_range());
                 // Remove preceding indentation from node
-                if let Some(range) = indent_range_before_given_node(&node_to_be_removed) {
+                if let Some(range) = indent_range_before_given_node(node_to_be_removed) {
                     builder.delete(range);
                 }
 
@@ -700,7 +700,7 @@ fn does_source_exists_outside_sel_in_same_mod(
             if let Some(ast_module) = &curr_parent_module {
                 if let Some(hir_module) = x.parent(ctx.db()) {
                     have_same_parent =
-                        compare_hir_and_ast_module(&ast_module, hir_module, ctx).is_some();
+                        compare_hir_and_ast_module(ast_module, hir_module, ctx).is_some();
                 } else {
                     let source_file_id = source.file_id.original_file(ctx.db());
                     have_same_parent = source_file_id == curr_file_id;
@@ -725,7 +725,7 @@ fn does_source_exists_outside_sel_in_same_mod(
             let have_same_parent;
             if let Some(ast_module) = &curr_parent_module {
                 have_same_parent =
-                    compare_hir_and_ast_module(&ast_module, x.module(ctx.db()), ctx).is_some();
+                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some();
             } else {
                 let source_file_id = source.file_id.original_file(ctx.db());
                 have_same_parent = source_file_id == curr_file_id;
@@ -742,7 +742,7 @@ fn does_source_exists_outside_sel_in_same_mod(
             let have_same_parent;
             if let Some(ast_module) = &curr_parent_module {
                 have_same_parent =
-                    compare_hir_and_ast_module(&ast_module, x.module(ctx.db()), ctx).is_some();
+                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some();
             } else {
                 let source_file_id = source.file_id.original_file(ctx.db());
                 have_same_parent = source_file_id == curr_file_id;
@@ -759,7 +759,7 @@ fn does_source_exists_outside_sel_in_same_mod(
             let have_same_parent;
             if let Some(ast_module) = &curr_parent_module {
                 have_same_parent =
-                    compare_hir_and_ast_module(&ast_module, x.module(ctx.db()), ctx).is_some();
+                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some();
             } else {
                 let source_file_id = source.file_id.original_file(ctx.db());
                 have_same_parent = source_file_id == curr_file_id;
@@ -776,7 +776,7 @@ fn does_source_exists_outside_sel_in_same_mod(
             let have_same_parent;
             if let Some(ast_module) = &curr_parent_module {
                 have_same_parent =
-                    compare_hir_and_ast_module(&ast_module, x.module(ctx.db()), ctx).is_some();
+                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some();
             } else {
                 let source_file_id = source.file_id.original_file(ctx.db());
                 have_same_parent = source_file_id == curr_file_id;
@@ -793,7 +793,7 @@ fn does_source_exists_outside_sel_in_same_mod(
             let have_same_parent;
             if let Some(ast_module) = &curr_parent_module {
                 have_same_parent =
-                    compare_hir_and_ast_module(&ast_module, x.module(ctx.db()), ctx).is_some();
+                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some();
             } else {
                 let source_file_id = source.file_id.original_file(ctx.db());
                 have_same_parent = source_file_id == curr_file_id;
@@ -810,7 +810,7 @@ fn does_source_exists_outside_sel_in_same_mod(
             let have_same_parent;
             if let Some(ast_module) = &curr_parent_module {
                 have_same_parent =
-                    compare_hir_and_ast_module(&ast_module, x.module(ctx.db()), ctx).is_some();
+                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some();
             } else {
                 let source_file_id = source.file_id.original_file(ctx.db());
                 have_same_parent = source_file_id == curr_file_id;
@@ -827,7 +827,7 @@ fn does_source_exists_outside_sel_in_same_mod(
             let have_same_parent;
             if let Some(ast_module) = &curr_parent_module {
                 have_same_parent =
-                    compare_hir_and_ast_module(&ast_module, x.module(ctx.db()), ctx).is_some();
+                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some();
             } else {
                 let source_file_id = source.file_id.original_file(ctx.db());
                 have_same_parent = source_file_id == curr_file_id;
@@ -52,7 +52,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
         }
     }
 
-    let reference_modifier = match get_receiver_type(&ctx, &to_extract) {
+    let reference_modifier = match get_receiver_type(ctx, &to_extract) {
         Some(receiver_type) if receiver_type.is_mutable_reference() => "&mut ",
         Some(receiver_type) if receiver_type.is_reference() => "&",
         _ => "",
@@ -52,7 +52,7 @@ pub(crate) fn generate_documentation_template(
 
     let parent_syntax = ast_func.syntax();
     let text_range = parent_syntax.text_range();
-    let indent_level = IndentLevel::from_node(&parent_syntax);
+    let indent_level = IndentLevel::from_node(parent_syntax);
 
     acc.add(
         AssistId("generate_documentation_template", AssistKind::Generate),
@@ -202,7 +202,7 @@ fn all_parent_mods_public(hir_func: &hir::Function, ctx: &AssistContext) -> bool
 
 /// Returns the name of the current crate
 fn crate_name(ast_func: &ast::Fn, ctx: &AssistContext) -> Option<String> {
-    let krate = ctx.sema.scope(&ast_func.syntax()).module()?.krate();
+    let krate = ctx.sema.scope(ast_func.syntax()).module()?.krate();
     Some(krate.display_name(ctx.db())?.to_string())
 }
 
@@ -338,7 +338,7 @@ fn function_call(
     is_unsafe: bool,
 ) -> Option<String> {
     let name = ast_func.name()?;
-    let arguments = arguments_from_params(&param_list);
+    let arguments = arguments_from_params(param_list);
     let function_call = if param_list.self_param().is_some() {
         format!("{}.{}({})", self_name?, name, arguments)
     } else if let Some(implementation) = self_partial_type(ast_func) {
@@ -305,7 +305,7 @@ fn inline(
     let body = fn_body.clone_for_update();
     let usages_for_locals = |local| {
         Definition::Local(local)
-            .usages(&sema)
+            .usages(sema)
             .all()
             .references
             .remove(&function_def_file_id)
@@ -369,12 +369,12 @@ fn inline(
         // inline single use literals
         [usage] if matches!(expr, ast::Expr::Literal(_)) => {
             cov_mark::hit!(inline_call_inline_literal);
-            inline_direct(usage, &expr);
+            inline_direct(usage, expr);
         }
         // inline direct local arguments
-        [_, ..] if expr_as_name_ref(&expr).is_some() => {
+        [_, ..] if expr_as_name_ref(expr).is_some() => {
             cov_mark::hit!(inline_call_inline_locals);
-            usages.into_iter().for_each(|usage| inline_direct(usage, &expr));
+            usages.into_iter().for_each(|usage| inline_direct(usage, expr));
         }
         // can't inline, emit a let statement
         _ => {
@@ -40,7 +40,7 @@ pub(crate) fn merge_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option
     }
     let current_expr = current_arm.expr()?;
     let current_text_range = current_arm.syntax().text_range();
-    let current_arm_types = get_arm_types(&ctx, &current_arm);
+    let current_arm_types = get_arm_types(ctx, &current_arm);
 
     // We check if the following match arms match this one. We could, but don't,
     // compare to the previous match arm as well.
@@ -99,7 +99,7 @@ fn are_same_types(
     arm: &ast::MatchArm,
     ctx: &AssistContext,
 ) -> bool {
-    let arm_types = get_arm_types(&ctx, &arm);
+    let arm_types = get_arm_types(ctx, arm);
     for (other_arm_type_name, other_arm_type) in arm_types {
         match (current_arm_types.get(&other_arm_type_name), other_arm_type) {
             (Some(Some(current_arm_type)), Some(other_arm_type))
@@ -163,7 +163,7 @@ fn get_arm_types(
         }
     }
 
-    recurse(&mut mapping, &context, &arm.pat());
+    recurse(&mut mapping, context, &arm.pat());
     mapping
 }
 
@@ -44,7 +44,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext) -> Opt
     let range = call.syntax().text_range();
     let resolved_call = ctx.sema.resolve_method_call(&call)?;
 
-    let current_module = ctx.sema.scope(&call.syntax()).module()?;
+    let current_module = ctx.sema.scope(call.syntax()).module()?;
     let target_module_def = ModuleDef::from(resolved_call);
     let item_in_ns = ItemInNs::from(target_module_def);
     let receiver_path = current_module
@@ -406,7 +406,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
 }
 
 fn gen_record_pat_field(field_name: &str, pat_name: &str) -> ast::RecordPatField {
-    let pat = make::ext::simple_ident_pat(make::name(&pat_name));
+    let pat = make::ext::simple_ident_pat(make::name(pat_name));
     let name_ref = make::name_ref(field_name);
     make::record_pat_field(name_ref, pat.into())
 }
@@ -455,10 +455,10 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                 let field_name = field.name()?.to_string();
 
                 let l_name = &format!("l_{}", field_name);
-                l_fields.push(gen_record_pat_field(&field_name, &l_name));
+                l_fields.push(gen_record_pat_field(&field_name, l_name));
 
                 let r_name = &format!("r_{}", field_name);
-                r_fields.push(gen_record_pat_field(&field_name, &r_name));
+                r_fields.push(gen_record_pat_field(&field_name, r_name));
 
                 let lhs = make::expr_path(make::ext::ident_path(l_name));
                 let rhs = make::expr_path(make::ext::ident_path(r_name));
@@ -29,7 +29,7 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext) {
         Some("target_endian") => ["little", "big"].into_iter().for_each(add_completion),
         Some(name) => {
             if let Some(krate) = ctx.krate {
-                krate.potential_cfg(ctx.db).get_cfg_values(&name).cloned().for_each(|s| {
+                krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| {
                     let insert_text = format!(r#""{}""#, s);
                     let mut item =
                         CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s);
@@ -31,7 +31,7 @@ pub(crate) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
         CompletionItem::new(CompletionItemKind::Binding, ctx.source_range(), label)
     };
     let mut item = match &comma_wrapper {
-        Some(fmt) => mk_item(&fmt(&label)),
+        Some(fmt) => mk_item(&fmt(label)),
         None => mk_item(label),
     };
     item.lookup_by(lookup);
@@ -40,7 +40,7 @@ pub(crate) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
 
     match param_kind {
         ParamKind::Function(function) => {
-            fill_fn_params(ctx, function, &param_list, add_new_item_to_acc);
+            fill_fn_params(ctx, function, param_list, add_new_item_to_acc);
         }
         ParamKind::Closure(closure) => {
             let stmt_list = closure.syntax().ancestors().find_map(ast::StmtList::cast)?;
@@ -51,7 +51,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
         None => return,
     };
 
-    let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, &dot_receiver) {
+    let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, dot_receiver) {
         Some(it) => it,
         None => return,
     };
@@ -265,7 +265,7 @@ fn add_custom_postfix_completions(
             Some(imports) => imports,
            None => return,
        };
-        let body = snippet.postfix_snippet(&receiver_text);
+        let body = snippet.postfix_snippet(receiver_text);
        let mut builder =
            postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body);
        builder.documentation(Documentation::new(format!("```rust\n{}\n```", body)));
@@ -74,7 +74,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
         }
         _ => {
             // Add associated types on type parameters and `Self`.
-            ctx.scope.assoc_type_shorthand_candidates(&resolution, |_, alias| {
+            ctx.scope.assoc_type_shorthand_candidates(resolution, |_, alias| {
                 acc.add_type_alias(ctx, alias);
                 None::<()>
             });
@@ -112,7 +112,7 @@ fn add_custom_completions(
            None => return,
        };
        let body = snip.snippet();
-        let mut builder = snippet(ctx, cap, &trigger, &body);
+        let mut builder = snippet(ctx, cap, trigger, &body);
        builder.documentation(Documentation::new(format!("```rust\n{}\n```", body)));
        for import in imports.into_iter() {
            builder.add_import(import);
@@ -150,9 +150,9 @@ impl IdentClass {
         sema: &Semantics<RootDatabase>,
         lifetime: &ast::Lifetime,
     ) -> Option<IdentClass> {
-        NameRefClass::classify_lifetime(sema, &lifetime)
+        NameRefClass::classify_lifetime(sema, lifetime)
             .map(IdentClass::NameRefClass)
-            .or_else(|| NameClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameClass))
+            .or_else(|| NameClass::classify_lifetime(sema, lifetime).map(IdentClass::NameClass))
     }
 
     pub fn definitions(self) -> ArrayVec<Definition, 2> {
@@ -75,7 +75,7 @@ fn try_merge_trees_mut(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehav
         lhs.split_prefix(&lhs_prefix);
         rhs.split_prefix(&rhs_prefix);
     }
-    recursive_merge(&lhs, &rhs, merge)
+    recursive_merge(lhs, rhs, merge)
 }
 
 /// Recursively merges rhs to lhs
@@ -157,7 +157,7 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior)
                 }
                 lhs_t.split_prefix(&lhs_prefix);
                 rhs_t.split_prefix(&rhs_prefix);
-                recursive_merge(&lhs_t, &rhs_t, merge)?;
+                recursive_merge(lhs_t, &rhs_t, merge)?;
             }
             Err(_)
                 if merge == MergeBehavior::Module
@@ -69,7 +69,7 @@ pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
                 if indent > 0 {
                     mods.push(do_indent(after, tok, indent));
                 }
-                mods.push(do_nl(after, &tok));
+                mods.push(do_nl(after, tok));
             }
             R_CURLY if is_last(|it| it != L_CURLY, true) => {
                 indent = indent.saturating_sub(1);
@@ -85,7 +85,7 @@ fn generate_lint_descriptor(buf: &mut String) {
         .sorted_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2))
         .collect::<Vec<_>>();
     for (name, description, ..) in &lints {
-        push_lint_completion(buf, &name.replace("-", "_"), &description);
+        push_lint_completion(buf, &name.replace("-", "_"), description);
     }
     buf.push_str("];\n");
     buf.push_str(r#"pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &["#);
@@ -93,10 +93,10 @@ fn generate_lint_descriptor(buf: &mut String) {
         if !children.is_empty() {
             // HACK: warnings is emitted with a general description, not with its members
             if name == &"warnings" {
-                push_lint_group(buf, &name, &description, &Vec::new());
+                push_lint_group(buf, name, description, &Vec::new());
                 continue;
             }
-            push_lint_group(buf, &name.replace("-", "_"), &description, children);
+            push_lint_group(buf, &name.replace("-", "_"), description, children);
         }
     }
     buf.push('\n');
@@ -136,14 +136,14 @@ fn generate_lint_descriptor(buf: &mut String) {
         .collect::<Vec<_>>();
 
     for (name, description, ..) in &lints_rustdoc {
-        push_lint_completion(buf, &name.replace("-", "_"), &description)
+        push_lint_completion(buf, &name.replace("-", "_"), description)
     }
     buf.push_str("];\n");
 
     buf.push_str(r#"pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &["#);
     for (name, description, children) in &lints_rustdoc {
         if !children.is_empty() {
-            push_lint_group(buf, &name.replace("-", "_"), &description, children);
+            push_lint_group(buf, &name.replace("-", "_"), description, children);
         }
     }
     buf.push('\n');
@@ -170,9 +170,9 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
             Some(it) => it,
            None => break,
        };
-        let synth_id = token.synthetic_id(&conv);
+        let synth_id = token.synthetic_id(conv);
 
-        let kind = token.kind(&conv);
+        let kind = token.kind(conv);
        if kind == COMMENT {
            if let Some(tokens) = conv.convert_doc_comment(&token) {
                // FIXME: There has to be a better way to do this
@@ -227,7 +227,7 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
                     continue;
                 }
 
-                let spacing = match conv.peek().map(|next| next.kind(&conv)) {
+                let spacing = match conv.peek().map(|next| next.kind(conv)) {
                     Some(kind)
                         if !kind.is_trivia()
                             && kind.is_punct()
@@ -240,7 +240,7 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
                     }
                     _ => tt::Spacing::Alone,
                 };
-                let char = match token.to_char(&conv) {
+                let char = match token.to_char(conv) {
                     Some(c) => c,
                     None => {
                         panic!("Token from lexer must be single char: token = {:#?}", token);
@@ -74,14 +74,11 @@ pub trait Message: Serialize + DeserializeOwned {
 impl Message for Request {}
 impl Message for Response {}
 
-fn read_json<'a>(
-    inp: &mut impl BufRead,
-    mut buf: &'a mut String,
-) -> io::Result<Option<&'a String>> {
+fn read_json<'a>(inp: &mut impl BufRead, buf: &'a mut String) -> io::Result<Option<&'a String>> {
     loop {
         buf.clear();
 
-        inp.read_line(&mut buf)?;
+        inp.read_line(buf)?;
         buf.pop(); // Remove trailing '\n'
 
         if buf.is_empty() {
@@ -121,7 +121,7 @@ impl ProcMacroLibraryLibloading {
         let abs_file: &AbsPath = file.try_into().map_err(|_| {
             invalid_data_err(format!("expected an absolute path, got {}", file.display()))
         })?;
-        let version_info = read_dylib_info(&abs_file)?;
+        let version_info = read_dylib_info(abs_file)?;
 
         let lib = load_library(file).map_err(invalid_data_err)?;
         let abi = Abi::from_lib(&lib, symbol_name, version_info)?;
@@ -24,7 +24,7 @@ fn diagnostic_severity(
         // HACK: special case for `warnings` rustc lint.
         Some(code)
             if config.warnings_as_hint.iter().any(|lint| {
-                lint == "warnings" || ide_db::helpers::lint_eq_or_in_group(&code.code, &lint)
+                lint == "warnings" || ide_db::helpers::lint_eq_or_in_group(&code.code, lint)
             }) =>
         {
             lsp_types::DiagnosticSeverity::HINT
@@ -32,7 +32,7 @@ fn diagnostic_severity(
         // HACK: special case for `warnings` rustc lint.
         Some(code)
             if config.warnings_as_info.iter().any(|lint| {
-                lint == "warnings" || ide_db::helpers::lint_eq_or_in_group(&code.code, &lint)
+                lint == "warnings" || ide_db::helpers::lint_eq_or_in_group(&code.code, lint)
             }) =>
         {
             lsp_types::DiagnosticSeverity::INFORMATION
@@ -48,7 +48,7 @@ impl<'a> RequestDispatcher<'a> {
         };
         let _pctx = stdx::panic_context::enter(panic_context);
 
-        let result = f(&mut self.global_state, params);
+        let result = f(self.global_state, params);
         let response = result_to_response::<R>(id, result);
 
         self.global_state.respond(response);