Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-25 12:33:33 +00:00)
Some clippy cleanups

commit 6753051a45, parent c1e10a24fa
15 changed files with 49 additions and 55 deletions
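
The hunks below are mechanical fixes for common clippy lints: needless_return, redundant_closure, len_zero, clone_on_copy, redundant_field_names, or_fun_call, single_match, and a few relatives. As a minimal standalone sketch of the most frequent pattern, clippy::needless_return, using hypothetical names that are not taken from rust-analyzer:

// Sketch of clippy::needless_return: a trailing expression already yields
// the function's value, so an explicit `return ...;` at the end is redundant.
fn classify(n: i32) -> &'static str {
    // Before: `return match n { ... };`
    // After: the match itself is the tail expression.
    match n {
        0 => "zero",
        _ if n > 0 => "positive",
        _ => "negative",
    }
}

fn main() {
    assert_eq!(classify(3), "positive");
    println!("{}", classify(-1));
}
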
@@ -8,7 +8,7 @@ use ra_syntax::{

 use crate::{AssistCtx, Assist};

-pub(crate) fn introduce_variable<'a>(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
+pub(crate) fn introduce_variable(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let node = ctx.covering_node();
     if !valid_covering_node(node) {
         return None;

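The `<'a>` removed above is a lifetime parameter that nothing in the signature uses; clippy flags these (as extra_unused_lifetimes in current releases — the exact lint name is an assumption, not stated in the commit). A standalone sketch with a hypothetical context type:

// Sketch: a declared lifetime that the signature never ties to anything
// is noise and can simply be deleted.
struct Ctx;

// Flagged form:
// fn run<'a>(ctx: Ctx) -> Option<u32> { ... }

// Cleaned form:
fn run(ctx: Ctx) -> Option<u32> {
    let _ = ctx;
    Some(42)
}

fn main() {
    assert_eq!(run(Ctx), Some(42));
}
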
@@ -61,13 +61,13 @@ fn valid_covering_node(node: &SyntaxNode) -> bool {
 /// Check wether the node is a valid expression which can be extracted to a variable.
 /// In general that's true for any expression, but in some cases that would produce invalid code.
 fn valid_target_expr(node: &SyntaxNode) -> Option<&ast::Expr> {
-    return match node.kind() {
+    match node.kind() {
         PATH_EXPR => None,
         BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()),
         RETURN_EXPR => ast::ReturnExpr::cast(node).and_then(|e| e.expr()),
         LOOP_EXPR => ast::ReturnExpr::cast(node).and_then(|e| e.expr()),
         _ => ast::Expr::cast(node),
-    };
+    }
 }

 /// Returns the syntax node which will follow the freshly introduced var

@@ -805,7 +805,7 @@ impl ExprCollector {
             let lit = match child.flavor() {
                 LiteralFlavor::IntNumber { suffix } => {
                     let known_name = suffix
-                        .map(|s| Name::new(s))
+                        .map(Name::new)
                         .and_then(|name| UncertainIntTy::from_name(&name));

                     Literal::Int(

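`.map(|s| Name::new(s))` → `.map(Name::new)` is clippy::redundant_closure: a closure that only forwards its argument can be replaced by the function it calls. A self-contained sketch with a stand-in Name type (not the rust-analyzer one):

// Sketch of clippy::redundant_closure.
#[derive(Debug, PartialEq)]
struct Name(String);

impl Name {
    fn new(s: &str) -> Name {
        Name(s.to_string())
    }
}

fn main() {
    let suffix: Option<&str> = Some("i32");

    // Flagged: the closure only forwards `s` to Name::new.
    let verbose = suffix.map(|s| Name::new(s));

    // Preferred: pass the function directly.
    let concise = suffix.map(Name::new);

    assert_eq!(verbose, concise);
}
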
@@ -815,7 +815,7 @@ impl ExprCollector {
                 }
                 LiteralFlavor::FloatNumber { suffix } => {
                     let known_name = suffix
-                        .map(|s| Name::new(s))
+                        .map(Name::new)
                         .and_then(|name| UncertainFloatTy::from_name(&name));

                     Literal::Float(

@@ -910,7 +910,7 @@ impl ExprCollector {
             }
             ast::PatKind::PathPat(p) => {
                 let path = p.path().and_then(Path::from_ast);
-                path.map(|path| Pat::Path(path)).unwrap_or(Pat::Missing)
+                path.map(Pat::Path).unwrap_or(Pat::Missing)
             }
             ast::PatKind::TuplePat(p) => {
                 let args = p.args().map(|p| self.collect_pat(p)).collect();

@@ -105,7 +105,7 @@ impl ExprScopes {
     fn add_params_bindings(&mut self, scope: ScopeId, params: &[PatId]) {
         let body = Arc::clone(&self.body);
         params
-            .into_iter()
+            .iter()
             .for_each(|pat| self.add_bindings(&body, scope, *pat));
     }

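`params.into_iter()` → `params.iter()` on a `&[PatId]`: calling `into_iter` on a shared slice yields references just like `iter`, so the explicit `iter` states the intent more clearly (clippy's into_iter_on_ref family; the precise lint name is an assumption). A small sketch:

// Sketch: on a shared slice, into_iter() and iter() both yield &T,
// so iter() is the clearer spelling.
fn sum_ids(ids: &[u32]) -> u32 {
    // Flagged: ids.into_iter().copied().sum()
    ids.iter().copied().sum()
}

fn main() {
    let ids = [1_u32, 2, 3];
    assert_eq!(sum_ids(&ids), 6);
}
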
@@ -147,7 +147,7 @@ impl ScopesWithSyntaxMapping {
         })
     }

-    pub fn scope_for_offset<'a>(&'a self, offset: TextUnit) -> Option<ScopeId> {
+    pub fn scope_for_offset(&self, offset: TextUnit) -> Option<ScopeId> {
         self.scopes
             .scope_for
             .iter()

@@ -72,7 +72,7 @@ impl ImplBlock {
     }

     pub fn module(&self) -> Module {
-        self.module_impl_blocks.module.clone()
+        self.module_impl_blocks.module
     }

     pub fn target_trait_ref(&self) -> Option<&TypeRef> {

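Dropping the `.clone()` above is the clippy::clone_on_copy pattern: calling `clone()` on a `Copy` type only hides an implicit copy. A sketch with a hypothetical `Copy` id type (the real `Module` type is assumed, not shown, to be `Copy` here):

// Sketch of clippy::clone_on_copy with a made-up Copy id type.
#[derive(Clone, Copy, Debug, PartialEq)]
struct ModuleId(u32);

struct Holder {
    module: ModuleId,
}

impl Holder {
    // Flagged: self.module.clone() — ModuleId is Copy, so clone() is noise.
    fn module(&self) -> ModuleId {
        self.module
    }
}

fn main() {
    let h = Holder { module: ModuleId(7) };
    assert_eq!(h.module(), ModuleId(7));
}
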
@@ -78,7 +78,7 @@ impl Resolver {
             _ => return PerNs::none(),
         };
         let module_res = item_map.resolve_path(db, module, path);
-        module_res.map(|def| Resolution::Def(def))
+        module_res.map(Resolution::Def)
     }
 }

@@ -1225,7 +1225,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                     Ty::Tuple(ref tuple_args) => &**tuple_args,
                     _ => &[],
                 };
-                let expectations_iter = expectations.into_iter().chain(repeat(&Ty::Unknown));
+                let expectations_iter = expectations.iter().chain(repeat(&Ty::Unknown));

                 let inner_tys = args
                     .iter()

@@ -1398,10 +1398,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         let method_ty = self.insert_type_vars(method_ty);
         let (expected_receiver_ty, param_tys, ret_ty) = match &method_ty {
             Ty::FnPtr(sig) => {
-                if sig.input.len() > 0 {
+                if !sig.input.is_empty() {
                     (
                         sig.input[0].clone(),
-                        sig.input[1..].iter().cloned().collect(),
+                        sig.input[1..].to_vec(),
                         sig.output.clone(),
                     )
                 } else {

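Two lints show up in this hunk: `sig.input.len() > 0` → `!sig.input.is_empty()` (clippy::len_zero) and `sig.input[1..].iter().cloned().collect()` → `sig.input[1..].to_vec()` (clippy's iter_cloned_collect). A small sketch of both on a plain slice of strings:

// Sketch of clippy::len_zero and clippy::iter_cloned_collect.
fn split_first_owned(input: &[String]) -> Option<(String, Vec<String>)> {
    // Flagged: `if input.len() > 0`
    if !input.is_empty() {
        // Flagged: `input[1..].iter().cloned().collect()`
        Some((input[0].clone(), input[1..].to_vec()))
    } else {
        None
    }
}

fn main() {
    let args = vec!["recv".to_string(), "a".to_string(), "b".to_string()];
    let (recv, rest) = split_first_owned(&args).unwrap();
    assert_eq!(recv, "recv");
    assert_eq!(rest, vec!["a".to_string(), "b".to_string()]);
}
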
@@ -1411,7 +1411,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             Ty::FnDef { substs, sig, .. } => {
                 let ret_ty = sig.output.clone().subst(&substs);

-                if sig.input.len() > 0 {
+                if !sig.input.is_empty() {
                     let mut arg_iter = sig.input.iter().map(|ty| ty.clone().subst(&substs));
                     let receiver_ty = arg_iter.next().unwrap();
                     (receiver_ty, arg_iter.collect(), ret_ty)

@@ -113,7 +113,7 @@ impl CrateImplBlocks {
         krate: Crate,
     ) -> Arc<CrateImplBlocks> {
         let mut crate_impl_blocks = CrateImplBlocks {
-            krate: krate.clone(),
+            krate,
             impls: FxHashMap::default(),
             impls_by_trait: FxHashMap::default(),
         };

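`krate: krate.clone()` → `krate` drops a clone that is unneeded (the parameter is consumed anyway) and applies struct field init shorthand, which clippy::redundant_field_names asks for when the variable and the field share a name. A sketch with hypothetical types modeled loosely on the hunk:

// Sketch: field init shorthand once the redundant clone is gone.
use std::collections::HashMap;

#[derive(Clone, Copy, Debug, PartialEq)]
struct Crate(u32);

struct CrateImplBlocks {
    krate: Crate,
    impls: HashMap<u32, Vec<u32>>,
}

fn collect(krate: Crate) -> CrateImplBlocks {
    CrateImplBlocks {
        // Flagged: `krate: krate.clone(),` — the clone is pointless and the
        // field name just repeats the variable name.
        krate,
        impls: HashMap::default(),
    }
}

fn main() {
    let blocks = collect(Crate(1));
    assert_eq!(blocks.krate, Crate(1));
    assert!(blocks.impls.is_empty());
}
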
@@ -130,12 +130,9 @@ impl<'a> CompletionContext<'a> {
             .ancestors()
             .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
             .find_map(ast::FnDef::cast);
-        match (self.module, self.function_syntax) {
-            (Some(module), Some(fn_def)) => {
-                let function = source_binder::function_from_module(self.db, module, fn_def);
-                self.function = Some(function);
-            }
-            _ => (),
+        if let (Some(module), Some(fn_def)) = (self.module, self.function_syntax) {
+            let function = source_binder::function_from_module(self.db, module, fn_def);
+            self.function = Some(function);
         }

         let parent = match name_ref.syntax().parent() {

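The match with one interesting arm and `_ => ()` is clippy::single_match: an `if let` expresses the same control flow without the empty arm. A standalone sketch:

// Sketch of clippy::single_match: one useful arm plus `_ => ()` becomes if let.
fn combine(pair: (Option<u32>, Option<u32>)) -> Option<u32> {
    let mut result = None;

    // Flagged form:
    // match pair {
    //     (Some(a), Some(b)) => result = Some(a + b),
    //     _ => (),
    // }

    // Preferred form:
    if let (Some(a), Some(b)) = pair {
        result = Some(a + b);
    }

    result
}

fn main() {
    assert_eq!(combine((Some(2), Some(3))), Some(5));
    assert_eq!(combine((Some(2), None)), None);
}
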
@@ -108,11 +108,11 @@ impl CompletionItem {
         self.lookup
             .as_ref()
             .map(|it| it.as_str())
-            .unwrap_or(self.label())
+            .unwrap_or_else(|| self.label())
     }

     pub fn insert_text_format(&self) -> InsertTextFormat {
-        self.insert_text_format.clone()
+        self.insert_text_format
     }
     pub fn insert_text(&self) -> String {
         match &self.insert_text {

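`unwrap_or(self.label())` → `unwrap_or_else(|| self.label())` is clippy::or_fun_call: with `unwrap_or`, the fallback expression is evaluated even when the value is `Some`; the `_else` variant defers that work to a closure that only runs on `None`. A sketch showing the difference with a hypothetical fallback function:

// Sketch of clippy::or_fun_call: unwrap_or evaluates its argument eagerly,
// unwrap_or_else only calls the closure when the value is None.
use std::cell::Cell;

fn expensive_fallback(calls: &Cell<u32>) -> String {
    calls.set(calls.get() + 1);
    "fallback".to_string()
}

fn main() {
    let calls = Cell::new(0);
    let lookup: Option<String> = Some("hit".to_string());

    // Eager: the fallback is built even though `lookup` is Some.
    let _ = lookup.clone().unwrap_or(expensive_fallback(&calls));
    assert_eq!(calls.get(), 1);

    // Lazy: the closure never runs for a Some value.
    let _ = lookup.unwrap_or_else(|| expensive_fallback(&calls));
    assert_eq!(calls.get(), 1);
}
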
@@ -217,7 +217,7 @@ impl Builder {
         let def = resolution
             .as_ref()
             .take_types()
-            .or(resolution.as_ref().take_values());
+            .or_else(|| resolution.as_ref().take_values());
         let def = match def {
             None => return self,
             Some(it) => it,

@@ -89,7 +89,11 @@ pub(crate) fn reference_definition(
         .and_then(hir::Path::from_ast)
     {
         let resolved = resolver.resolve_path(db, &path);
-        match resolved.clone().take_types().or(resolved.take_values()) {
+        match resolved
+            .clone()
+            .take_types()
+            .or_else(|| resolved.take_values())
+        {
             Some(Resolution::Def(def)) => return Exact(NavigationTarget::from_def(db, def)),
             Some(Resolution::LocalBinding(pat)) => {
                 let body = resolver.body().expect("no body for local binding");

@@ -117,7 +117,7 @@ impl fmt::Debug for AnalysisChange {
         if !self.libraries_added.is_empty() {
             d.field("libraries_added", &self.libraries_added.len());
         }
-        if !self.crate_graph.is_some() {
+        if self.crate_graph.is_none() {
            d.field("crate_graph", &self.crate_graph);
         }
         d.finish()

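`!self.crate_graph.is_some()` → `self.crate_graph.is_none()` replaces a negated predicate with its direct opposite (clippy reports this via its boolean-simplification lints; the exact lint name is an assumption). A one-line sketch:

// Sketch: prefer the direct predicate over negating its opposite.
fn needs_crate_graph(crate_graph: &Option<Vec<u32>>) -> bool {
    // Flagged: !crate_graph.is_some()
    crate_graph.is_none()
}

fn main() {
    assert!(needs_crate_graph(&None));
    assert!(!needs_crate_graph(&Some(vec![1])));
}
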
@@ -95,12 +95,12 @@ fn rename_mod(
     };
     source_file_edits.push(edit);

-    return Some(SourceChange {
+    Some(SourceChange {
         label: "rename".to_string(),
         source_file_edits,
         file_system_edits,
         cursor_position: None,
-    });
+    })
 }

 fn rename_reference(

@@ -124,12 +124,12 @@ fn rename_reference(
         return None;
     }

-    return Some(SourceChange {
+    Some(SourceChange {
         label: "rename".to_string(),
         source_file_edits: edit,
         file_system_edits: Vec::new(),
         cursor_position: None,
-    });
+    })
 }

 #[cfg(test)]

@@ -137,7 +137,7 @@ impl SymbolIndex {
         symbols.par_sort_by(cmp);
         symbols.dedup_by(|s1, s2| cmp(s1, s2) == Ordering::Equal);
         let names = symbols.iter().map(|it| it.name.as_str().to_lowercase());
-        let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap();
+        let map = fst::Map::from_iter(names.zip(0u64..)).unwrap();
         SymbolIndex { symbols, map }
     }

@@ -169,10 +169,7 @@ impl ConvWith for TextEdit {
     type Output = Vec<lsp_types::TextEdit>;

     fn conv_with(self, line_index: &LineIndex) -> Vec<lsp_types::TextEdit> {
-        self.as_atoms()
-            .into_iter()
-            .map_conv_with(line_index)
-            .collect()
+        self.as_atoms().iter().map_conv_with(line_index).collect()
     }
 }

@@ -394,7 +391,7 @@ pub fn to_location_link(
         origin_selection_range: Some(target.range.conv_with(line_index)),
         target_uri,
         target_range,
-        target_selection_range: target_selection_range,
+        target_selection_range,
     };
     Ok(res)
 }

@@ -123,7 +123,7 @@ pub fn handle_on_type_formatting(
     let edit = edit.source_file_edits.pop().unwrap();

     let change: Vec<TextEdit> = edit.edit.conv_with(&line_index);
-    return Ok(Some(change));
+    Ok(Some(change))
 }

 pub fn handle_document_symbol(

@@ -319,7 +319,7 @@ pub fn handle_runnables(
         args: check_args,
         env: FxHashMap::default(),
     });
-    return Ok(res);
+    Ok(res)
 }

 pub fn handle_decorations(

@@ -622,10 +622,8 @@ pub fn handle_code_lens(
     // Gather runnables
     for runnable in world.analysis().runnables(file_id)? {
         let title = match &runnable.kind {
-            RunnableKind::Test { name: _ } | RunnableKind::TestMod { path: _ } => {
-                Some("▶️Run Test")
-            }
-            RunnableKind::Bench { name: _ } => Some("Run Bench"),
+            RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => Some("▶️Run Test"),
+            RunnableKind::Bench { .. } => Some("Run Bench"),
             _ => None,
         };

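`RunnableKind::Test { name: _ }` → `RunnableKind::Test { .. }`: when every named field is ignored, the `..` rest pattern is shorter and does not break when fields are added (clippy's unneeded_field_pattern). A sketch with a stand-in enum:

// Sketch: `..` instead of spelling out each ignored field as `name: _`.
enum RunnableKind {
    Test { name: String },
    TestMod { path: String },
    Bench { name: String },
    Bin,
}

fn title(kind: &RunnableKind) -> Option<&'static str> {
    match kind {
        // Flagged: RunnableKind::Test { name: _ } | RunnableKind::TestMod { path: _ }
        RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => Some("Run Test"),
        RunnableKind::Bench { .. } => Some("Run Bench"),
        RunnableKind::Bin => None,
    }
}

fn main() {
    let k = RunnableKind::Bench { name: "sort".to_string() };
    assert_eq!(title(&k), Some("Run Bench"));
}
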
@@ -679,7 +677,7 @@ pub fn handle_code_lens(
         }),
     );

-    return Ok(Some(lenses));
+    Ok(Some(lenses))
 }

 #[derive(Debug, Serialize, Deserialize)]

@@ -722,22 +720,20 @@ pub fn handle_code_lens_resolve(world: ServerWorld, code_lens: CodeLens) -> Resu
                     to_value(locations).unwrap(),
                 ]),
             };
-            return Ok(CodeLens {
+            Ok(CodeLens {
                 range: code_lens.range,
                 command: Some(cmd),
                 data: None,
-            });
-        }
-        None => {
-            return Ok(CodeLens {
-                range: code_lens.range,
-                command: Some(Command {
-                    title: "Error".into(),
-                    ..Default::default()
-                }),
-                data: None,
-            });
+            })
         }
+        None => Ok(CodeLens {
+            range: code_lens.range,
+            command: Some(Command {
+                title: "Error".into(),
+                ..Default::default()
+            }),
+            data: None,
+        }),
     }
 }
