mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-12-27 05:23:24 +00:00

Some clippy fixes

parent 24af351018
commit d7f3d858ad

11 changed files with 16 additions and 19 deletions
@@ -98,7 +98,7 @@ fn already_has_from_impl(
     };
     let var_ty = hir_enum_var.fields(sema.db)[0].signature_ty(sema.db);

-    e_ty.impls_trait(sema.db, from_trait, &[var_ty.clone()])
+    e_ty.impls_trait(sema.db, from_trait, &[var_ty])
 }

 #[cfg(test)]
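Most of the changes in this commit silence individual clippy lints. The hunk above drops a clone() on the last use of a value, the pattern clippy's redundant_clone lint reports. A minimal, self-contained sketch of that pattern (the Ty type and impls_trait function are invented stand-ins, not rust-analyzer APIs):

// Illustrative only: mirrors the `&[var_ty.clone()]` -> `&[var_ty]` change above.
#[derive(Clone, Debug)]
struct Ty(String);

fn impls_trait(args: &[Ty]) -> bool {
    !args.is_empty()
}

fn main() {
    let var_ty = Ty("i32".to_string());
    // Before: impls_trait(&[var_ty.clone()]) cloned on the value's last use.
    // After: move the value into the temporary slice instead.
    let ok = impls_trait(&[var_ty]);
    println!("{}", ok);
}

Moving the value instead of cloning it avoids an extra allocation and makes the last use explicit.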
@@ -124,7 +124,7 @@ fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> {
             }
         }

-        if ast::Stmt::cast(node.clone().into()).is_some() {
+        if ast::Stmt::cast(node.clone()).is_some() {
            return Some((node, false));
        }

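This hunk, and several later ones, remove an .into() whose source and target types are the same. Clippy reported this as identity_conversion at the time; the lint was later renamed useless_conversion. A hedged sketch with made-up names:

// Illustrative only: `describe` is an invented function, not a rust-analyzer API.
fn describe(s: String) -> usize {
    s.len()
}

fn main() {
    let name = String::from("rust-analyzer");
    // Before: describe(name.clone().into()) converted String into String, a no-op.
    // After: pass the clone directly; only the useless conversion goes away.
    let len = describe(name.clone());
    println!("{} has {} bytes", name, len);
}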
@@ -30,7 +30,7 @@ pub(crate) fn merge_imports(ctx: AssistCtx) -> Option<Assist> {
         .filter_map(|dir| neighbor(&use_item, dir))
         .filter_map(|it| Some((it.clone(), it.use_tree()?)))
         .find_map(|(use_item, use_tree)| {
-            Some((try_merge_trees(&tree, &use_tree)?, use_item.clone()))
+            Some((try_merge_trees(&tree, &use_tree)?, use_item))
         })?;

     rewriter.replace_ast(&tree, &merged);
@@ -235,7 +235,7 @@ fn parse_meta(meta: &str) -> ParsedMeta {
             "env" => {
                 for key in value.split(',') {
                     if let Some((k, v)) = split1(key, '=') {
-                        env.set(k.into(), v.into());
+                        env.set(k, v.into());
                     }
                 }
             }
@@ -327,7 +327,7 @@ impl ExternSource {
         self.extern_paths.iter().find_map(|(root_path, id)| {
             if let Ok(rel_path) = path.strip_prefix(root_path) {
                 let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
-                Some((id.clone(), rel_path))
+                Some((*id, rel_path))
             } else {
                 None
             }
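Here a reference to a small Copy value is dereferenced instead of cloned, which is what clippy's clone_on_copy lint suggests. An illustrative sketch, not the actual ExternSource types:

// Illustrative only: `SourceId` stands in for a small Copy id type.
#[derive(Clone, Copy, Debug)]
struct SourceId(u32);

fn main() {
    let ids = vec![SourceId(0), SourceId(1)];
    // Before: ids.get(0).map(|id| id.clone()) cloned through a reference.
    // After: dereference the Copy value (clippy may also suggest `.copied()`).
    let first = ids.get(0).map(|id| *id);
    println!("{:?}", first);
}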
@@ -473,16 +473,14 @@ impl ExprCollector<'_> {
         self.collect_block_items(&block);
         let statements = block
             .statements()
-            .filter_map(|s| match s {
+            .map(|s| match s {
                 ast::Stmt::LetStmt(stmt) => {
                     let pat = self.collect_pat_opt(stmt.pat());
                     let type_ref = stmt.ascribed_type().map(TypeRef::from_ast);
                     let initializer = stmt.initializer().map(|e| self.collect_expr(e));
-                    Some(Statement::Let { pat, type_ref, initializer })
-                }
-                ast::Stmt::ExprStmt(stmt) => {
-                    Some(Statement::Expr(self.collect_expr_opt(stmt.expr())))
+                    Statement::Let { pat, type_ref, initializer }
                 }
+                ast::Stmt::ExprStmt(stmt) => Statement::Expr(self.collect_expr_opt(stmt.expr())),
             })
             .collect();
         let tail = block.expr().map(|e| self.collect_expr(e));
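This is the largest hunk: the closure passed to filter_map always returns Some(..), so it can become a plain map and the Some wrapping disappears from both match arms. That is the shape clippy's unnecessary_filter_map lint looks for. A small stand-alone sketch:

// Illustrative only: a closure that always returns Some(..) does not need filter_map.
fn main() {
    let stmts = vec!["let x = 1;", "x + 1;"];
    // Before: stmts.iter().filter_map(|s| Some(s.len())).collect::<Vec<_>>()
    // After: nothing is ever filtered out, so use map and drop the Some wrapping.
    let lens: Vec<usize> = stmts.iter().map(|s| s.len()).collect();
    println!("{:?}", lens);
}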
@@ -66,7 +66,7 @@ impl AstIdMap {
         // change parent's id. This means that, say, adding a new function to a
         // trait does not change ids of top-level items, which helps caching.
         bfs(node, |it| {
-            if let Some(module_item) = ast::ModuleItem::cast(it.clone()) {
+            if let Some(module_item) = ast::ModuleItem::cast(it) {
                 res.alloc(module_item.syntax());
             }
         });
@@ -301,7 +301,7 @@ fn relative_file(db: &dyn AstDatabase, call_id: MacroCallId, path: &str) -> Opti
     }

     // Extern paths ?
-    let krate = db.relevant_crates(call_site).get(0)?.clone();
+    let krate = *db.relevant_crates(call_site).get(0)?;
     let (extern_source_id, relative_file) =
         db.crate_graph()[krate].extern_source.extern_path(path)?;

@@ -329,7 +329,7 @@ fn include_expand(

     // FIXME:
     // Handle include as expression
-    let res = parse_to_token_tree(&db.file_text(file_id.into()))
+    let res = parse_to_token_tree(&db.file_text(file_id))
         .ok_or_else(|| mbe::ExpandError::ConversionError)?
         .0;

@@ -340,7 +340,7 @@ fn get_env_inner(db: &dyn AstDatabase, arg_id: EagerMacroId, key: &str) -> Optio
     let call_id: MacroCallId = arg_id.into();
     let original_file = call_id.as_file().original_file(db);

-    let krate = db.relevant_crates(original_file).get(0)?.clone();
+    let krate = *db.relevant_crates(original_file).get(0)?;
     db.crate_graph()[krate].env.get(key)
 }

@@ -447,7 +447,7 @@ mod tests {
                     file_id: file_id.into(),
                 };

-                let id: MacroCallId = db.intern_eager_expansion(eager.into()).into();
+                let id: MacroCallId = db.intern_eager_expansion(eager).into();
                 id.as_file()
             }
         };
@@ -96,7 +96,7 @@ fn try_extend_selection(
         return Some(node.text_range());
     }

-    let node = shallowest_node(&node.into());
+    let node = shallowest_node(&node);

     if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
         if let Some(range) = extend_list_item(&node) {
@@ -303,8 +303,7 @@ pub fn load_extern_resources(
             if message.target.kind.contains(&"proc-macro".to_string()) {
                 let package_id = message.package_id;
                 // Skip rmeta file
-                if let Some(filename) =
-                    message.filenames.iter().filter(|name| is_dylib(name)).next()
+                if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name))
                 {
                     res.proc_dylib_paths.insert(package_id, filename.clone());
                 }
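Replacing .filter(..).next() with .find(..) is clippy's filter_next suggestion; find expresses "first element matching a predicate" directly. A runnable sketch with a simplified, invented is_dylib:

// Illustrative only: `is_dylib` here is a simplified stand-in.
fn is_dylib(name: &str) -> bool {
    name.ends_with(".so") || name.ends_with(".dylib") || name.ends_with(".dll")
}

fn main() {
    let filenames = vec!["libfoo.rmeta".to_string(), "libfoo.so".to_string()];
    // Before: filenames.iter().filter(|name| is_dylib(name)).next()
    // After: find returns the first element matching the predicate.
    if let Some(filename) = filenames.iter().find(|name| is_dylib(name)) {
        println!("proc-macro dylib: {}", filename);
    }
}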
@@ -184,7 +184,7 @@ impl WorldState {
         let mut analysis_host = AnalysisHost::new(lru_capacity);
         analysis_host.apply_change(change);
         WorldState {
-            config: config,
+            config,
             roots: folder_roots,
             workspaces: Arc::new(workspaces),
             analysis_host,
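The last hunk switches config: config to the field-init shorthand, as suggested by clippy's redundant_field_names lint. A minimal sketch with an invented struct, not rust-analyzer's WorldState:

// Illustrative only: `Server` is a made-up struct.
struct Server {
    config: String,
    roots: Vec<String>,
}

fn main() {
    let config = String::from("default");
    let folder_roots = vec![String::from("/workspace")];
    // Before: Server { config: config, roots: folder_roots }
    // After: use the shorthand where the local and the field share a name.
    let server = Server { config, roots: folder_roots };
    println!("{} ({} roots)", server.config, server.roots.len());
}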