Some clippy fixes for 1.36

This commit is contained in:
Jeremy Kolb 2019-07-04 13:26:44 -04:00
parent c6a6e43372
commit 4ad9e986ad
31 changed files with 62 additions and 70 deletions

View file

@ -130,7 +130,7 @@ fn initialize(
Ok(RawMessage::Notification(n)) => {
n.cast::<Initialized>().map_err(|_| "expected initialized notification")?;
}
_ => Err(format!("expected initialized notification"))?,
_ => Err("expected initialized notification".to_string())?,
}
Ok(params)
}

View file

@ -39,13 +39,13 @@ fn collect_path_segments_raw<'a>(
// We need to reverse only the new added segments
let only_new_segments = segments.split_at_mut(oldlen).1;
only_new_segments.reverse();
return Some(segments.len() - oldlen);
Some(segments.len() - oldlen)
}
fn fmt_segments(segments: &[SmolStr]) -> String {
let mut buf = String::new();
fmt_segments_raw(segments, &mut buf);
return buf;
buf
}
fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {
@ -61,7 +61,7 @@ fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {
// Returns the number of common segments.
fn compare_path_segments(left: &[SmolStr], right: &[&ast::PathSegment]) -> usize {
return left.iter().zip(right).filter(|(l, r)| compare_path_segment(l, r)).count();
left.iter().zip(right).filter(|(l, r)| compare_path_segment(l, r)).count()
}
fn compare_path_segment(a: &SmolStr, b: &ast::PathSegment) -> bool {
@ -320,7 +320,7 @@ fn walk_use_tree_for_best_action<'a>(
// We remove the segments added
current_path_segments.truncate(prev_len);
return action;
action
}
fn best_action_for_target<'b, 'a: 'b>(
@ -339,7 +339,7 @@ fn best_action_for_target<'b, 'a: 'b>(
});
match best_action {
Some(action) => return action,
Some(action) => action,
None => {
// We have no action and no UseItem was found in container so we find
// another item and we use it as anchor.
@ -350,7 +350,7 @@ fn best_action_for_target<'b, 'a: 'b>(
.find(|n| n.range().start() < anchor.range().start())
.or_else(|| Some(anchor));
return ImportAction::add_new_use(anchor, false);
ImportAction::add_new_use(anchor, false)
}
}
}

View file

@ -112,6 +112,6 @@ fn read_stdin() -> Result<String> {
}
fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str)> {
let idx = s.rfind(":").ok_or_else(|| format!("no `{}` in {}", c, s))?;
let idx = s.rfind(':').ok_or_else(|| format!("no `{}` in {}", c, s))?;
Ok((&s[..idx], &s[idx + 1..]))
}

View file

@ -139,7 +139,7 @@ impl CrateGraph {
}
pub fn iter<'a>(&'a self) -> impl Iterator<Item = CrateId> + 'a {
self.arena.keys().map(|it| *it)
self.arena.keys().copied()
}
pub fn crate_root(&self, crate_id: CrateId) -> FileId {
@ -191,7 +191,7 @@ impl CrateGraph {
return true;
}
}
return false;
false
}
}

View file

@ -74,7 +74,7 @@ pub(crate) fn documentation_query(
DocDef::Module(it) => docs_from_ast(&*it.declaration_source(db)?.ast),
DocDef::StructField(it) => match it.source(db).ast {
FieldSource::Named(named) => docs_from_ast(&*named),
FieldSource::Pos(..) => return None,
FieldSource::Pos(..) => None,
},
DocDef::Struct(it) => docs_from_ast(&*it.source(db).ast),
DocDef::Enum(it) => docs_from_ast(&*it.source(db).ast),

View file

@ -150,7 +150,7 @@ impl BodySourceMap {
}
pub(crate) fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::NamedField> {
self.field_map[&(expr, field)].clone()
self.field_map[&(expr, field)]
}
}
@ -471,15 +471,15 @@ impl Pat {
match self {
Pat::Range { .. } | Pat::Lit(..) | Pat::Path(..) | Pat::Wild | Pat::Missing => {}
Pat::Bind { subpat, .. } => {
subpat.iter().map(|pat| *pat).for_each(f);
subpat.iter().copied().for_each(f);
}
Pat::Tuple(args) | Pat::TupleStruct { args, .. } => {
args.iter().map(|pat| *pat).for_each(f);
args.iter().copied().for_each(f);
}
Pat::Ref { pat, .. } => f(*pat),
Pat::Slice { prefix, rest, suffix } => {
let total_iter = prefix.iter().chain(rest.iter()).chain(suffix.iter());
total_iter.map(|pat| *pat).for_each(f);
total_iter.copied().for_each(f);
}
Pat::Struct { args, .. } => {
args.iter().map(|f| f.pat).for_each(f);

View file

@ -72,7 +72,7 @@ impl ExprScopes {
}
pub(crate) fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
self.scope_by_expr.get(&expr).map(|&scope| scope)
self.scope_by_expr.get(&expr).copied()
}
pub(crate) fn scope_by_expr(&self) -> &FxHashMap<ExprId, ScopeId> {

View file

@ -122,7 +122,7 @@ impl LangItems {
module: Module,
) {
if let Some(module_lang_items) = db.module_lang_items(module) {
self.items.extend(module_lang_items.items.iter().map(|(k, v)| (k.clone(), v.clone())))
self.items.extend(module_lang_items.items.iter().map(|(k, v)| (k.clone(), *v)))
}
// Look for lang items in the children
@ -142,7 +142,7 @@ impl LangItems {
{
let node = item.source(db).ast;
if let Some(lang_item_name) = lang_item_name(&*node) {
self.items.entry(lang_item_name).or_insert(constructor(item));
self.items.entry(lang_item_name).or_insert_with(|| constructor(item));
}
}
}

View file

@ -76,7 +76,7 @@ impl MockDatabase {
pub fn diagnostics(&self) -> String {
let mut buf = String::from("\n");
let mut files: Vec<FileId> = self.files.values().map(|&it| it).collect();
let mut files: Vec<FileId> = self.files.values().copied().collect();
files.sort();
for file in files {
let module = crate::source_binder::module_from_file_id(self, file).unwrap();

View file

@ -227,10 +227,8 @@ where
.items
.iter()
.map(|(name, res)| (name.clone(), Either::A(res.clone())));
let macros = scope
.macros
.iter()
.map(|(name, res)| (name.clone(), Either::B(res.clone())));
let macros =
scope.macros.iter().map(|(name, res)| (name.clone(), Either::B(*res)));
let all = items.chain(macros).collect::<Vec<_>>();
self.update(module_id, Some(import_id), &all);
@ -243,10 +241,8 @@ where
.items
.iter()
.map(|(name, res)| (name.clone(), Either::A(res.clone())));
let macros = scope
.macros
.iter()
.map(|(name, res)| (name.clone(), Either::B(res.clone())));
let macros =
scope.macros.iter().map(|(name, res)| (name.clone(), Either::B(*res)));
let all = items.chain(macros).collect::<Vec<_>>();
@ -651,7 +647,7 @@ fn resolve_submodule(
candidates.push(file_dir_mod.clone());
};
let sr = db.source_root(source_root_id);
let mut points_to = candidates.into_iter().filter_map(|path| sr.files.get(&path)).map(|&it| it);
let mut points_to = candidates.into_iter().filter_map(|path| sr.files.get(&path)).copied();
// FIXME: handle ambiguity
match points_to.next() {
Some(file_id) => Ok(file_id),

View file

@ -272,7 +272,7 @@ impl Scope {
},
Scope::ImplBlockScope(i) => {
if name.as_known_name() == Some(KnownName::SelfType) {
PerNs::types(Resolution::SelfType(i.clone()))
PerNs::types(Resolution::SelfType(*i))
} else {
PerNs::none()
}
@ -317,7 +317,7 @@ impl Scope {
}
}
Scope::ImplBlockScope(i) => {
f(Name::self_type(), PerNs::types(Resolution::SelfType(i.clone())));
f(Name::self_type(), PerNs::types(Resolution::SelfType(*i)));
}
Scope::ExprScope(e) => {
e.expr_scopes.entries(e.scope_id).iter().for_each(|e| {

View file

@ -116,16 +116,16 @@ pub struct InferenceResult {
impl InferenceResult {
pub fn method_resolution(&self, expr: ExprId) -> Option<Function> {
self.method_resolutions.get(&expr).map(|it| *it)
self.method_resolutions.get(&expr).copied()
}
pub fn field_resolution(&self, expr: ExprId) -> Option<StructField> {
self.field_resolutions.get(&expr).map(|it| *it)
self.field_resolutions.get(&expr).copied()
}
pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<ImplItem> {
self.assoc_resolutions.get(&id.into()).map(|it| *it)
self.assoc_resolutions.get(&id.into()).copied()
}
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<ImplItem> {
self.assoc_resolutions.get(&id.into()).map(|it| *it)
self.assoc_resolutions.get(&id.into()).copied()
}
pub(crate) fn add_diagnostics(
&self,
@ -239,8 +239,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
&self.resolver,
type_ref,
);
let ty = self.insert_type_vars(ty);
ty
self.insert_type_vars(ty)
}
fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs, depth: usize) -> bool {
@ -973,8 +972,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
for (arg_pat, arg_type) in args.iter().zip(arg_types.iter()) {
let expected = if let Some(type_ref) = arg_type {
let ty = self.make_ty(type_ref);
ty
self.make_ty(type_ref)
} else {
Ty::Unknown
};

View file

@ -102,7 +102,7 @@ impl<T> Canonicalized<T> {
ty.fold(&mut |ty| match ty {
Ty::Bound(idx) => {
if (idx as usize) < self.free_vars.len() {
Ty::Infer(self.free_vars[idx as usize].clone())
Ty::Infer(self.free_vars[idx as usize])
} else {
Ty::Bound(idx)
}
@ -120,7 +120,7 @@ impl<T> Canonicalized<T> {
let new_vars =
(0..solution.num_vars).map(|_| ctx.new_type_var()).collect::<Vec<_>>().into();
for (i, ty) in solution.value.into_iter().enumerate() {
let var = self.free_vars[i].clone();
let var = self.free_vars[i];
ctx.unify(&Ty::Infer(var), &ty.subst_bound_vars(&new_vars));
}
}

View file

@ -72,8 +72,8 @@ impl salsa::ParallelDatabase for RootDatabase {
fn snapshot(&self) -> salsa::Snapshot<RootDatabase> {
salsa::Snapshot::new(RootDatabase {
runtime: self.runtime.snapshot(self),
last_gc: self.last_gc.clone(),
last_gc_check: self.last_gc_check.clone(),
last_gc: self.last_gc,
last_gc_check: self.last_gc_check,
})
}
}

View file

@ -25,7 +25,7 @@ pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
match event {
WalkEvent::Enter(node) => {
if let Some(mut symbol) = structure_node(node) {
symbol.parent = stack.last().map(|&n| n);
symbol.parent = stack.last().copied();
stack.push(res.len());
res.push(symbol);
}

View file

@ -137,7 +137,7 @@ impl<'a> Edits<'a> {
Step::Newline(n) => n,
Step::Utf16Char(r) => r.end(),
};
let res = match &mut self.current {
match &mut self.current {
Some(edit) => {
if step_pos <= edit.delete.start() {
NextSteps::Use
@ -155,8 +155,7 @@ impl<'a> Edits<'a> {
}
}
None => NextSteps::Use,
};
res
}
}
fn translate_range(&self, range: TextRange) -> TextRange {

View file

@ -87,7 +87,7 @@ pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol>
let mut files = Vec::new();
for &root in db.local_roots().iter() {
let sr = db.source_root(root);
files.extend(sr.files.values().map(|&it| it))
files.extend(sr.files.values().copied())
}
let snap = Snap(db.snapshot());

View file

@ -171,7 +171,7 @@ impl Conv for ra_ide_api::Documentation {
fn conv(self) -> Documentation {
Documentation::MarkupContent(MarkupContent {
kind: MarkupKind::Markdown,
value: crate::markdown::mark_fenced_blocks_as_rust(self.as_str()).into(),
value: crate::markdown::mark_fenced_blocks_as_rust(self.as_str()),
})
}
}

View file

@ -5,9 +5,7 @@ pub(crate) mod pending_requests;
use std::{error::Error, fmt, path::PathBuf, sync::Arc, time::Instant};
use crossbeam_channel::{select, unbounded, Receiver, RecvError, Sender};
use gen_lsp_server::{
handle_shutdown, ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse,
};
use gen_lsp_server::{handle_shutdown, ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse};
use lsp_types::NumberOrString;
use ra_ide_api::{Canceled, FileId, LibraryData};
use ra_prof::profile;
@ -398,7 +396,8 @@ fn on_notification(
Ok(mut params) => {
let uri = params.text_document.uri;
let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?;
let text = params.content_changes.pop().ok_or_else(|| format!("empty changes"))?.text;
let text =
params.content_changes.pop().ok_or_else(|| "empty changes".to_string())?.text;
state.vfs.write().change_file_overlay(path.as_path(), text);
return Ok(());
}

View file

@ -498,7 +498,7 @@ fn expand_tt(
tt::Leaf::from(tt::Ident { text: ident.text.clone(), id: TokenId::unspecified() })
.into()
}
crate::Leaf::Punct(punct) => tt::Leaf::from(punct.clone()).into(),
crate::Leaf::Punct(punct) => tt::Leaf::from(*punct).into(),
crate::Leaf::Var(v) => {
if v.text == "crate" {
// FIXME: Properly handle $crate token

View file

@ -56,7 +56,7 @@ fn parse_subtree(tt: &tt::Subtree, transcriber: bool) -> Result<crate::Subtree,
}
}
tt::Leaf::Punct(punct) => crate::Leaf::from(*punct).into(),
tt::Leaf::Ident(tt::Ident { text, id: _ }) => {
tt::Leaf::Ident(tt::Ident { text, .. }) => {
crate::Leaf::from(crate::Ident { text: text.clone() }).into()
}
tt::Leaf::Literal(tt::Literal { text }) => {

View file

@ -78,7 +78,7 @@ impl<'a> SubtreeTokenSource<'a> {
}
}
return cached[pos].clone();
cached[pos].clone()
}
}

View file

@ -107,7 +107,7 @@ pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> TreeArc<ast::SourceFile>
impl TokenMap {
pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
let idx = tt.0 as usize;
self.tokens.get(idx).map(|&it| it)
self.tokens.get(idx).copied()
}
fn alloc(&mut self, relative_range: TextRange) -> tt::TokenId {

View file

@ -171,14 +171,14 @@ impl<'a> TtCursor<'a> {
}
fn eat_punct3(&mut self, p: &tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> {
let sec = self.eat_punct()?.clone();
let third = self.eat_punct()?.clone();
Some(smallvec![p.clone(), sec, third])
let sec = *self.eat_punct()?;
let third = *self.eat_punct()?;
Some(smallvec![*p, sec, third])
}
fn eat_punct2(&mut self, p: &tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> {
let sec = self.eat_punct()?.clone();
Some(smallvec![p.clone(), sec])
let sec = *self.eat_punct()?;
Some(smallvec![*p, sec])
}
fn eat_multi_char_punct<'b, I>(
@ -251,7 +251,7 @@ impl<'a> TtCursor<'a> {
// So we bypass that check here.
let mut peekable = TokenPeek::new(self.subtree.token_trees[self.pos..].iter());
let puncts = self.eat_multi_char_punct(punct, &mut peekable);
let puncts = puncts.unwrap_or_else(|| smallvec![punct.clone()]);
let puncts = puncts.unwrap_or_else(|| smallvec![*punct]);
Some(crate::Separator::Puncts(puncts))
}

View file

@ -191,7 +191,7 @@ impl ProjectWorkspace {
}
}
let libstd = sysroot.std().and_then(|it| sysroot_crates.get(&it).map(|&it| it));
let libstd = sysroot.std().and_then(|it| sysroot_crates.get(&it).copied());
let mut pkg_to_lib_crate = FxHashMap::default();
let mut pkg_crates = FxHashMap::default();

View file

@ -94,7 +94,7 @@ impl SysrootCrate {
self.root(sysroot).parent().unwrap()
}
pub fn deps<'a>(self, sysroot: &'a Sysroot) -> impl Iterator<Item = SysrootCrate> + 'a {
sysroot.crates[self].deps.iter().map(|&it| it)
sysroot.crates[self].deps.iter().copied()
}
}

View file

@ -260,7 +260,7 @@ impl ast::Literal {
.iter()
.find(|&s| text.ends_with(s))
.map(|&suf| SmolStr::new(suf));
LiteralKind::FloatNumber { suffix: suffix }
LiteralKind::FloatNumber { suffix }
}
STRING | RAW_STRING => LiteralKind::String,
T![true] | T![false] => LiteralKind::Bool,

View file

@ -21,8 +21,8 @@ pub struct CheckReparse {
impl CheckReparse {
pub fn from_data(data: &[u8]) -> Option<Self> {
const PREFIX: &'static str = "fn main(){\n\t";
const SUFFIX: &'static str = "\n}";
const PREFIX: &str = "fn main(){\n\t";
const SUFFIX: &str = "\n}";
let data = str::from_utf8(data).ok()?;
let mut lines = data.lines();

View file

@ -28,7 +28,7 @@ pub(crate) struct TextTokenSource<'t> {
impl<'t> TokenSource for TextTokenSource<'t> {
fn current(&self) -> PToken {
return self.curr.0;
self.curr.0
}
fn lookahead_nth(&self, n: usize) -> PToken {

View file

@ -357,7 +357,7 @@ impl SyntaxNode {
// `range` private afterwards
let mut ptr = SyntaxNodePtr::new(self);
ptr.range = TextRange::offset_len(ptr.range().start(), len);
return ptr.to_node(file.syntax()).to_owned();
ptr.to_node(file.syntax()).to_owned()
}
fn position_of_child(&self, child: SyntaxElement) -> usize {

View file

@ -173,7 +173,7 @@ fn scan_escape(first_char: char, chars: &mut Chars<'_>, mode: Mode) -> Result<ch
}
break std::char::from_u32(value).ok_or_else(|| {
if value > 0x10FFFF {
if value > 0x0010_FFFF {
EscapeError::OutOfRangeUnicodeEscape
} else {
EscapeError::LoneSurrogateUnicodeEscape