clippy::useless_conversion

Author: Maan2003
Date:   2021-06-13 09:25:55 +05:30
parent c9b4ac5be4
commit 6cc6dee9e9
9 changed files with 16 additions and 22 deletions
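
For context, clippy::useless_conversion fires when a value is converted into the type it already has, for example via a redundant .into() or From::from call; the fix throughout this commit is simply to drop the call. A minimal hypothetical sketch of code the lint would flag (not taken from the changed files):

    // warning: useless conversion to the same type: `String`
    fn greet(name: String) -> String {
        let s: String = name.into(); // `name` is already a String
        s                            // fix: `let s = name;`
    }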

View file

@@ -426,7 +426,7 @@ impl<'a> Printer<'a> {
         w!(self, " {{");
         self.indented(|this| {
             for item in &**items {
-                this.print_mod_item((*item).into());
+                this.print_mod_item(*item);
             }
         });
         wln!(self, "}}");

View file

@@ -736,8 +736,7 @@ fn macro_call_as_call_id(
             db.upcast(),
             krate,
             MacroCallKind::FnLike { ast_id: call.ast_id, fragment },
-        )
-        .into())
+        ))
     };
     Ok(res)
 }
@@ -765,8 +764,7 @@ fn derive_macro_as_call_id(
             derive_name: last_segment.to_string(),
             derive_attr_index: derive_attr.ast_index,
         },
-    )
-    .into();
+    );
     Ok(res)
 }
@@ -804,7 +802,6 @@ fn attr_macro_as_call_id(
             attr_args: arg,
             invoc_attr_index: macro_attr.id.ast_index,
         },
-    )
-    .into();
+    );
     Ok(res)
 }

View file

@@ -325,7 +325,7 @@ impl TestDB {
         for diag in source_map.diagnostics() {
             let (ptr, message): (InFile<SyntaxNodePtr>, &str) = match diag {
                 BodyDiagnostic::InactiveCode { node, .. } => {
-                    (node.clone().map(|it| it.into()), "InactiveCode")
+                    (node.clone().map(|it| it), "InactiveCode")
                 }
                 BodyDiagnostic::MacroError { node, message } => {
                     (node.clone().map(|it| it.into()), message.as_str())

View file

@@ -325,7 +325,7 @@ $0
         },
     };
-    let id: MacroCallId = db.intern_macro(loc).into();
+    let id: MacroCallId = db.intern_macro(loc);
     let parsed = db.parse_or_expand(id.as_file()).unwrap();
     // FIXME text() for syntax nodes parsed from token tree looks weird

View file

@@ -430,7 +430,7 @@ fn include_expand(
 ) -> ExpandResult<Option<ExpandedEager>> {
     let res = (|| {
         let path = parse_string(tt)?;
-        let file_id = relative_file(db, arg_id.into(), &path, false)?;
+        let file_id = relative_file(db, arg_id, &path, false)?;
         let subtree = parse_to_token_tree(&db.file_text(file_id))
             .ok_or_else(|| mbe::ExpandError::ConversionError)?
@@ -480,7 +480,7 @@ fn include_str_expand(
     // it's unusual to `include_str!` a Rust file), but we can return an empty string.
     // Ideally, we'd be able to offer a precise expansion if the user asks for macro
     // expansion.
-    let file_id = match relative_file(db, arg_id.into(), &path, true) {
+    let file_id = match relative_file(db, arg_id, &path, true) {
         Ok(file_id) => file_id,
         Err(_) => {
             return ExpandResult::ok(Some(ExpandedEager::new(quote!(""))));
@@ -598,7 +598,7 @@ mod tests {
                 },
             };
-            let id: MacroCallId = db.intern_macro(loc).into();
+            let id: MacroCallId = db.intern_macro(loc);
             id.as_file()
         }
         Either::Right(expander) => {
@@ -635,7 +635,7 @@ mod tests {
                 kind: MacroCallKind::FnLike { ast_id: call_id, fragment },
             };
-            let id: MacroCallId = db.intern_macro(loc).into();
+            let id: MacroCallId = db.intern_macro(loc);
             id.as_file()
         }
     };

View file

@@ -57,7 +57,7 @@ impl TokenExpander {
                 // We store the result in salsa db to prevent non-deterministic behavior in
                 // some proc-macro implementation
                 // See #4315 for details
-                db.expand_proc_macro(id.into()).into()
+                db.expand_proc_macro(id).into()
             }
         }
     }

View file

@@ -128,7 +128,7 @@ pub fn expand_eager_macro(
         }),
         kind: MacroCallKind::FnLike { ast_id: call_id, fragment: FragmentKind::Expr },
     });
-    let arg_file_id: MacroCallId = arg_id.into();
+    let arg_file_id: MacroCallId = arg_id;
     let parsed_args =
         diagnostic_sink.result(mbe::token_tree_to_syntax_node(&parsed_args, FragmentKind::Expr))?.0;
@@ -182,8 +182,7 @@ fn lazy_expand(
         db,
         krate,
         MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), fragment },
-    )
-    .into();
+    );
     let err = db.macro_expand_error(id);
     let value = db.parse_or_expand(id.as_file()).map(|node| InFile::new(id.as_file(), node));
@@ -216,8 +215,7 @@ fn eager_macro_recur(
                     def,
                     macro_resolver,
                     diagnostic_sink,
-                )?
-                .into();
+                )?;
                 db.parse_or_expand(id.as_file())
                     .expect("successful macro expansion should be parseable")
                     .clone_for_update()

View file

@@ -202,7 +202,7 @@ impl<T: HasInterner<Interner = Interner> + Fold<Interner>> TyBuilder<Binders<T>>
 impl TyBuilder<Binders<Ty>> {
     pub fn def_ty(db: &dyn HirDatabase, def: TyDefId) -> TyBuilder<Binders<Ty>> {
-        TyBuilder::subst_binders(db.ty(def.into()))
+        TyBuilder::subst_binders(db.ty(def))
     }
     pub fn impl_self_ty(db: &dyn HirDatabase, def: hir_def::ImplId) -> TyBuilder<Binders<Ty>> {

View file

@@ -140,8 +140,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
                 .expect("should be prefixed by what it does")
                 .strip_suffix(suffix_to_strip)
                 .map(unescape)
-                .expect("should be suffixed by comma")
-                .into();
+                .expect("should be suffixed by comma");
         }
     }
     clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));