Mirror of https://github.com/rust-lang/rust-analyzer (synced 2025-01-13 13:48:50 +00:00)
Clippy-fix explicit auto-deref
Seems like these can be safely fixed. One of them particularly surprised me: `Some(pats) => &**pats,` in body.rs. The fixes were produced with:

```
cargo clippy --fix -- -A clippy::all -D clippy::explicit_auto_deref
```
parent f1785f7a21
commit e341e996f7

20 changed files with 22 additions and 22 deletions
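For context, `clippy::explicit_auto_deref` fires where a reference is taken through an explicit deref (`&*x`) even though deref coercion already produces the needed type, which is the shape every hunk below removes. A minimal standalone sketch, not taken from this diff (`line_count` is a made-up helper), showing why both spellings type-check:

```rust
use std::sync::Arc;

// Hypothetical helper standing in for APIs like SourceFile::parse(&str).
fn line_count(text: &str) -> usize {
    text.lines().count()
}

fn main() {
    // Mirrors db.file_text(..) returning an Arc<String> in the hunks below.
    let text: Arc<String> = Arc::new("one\ntwo".to_owned());

    // The explicit form the lint flags: *text is String, so &*text is &String,
    // which still relies on a String -> str coercion at the call site.
    let a = line_count(&*text);

    // The fixed form: &Arc<String> deref-coerces through Arc -> String -> str.
    let b = line_count(&text);

    assert_eq!(a, b);
}
```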
```diff
@@ -128,7 +128,7 @@ impl fmt::Display for CrateName {
 impl ops::Deref for CrateName {
     type Target = str;
     fn deref(&self) -> &str {
-        &*self.0
+        &self.0
     }
 }

```
```diff
@@ -211,7 +211,7 @@ impl fmt::Display for CrateDisplayName {
 impl ops::Deref for CrateDisplayName {
     type Target = str;
     fn deref(&self) -> &str {
-        &*self.crate_name
+        &self.crate_name
     }
 }

```
```diff
@@ -77,7 +77,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
 fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
     let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id));
     let text = db.file_text(file_id);
-    SourceFile::parse(&*text)
+    SourceFile::parse(&text)
 }

 /// We don't want to give HIR knowledge of source roots, hence we extract these
```
```diff
@@ -372,7 +372,7 @@ impl Body {
     /// Retrieves all ident patterns this pattern shares the ident with.
     pub fn ident_patterns_for<'slf>(&'slf self, pat: &'slf PatId) -> &'slf [PatId] {
         match self.or_pats.get(pat) {
-            Some(pats) => &**pats,
+            Some(pats) => pats,
             None => std::slice::from_ref(pat),
         }
     }
```
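The body.rs arm called out in the commit message works because a match in tail position is a coercion site: `pats` is a borrow of the map's value (something like `&Arc<[PatId]>`), and the expected return type `&[PatId]` lets deref coercion supply the `**`. A rough sketch with hypothetical stand-in types (the real `or_pats` map type may differ):

```rust
use std::collections::HashMap;
use std::sync::Arc;

type PatId = u32;

struct Body {
    // Assumed shape: each pattern id maps to the group of ids sharing its ident.
    or_pats: HashMap<PatId, Arc<[PatId]>>,
}

impl Body {
    fn ident_patterns_for<'slf>(&'slf self, pat: &'slf PatId) -> &'slf [PatId] {
        match self.or_pats.get(pat) {
            // pats: &Arc<[PatId]>; the arm must produce &[PatId], so the
            // compiler applies deref coercion -- the explicit &** was redundant.
            Some(pats) => pats,
            None => std::slice::from_ref(pat),
        }
    }
}

fn main() {
    let or_pats = HashMap::from([(1, Arc::<[PatId]>::from(vec![1, 2]))]);
    let body = Body { or_pats };
    assert_eq!(body.ident_patterns_for(&1), &[1, 2]);
    assert_eq!(body.ident_patterns_for(&3), &[3]);
}
```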
```diff
@@ -47,7 +47,7 @@ pub struct ScopeData {
 impl ExprScopes {
     pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<ExprScopes> {
         let body = db.body(def);
-        let mut scopes = ExprScopes::new(&*body);
+        let mut scopes = ExprScopes::new(&body);
         scopes.shrink_to_fit();
         Arc::new(scopes)
     }
```
```diff
@@ -1246,7 +1246,7 @@ impl<'db> SemanticsImpl<'db> {

     fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
         let mut cache = self.s2d_cache.borrow_mut();
-        let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
+        let mut ctx = SourceToDefCtx { db: self.db, cache: &mut cache };
         f(&mut ctx)
     }

```
```diff
@@ -394,7 +394,7 @@ fn inline(
     // Inline parameter expressions or generate `let` statements depending on whether inlining works or not.
     for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments).rev() {
         // izip confuses RA due to our lack of hygiene info currently losing us type info causing incorrect errors
-        let usages: &[ast::PathExpr] = &*usages;
+        let usages: &[ast::PathExpr] = &usages;
         let expr: &ast::Expr = expr;

         let insert_let_stmt = || {
```
```diff
@@ -165,7 +165,7 @@ pub trait LineIndexDatabase: base_db::SourceDatabase {

 fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
     let text = db.file_text(file_id);
-    Arc::new(LineIndex::new(&*text))
+    Arc::new(LineIndex::new(&text))
 }

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
```
```diff
@@ -102,7 +102,7 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
     for file_id in files {
         let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);

-        let expected = extract_annotations(&*db.file_text(file_id));
+        let expected = extract_annotations(&db.file_text(file_id));
         let mut actual = diagnostics
             .into_iter()
             .map(|d| {
```
```diff
@@ -459,7 +459,7 @@ mod tests {
     #[track_caller]
     pub(super) fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) {
         let (analysis, file_id) = fixture::file(ra_fixture);
-        let mut expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
+        let mut expected = extract_annotations(&analysis.file_text(file_id).unwrap());
         let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
         let actual = inlay_hints
             .into_iter()
```
```diff
@@ -463,7 +463,7 @@ fn main() {
 }
 "#;
         let (analysis, file_id) = fixture::file(fixture);
-        let expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
+        let expected = extract_annotations(&analysis.file_text(file_id).unwrap());
         let inlay_hints = analysis
             .inlay_hints(
                 &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
```
```diff
@@ -286,7 +286,7 @@ impl BridgeState<'_> {
         BRIDGE_STATE.with(|state| {
             state.replace(BridgeState::InUse, |mut state| {
                 // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
-                f(&mut *state)
+                f(&mut state)
             })
         })
     }
```
```diff
@@ -301,7 +301,7 @@ impl BridgeState<'_> {
         BRIDGE_STATE.with(|state| {
             state.replace(BridgeState::InUse, |mut state| {
                 // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
-                f(&mut *state)
+                f(&mut state)
             })
         })
     }
```
```diff
@@ -133,7 +133,7 @@ static FILTER: Lazy<RwLock<Filter>> = Lazy::new(Default::default);

 fn with_profile_stack<T>(f: impl FnOnce(&mut ProfileStack) -> T) -> T {
     thread_local!(static STACK: RefCell<ProfileStack> = RefCell::new(ProfileStack::new()));
-    STACK.with(|it| f(&mut *it.borrow_mut()))
+    STACK.with(|it| f(&mut it.borrow_mut()))
 }

 #[derive(Default, Clone, Debug)]
```
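The `&mut *x` removals (this hunk, the two `BridgeState` hunks above, and the `with_ctx` hunk near the end) lean on the same coercion working through `DerefMut`: `&mut RefMut<T>` coerces to `&mut T` at a call site. A minimal sketch, independent of the rust-analyzer types:

```rust
use std::cell::RefCell;

fn push(v: &mut Vec<u32>, x: u32) {
    v.push(x);
}

fn main() {
    let cell = RefCell::new(Vec::new());

    // Explicit reborrow through DerefMut, as written before the fix.
    push(&mut *cell.borrow_mut(), 1);

    // Equivalent: &mut RefMut<Vec<u32>> deref-coerces to &mut Vec<u32>.
    push(&mut cell.borrow_mut(), 2);

    assert_eq!(*cell.borrow(), vec![1, 2]);
}
```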
```diff
@@ -427,7 +427,7 @@ impl CargoWorkspace {
     }

     pub fn package_flag(&self, package: &PackageData) -> String {
-        if self.is_unique(&*package.name) {
+        if self.is_unique(&package.name) {
             package.name.clone()
         } else {
             format!("{}:{}", package.name, package.version)
```
```diff
@@ -40,7 +40,7 @@ impl ops::Deref for ManifestPath {
     type Target = AbsPath;

     fn deref(&self) -> &Self::Target {
-        &*self.file
+        &self.file
     }
 }

```
```diff
@@ -253,7 +253,7 @@ impl LsifManager<'_> {
         };
         let result = folds
             .into_iter()
-            .map(|it| to_proto::folding_range(&*text, &line_index, false, it))
+            .map(|it| to_proto::folding_range(&text, &line_index, false, it))
             .collect();
         let folding_id = self.add_vertex(lsif::Vertex::FoldingRangeResult { result });
         self.add_edge(lsif::Edge::FoldingRange(lsif::EdgeData {
```
```diff
@@ -2178,7 +2178,7 @@ fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
         .iter()
         .map(|(field, _ty, doc, default)| {
             let name = format!("rust-analyzer.{}", field.replace('_', "."));
-            let doc = doc_comment_to_string(*doc);
+            let doc = doc_comment_to_string(doc);
             if default.contains('\n') {
                 format!(
                     r#"[[{}]]{}::
```
```diff
@@ -899,7 +899,7 @@ pub(crate) fn handle_folding_range(
     let line_folding_only = snap.config.line_folding_only();
     let res = folds
         .into_iter()
-        .map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it))
+        .map(|it| to_proto::folding_range(&text, &line_index, line_folding_only, it))
         .collect();
     Ok(Some(res))
 }
```
```diff
@@ -979,7 +979,7 @@ pub(crate) fn handle_rename(
     let position = from_proto::file_position(&snap, params.text_document_position)?;

     let mut change =
-        snap.analysis.rename(position, &*params.new_name)?.map_err(to_proto::rename_error)?;
+        snap.analysis.rename(position, &params.new_name)?.map_err(to_proto::rename_error)?;

     // this is kind of a hack to prevent double edits from happening when moving files
     // When a module gets renamed by renaming the mod declaration this causes the file to move
```
```diff
@@ -45,5 +45,5 @@ fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
     thread_local! {
         static CTX: RefCell<Vec<String>> = RefCell::new(Vec::new());
     }
-    CTX.with(|ctx| f(&mut *ctx.borrow_mut()));
+    CTX.with(|ctx| f(&mut ctx.borrow_mut()));
 }
```
```diff
@@ -190,7 +190,7 @@ impl<'a> Cursor<'a> {
     pub fn token_tree(self) -> Option<TokenTreeRef<'a>> {
         match self.entry() {
             Some(Entry::Leaf(tt)) => match tt {
-                TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, *tt)),
+                TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, tt)),
                 TokenTree::Subtree(subtree) => Some(TokenTreeRef::Subtree(subtree, Some(tt))),
             },
             Some(Entry::Subtree(tt, subtree, _)) => Some(TokenTreeRef::Subtree(subtree, *tt)),
```