Mirror of https://github.com/rust-lang/rust-analyzer
Synced 2024-12-26 13:03:31 +00:00
Merge #10504

10504: Remove needless clone r=lnicola a=k-nasa

## Why

Remove clones for efficiency.

## What

- Removed unnecessary clones.

Co-authored-by: k-nasa <htilcs1115@gmail.com>
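Every hunk below has the same shape: a value was cloned immediately before its last use, so the clone only duplicated data that could have been moved instead. A minimal, self-contained sketch of the pattern (not code from this commit; the types are made up for illustration):

```rust
// A value that owns heap data, standing in for the syntax nodes, names,
// and tokens touched by the diff.
#[derive(Clone, Debug)]
struct Token {
    text: String,
}

// The callee takes ownership of its argument.
fn consume(token: Token) -> usize {
    token.text.len()
}

fn main() {
    let token = Token { text: "needless".to_owned() };

    // Before: `token.clone()` allocates a copy, and the original is never touched again.
    // let len = consume(token.clone());

    // After: the last use of `token` can simply move it into the callee.
    let len = consume(token);
    println!("{len}");
}
```

Clippy's `redundant_clone` lint flags many, though not all, instances of this pattern.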
This commit is contained in: commit 64ca0f63bf
8 changed files with 10 additions and 10 deletions
@@ -392,7 +392,7 @@ impl HasChildSource<LocalTypeParamId> for GenericDefId {
         // For traits the first type index is `Self`, we need to add it before the other params.
         if let GenericDefId::TraitId(id) = *self {
-            let trait_ref = id.lookup(db).source(db).value.clone();
+            let trait_ref = id.lookup(db).source(db).value;
             let idx = idx_iter.next().unwrap();
             params.insert(idx, Either::Right(trait_ref))
         }

@@ -254,7 +254,7 @@ impl DocCommentToken {
         let original_start = doc_token.text_range().start();
         let relative_comment_offset = offset - original_start - prefix_len;

-        sema.descend_into_macros_many(doc_token.clone()).into_iter().find_map(|t| {
+        sema.descend_into_macros_many(doc_token).into_iter().find_map(|t| {
             let (node, descended_prefix_len) = match_ast! {
                 match t {
                     ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),

@@ -47,7 +47,7 @@ pub(crate) fn goto_definition(
         .into_iter()
         .filter_map(|token| {
             let parent = token.parent()?;
-            if let Some(tt) = ast::TokenTree::cast(parent.clone()) {
+            if let Some(tt) = ast::TokenTree::cast(parent) {
                 if let x @ Some(_) =
                     try_lookup_include_path(&sema, tt, token.clone(), position.file_id)
                 {

@@ -77,7 +77,7 @@ fn try_lookup_include_path(
     token: SyntaxToken,
     file_id: FileId,
 ) -> Option<Vec<NavigationTarget>> {
-    let token = ast::String::cast(token.clone())?;
+    let token = ast::String::cast(token)?;
     let path = token.value()?.into_owned();
     let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
     let name = macro_call.path()?.segment()?.name_ref()?;

@@ -169,7 +169,7 @@ fn extraction_target(node: &SyntaxNode, selection_range: TextRange) -> Option<Fu
     let expr = ast::Expr::cast(node.clone())?;
     // A node got selected fully
     if node.text_range() == selection_range {
-        return FunctionBody::from_expr(expr.clone());
+        return FunctionBody::from_expr(expr);
     }

     node.ancestors().find_map(ast::Expr::cast).and_then(FunctionBody::from_expr)

@@ -53,7 +53,7 @@ impl<'a> EnumRender<'a> {
             }
             None => (
                 hir::ModPath::from_segments(hir::PathKind::Plain, iter::once(name.clone())),
-                hir::ModPath::from_segments(hir::PathKind::Plain, iter::once(name.clone())),
+                hir::ModPath::from_segments(hir::PathKind::Plain, iter::once(name)),
             ),
         };

@@ -58,7 +58,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
     let mut locals = FxHashMap::default();
     ctx.sema.scope(field_list_parent.syntax()).process_all_names(&mut |name, def| {
         if let hir::ScopeDef::Local(local) = def {
-            locals.insert(name.clone(), local);
+            locals.insert(name, local);
         }
     });
     let missing_fields = ctx.sema.record_literal_missing_fields(&field_list_parent);

@@ -100,7 +100,7 @@ impl LsifManager<'_> {
         let doc_id = self.get_file_id(file_id);
         let line_index = self.db.line_index(file_id);
         let line_index = LineIndex {
-            index: line_index.clone(),
+            index: line_index,
             encoding: OffsetEncoding::Utf16,
             endings: LineEndings::Unix,
         };

@@ -191,7 +191,7 @@ impl LsifManager<'_> {
         let text = self.analysis.file_text(file_id).unwrap();
         let line_index = self.db.line_index(file_id);
         let line_index = LineIndex {
-            index: line_index.clone(),
+            index: line_index,
             encoding: OffsetEncoding::Utf16,
             endings: LineEndings::Unix,
         };

@@ -336,7 +336,7 @@ impl AstNode for CallableExpr {
     {
         if let Some(it) = ast::CallExpr::cast(syntax.clone()) {
             Some(Self::Call(it))
-        } else if let Some(it) = ast::MethodCallExpr::cast(syntax.clone()) {
+        } else if let Some(it) = ast::MethodCallExpr::cast(syntax) {
             Some(Self::MethodCall(it))
         } else {
             None
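The last hunk also shows why only the final `cast` in a chain can give up its clone: each `cast` consumes its argument, so every attempt except the last must keep a copy in case the cast fails and the value is still needed. A small sketch with hypothetical stand-in types (not the real `ast` API):

```rust
// Stand-ins for a syntax node and two typed wrappers that try to "cast" it.
#[derive(Clone)]
struct Node(String);

struct Call(Node);
struct MethodCall(Node);

impl Call {
    // Consumes the node; returns None if it is not a plain call.
    fn cast(node: Node) -> Option<Call> {
        if node.0.starts_with("call") { Some(Call(node)) } else { None }
    }
}

impl MethodCall {
    // Consumes the node; returns None if it is not a method call.
    fn cast(node: Node) -> Option<MethodCall> {
        if node.0.starts_with("method") { Some(MethodCall(node)) } else { None }
    }
}

enum Callable {
    Call(Call),
    MethodCall(MethodCall),
}

fn classify(node: Node) -> Option<Callable> {
    // First attempt must clone: if the cast fails, `node` is still needed below.
    if let Some(it) = Call::cast(node.clone()) {
        Some(Callable::Call(it))
    } else if let Some(it) = MethodCall::cast(node) {
        // Last attempt: `node` is moved, so a clone here would be needless.
        Some(Callable::MethodCall(it))
    } else {
        None
    }
}

fn main() {
    assert!(matches!(classify(Node("method.len".into())), Some(Callable::MethodCall(_))));
    assert!(classify(Node("literal".into())).is_none());
}
```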