remove unnecessary lazy evaluations

Daniel Eades 2022-12-30 08:30:23 +00:00
parent 7530d76f00
commit cc80c5bd07
31 changed files with 50 additions and 51 deletions
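
The change is mechanical: call sites that passed a closure only to return an already-available value switch to the eager variant, mostly `bool::then(|| v)` → `bool::then_some(v)`, plus one `ok_or_else` → `ok_or` and one `unwrap_or_else` → `unwrap_or` (the title echoes clippy's `unnecessary_lazy_evaluations` lint, presumably the motivation). The sketch below is illustrative and not part of the diff; it shows the equivalence and the caveat that decides which form to keep.

// Example (not part of the diff): eager vs. lazy bool-to-Option adapters.
fn main() {
    let flag = true;
    let len: usize = 4; // already computed; wrapping it in a closure buys nothing

    // `then` defers evaluation behind a closure, `then_some` takes the value directly.
    assert_eq!(flag.then(|| len), flag.then_some(len));

    // The eager form evaluates its argument even when `flag` is false, so the
    // closure stays wherever the argument does real work, e.g. allocates:
    let _tags: Option<Vec<&str>> = flag.then(|| vec!["deprecated"]);
}

That caveat is why lines such as `.then(|| vec![...])` and `.then(|| make::generic_param_list(generics))` later in this diff are left lazy.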

@@ -159,15 +159,14 @@ impl ItemScope {
 pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> {
 let (def, mut iter) = match item {
 ItemInNs::Macros(def) => {
-return self
-.macros
-.iter()
-.find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)));
+return self.macros.iter().find_map(|(name, &(other_def, vis))| {
+(other_def == def).then_some((name, vis))
+});
 }
 ItemInNs::Types(def) => (def, self.types.iter()),
 ItemInNs::Values(def) => (def, self.values.iter()),
 };
-iter.find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)))
+iter.find_map(|(name, &(other_def, vis))| (other_def == def).then_some((name, vis)))
 }
 pub(crate) fn traits<'a>(&'a self) -> impl Iterator<Item = TraitId> + 'a {

@@ -170,7 +170,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
 }
 let pp = pretty_print_macro_expansion(
 parse.syntax_node(),
-show_token_ids.then(|| &*token_map),
+show_token_ids.then_some(&*token_map),
 );
 let indent = IndentLevel::from_node(call.syntax());
 let pp = reindent(indent, pp);

@@ -208,7 +208,7 @@ fn eager_macro_recur(
 // Collect replacement
 for child in children {
 let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
-Some(path) => macro_resolver(path.clone()).ok_or_else(|| UnresolvedMacro { path })?,
+Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
 None => {
 diagnostic_sink(ExpandError::Other("malformed macro invocation".into()));
 continue;
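
Side note (illustrative, not from the commit): `ok_or` is the right call here because the error value is just a struct literal that moves `path`; there is no computation worth deferring. A minimal sketch with hypothetical names:

// Hypothetical names, not rust-analyzer API: the error only moves data we
// already own, so building it eagerly with `ok_or` costs nothing.
use std::collections::HashMap;

struct Unresolved { key: String }

fn resolve(map: &HashMap<String, u32>, key: String) -> Result<u32, Unresolved> {
    // was: map.get(&key).copied().ok_or_else(|| Unresolved { key })
    map.get(&key).copied().ok_or(Unresolved { key })
}

fn main() {
    let map = HashMap::from([("a".to_string(), 1)]);
    assert!(resolve(&map, "b".to_string()).is_err());
}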

@@ -37,7 +37,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
 hir_def::AdtId::UnionId(x) => db.union_data(x).name.to_smol_str(),
 hir_def::AdtId::EnumId(x) => db.enum_data(x).name.to_smol_str(),
 };
-(name == "Goal").then(|| x)
+(name == "Goal").then_some(x)
 }
 _ => None,
 })

@@ -714,7 +714,7 @@ fn lookup_impl_assoc_item_for_trait_ref(
 let impl_data = find_matching_impl(impls, table, trait_ref)?;
 impl_data.items.iter().find_map(|it| match it {
 AssocItemId::FunctionId(f) => {
-(db.function_data(*f).name == *name).then(|| AssocItemId::FunctionId(*f))
+(db.function_data(*f).name == *name).then_some(AssocItemId::FunctionId(*f))
 }
 AssocItemId::ConstId(c) => db
 .const_data(*c)

@@ -61,7 +61,7 @@ impl TraitEnvironment {
 ) -> impl Iterator<Item = TraitId> + 'a {
 self.traits_from_clauses
 .iter()
-.filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then(|| *trait_id))
+.filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then_some(*trait_id))
 }
 }

@@ -1559,7 +1559,7 @@ impl Function {
 }
 pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
-self.has_self_param(db).then(|| SelfParam { func: self.id })
+self.has_self_param(db).then_some(SelfParam { func: self.id })
 }
 pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param> {

@@ -795,7 +795,7 @@ impl<'db> SemanticsImpl<'db> {
 // requeue the tokens we got from mapping our current token down
 stack.extend(mapped_tokens);
 // if the length changed we have found a mapping for the token
-(stack.len() != len).then(|| ())
+(stack.len() != len).then_some(())
 };
 // Remap the next token in the queue into a macro call its in, if it is not being remapped
@@ -1221,7 +1221,7 @@ impl<'db> SemanticsImpl<'db> {
 krate
 .dependencies(self.db)
 .into_iter()
-.find_map(|dep| (dep.name == name).then(|| dep.krate))
+.find_map(|dep| (dep.name == name).then_some(dep.krate))
 }
 fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {

@@ -987,7 +987,7 @@ fn resolve_hir_path_(
 db,
 def,
 res.in_type_ns()?,
-|name, id| (name == unresolved.name).then(|| id),
+|name, id| (name == unresolved.name).then_some(id),
 )
 })
 .map(TypeAlias::from)

@@ -326,7 +326,7 @@ impl ExtendedEnum {
 fn resolve_enum_def(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> {
 sema.type_of_expr(expr)?.adjusted().autoderef(sema.db).find_map(|ty| match ty.as_adt() {
 Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
-_ => ty.is_bool().then(|| ExtendedEnum::Bool),
+_ => ty.is_bool().then_some(ExtendedEnum::Bool),
 })
 }
@@ -344,7 +344,7 @@ fn resolve_tuple_of_enum_def(
 // For now we only handle expansion for a tuple of enums. Here
 // we map non-enum items to None and rely on `collect` to
 // convert Vec<Option<hir::Enum>> into Option<Vec<hir::Enum>>.
-_ => ty.is_bool().then(|| ExtendedEnum::Bool),
+_ => ty.is_bool().then_some(ExtendedEnum::Bool),
 })
 })
 .collect()

@@ -216,7 +216,7 @@ fn validate_method_call_expr(
 let krate = module.krate();
 let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
-it_type.impls_trait(sema.db, iter_trait, &[]).then(|| (expr, receiver))
+it_type.impls_trait(sema.db, iter_trait, &[]).then_some((expr, receiver))
 }
 #[cfg(test)]

@@ -588,7 +588,7 @@ impl FunctionBody {
 FunctionBody::Expr(expr) => Some(expr.clone()),
 FunctionBody::Span { parent, text_range } => {
 let tail_expr = parent.tail_expr()?;
-text_range.contains_range(tail_expr.syntax().text_range()).then(|| tail_expr)
+text_range.contains_range(tail_expr.syntax().text_range()).then_some(tail_expr)
 }
 }
 }

@@ -178,7 +178,7 @@ fn extract_generic_params(
 .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged),
 };
-let generics = generics.into_iter().filter_map(|(param, tag)| tag.then(|| param));
+let generics = generics.into_iter().filter_map(|(param, tag)| tag.then_some(param));
 tagged_one.then(|| make::generic_param_list(generics))
 }

@@ -271,7 +271,7 @@ fn generate_getter_from_info(
 }}",
 vis,
 record_field_info.fn_name,
-info.mutable.then(|| "mut ").unwrap_or_default(),
+info.mutable.then_some("mut ").unwrap_or_default(),
 ty,
 body,
 );

@@ -64,7 +64,7 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
 let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]);
 let input_expressions = input_expressions
 .into_iter()
-.filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
 .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
 .collect::<Option<Vec<ast::Expr>>>()?;

@@ -613,7 +613,7 @@ pub(crate) fn convert_reference_type(
 }
 fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option<ReferenceConversionType> {
-ty.is_copy(db).then(|| ReferenceConversionType::Copy)
+ty.is_copy(db).then_some(ReferenceConversionType::Copy)
 }
 fn handle_as_ref_str(
@@ -624,7 +624,7 @@ fn handle_as_ref_str(
 let str_type = hir::BuiltinType::str().ty(db);
 ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type])
-.then(|| ReferenceConversionType::AsRefStr)
+.then_some(ReferenceConversionType::AsRefStr)
 }
 fn handle_as_ref_slice(
@@ -636,7 +636,7 @@ fn handle_as_ref_slice(
 let slice_type = hir::Type::new_slice(type_argument);
 ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type])
-.then(|| ReferenceConversionType::AsRefSlice)
+.then_some(ReferenceConversionType::AsRefSlice)
 }
 fn handle_dereferenced(
@@ -647,7 +647,7 @@ fn handle_dereferenced(
 let type_argument = ty.type_arguments().next()?;
 ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument])
-.then(|| ReferenceConversionType::Dereferenced)
+.then_some(ReferenceConversionType::Dereferenced)
 }
 fn handle_option_as_ref(

@@ -357,7 +357,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
 Some(
 input_expressions
 .into_iter()
-.filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
 .filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
 .collect::<Vec<ast::Expr>>(),
 )

@@ -91,7 +91,7 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior)
 .flat_map(|list| list.use_trees())
 // We use Option here to early return from this function(this is not the
 // same as a `filter` op).
-.map(|tree| merge.is_tree_allowed(&tree).then(|| tree))
+.map(|tree| merge.is_tree_allowed(&tree).then_some(tree))
 .collect::<Option<_>>()?;
 use_trees.sort_unstable_by(|a, b| path_cmp_for_sort(a.path(), b.path()));
 for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) {

@@ -608,7 +608,7 @@ impl<'a> FindUsages<'a> {
 let reference = FileReference {
 range,
 name: ast::NameLike::NameRef(name_ref.clone()),
-category: is_name_ref_in_import(name_ref).then(|| ReferenceCategory::Import),
+category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import),
 };
 sink(file_id, reference)
 }
@@ -787,7 +787,7 @@ impl ReferenceCategory {
 fn new(def: &Definition, r: &ast::NameRef) -> Option<ReferenceCategory> {
 // Only Locals and Fields have accesses for now.
 if !matches!(def, Definition::Local(_) | Definition::Field(_)) {
-return is_name_ref_in_import(r).then(|| ReferenceCategory::Import);
+return is_name_ref_in_import(r).then_some(ReferenceCategory::Import);
 }
 let mode = r.syntax().ancestors().find_map(|node| {

@@ -449,7 +449,7 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Pat
 let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
 let paths = input_expressions
 .into_iter()
-.filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
 .filter_map(|mut tokens| {
 syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
 ast::Expr::PathExpr(it) => it.path(),

@@ -273,7 +273,7 @@ impl DocCommentToken {
 let (in_expansion_range, link, ns) =
 extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
 let mapped = doc_mapping.map(range)?;
-(mapped.value.contains(abs_in_expansion_offset)).then(|| (mapped.value, link, ns))
+(mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns))
 })?;
 // get the relative range to the doc/attribute in the expansion
 let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;

@@ -205,7 +205,7 @@ fn extend_single_word_in_comment_or_string(
 }
 let start_idx = before.rfind(non_word_char)? as u32;
-let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32;
+let end_idx = after.find(non_word_char).unwrap_or(after.len()) as u32;
 let from: TextSize = (start_idx + 1).into();
 let to: TextSize = (cursor_position + end_idx).into();
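
Side note (illustrative, not from the commit): `unwrap_or` evaluates its argument even when the Option is `Some`, which is fine when the fallback is as cheap as a length read, as it is here. A standalone sketch of the same pattern:

// Find the end of the word at the start of `after`, falling back to the full length.
fn end_of_word(after: &str) -> u32 {
    let non_word_char = |c: char| !(c.is_alphanumeric() || c == '_');
    // was: .unwrap_or_else(|| after.len())
    after.find(non_word_char).unwrap_or(after.len()) as u32
}

fn main() {
    assert_eq!(end_of_word("foo bar"), 3);
    assert_eq!(end_of_word("foobar"), 6);
}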

@@ -110,7 +110,7 @@ fn impls_for_trait_item(
 .filter_map(|imp| {
 let item = imp.items(sema.db).iter().find_map(|itm| {
 let itm_name = itm.name(sema.db)?;
-(itm_name == fun_name).then(|| *itm)
+(itm_name == fun_name).then_some(*itm)
 })?;
 item.try_to_nav(sema.db)
 })

@@ -110,7 +110,7 @@ fn highlight_references(
 .and_then(|decl| decl.focus_range)
 .map(|range| {
 let category =
-references::decl_mutability(&def, node, range).then(|| ReferenceCategory::Write);
+references::decl_mutability(&def, node, range).then_some(ReferenceCategory::Write);
 HighlightedRange { range, category }
 });
 if let Some(hl_range) = hl_range {
@@ -365,7 +365,7 @@ mod tests {
 let mut expected = annotations
 .into_iter()
-.map(|(r, access)| (r.range, (!access.is_empty()).then(|| access)))
+.map(|(r, access)| (r.range, (!access.is_empty()).then_some(access)))
 .collect::<Vec<_>>();
 let mut actual = hls

@@ -167,7 +167,7 @@ fn is_named_constructor(
 ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(),
 _ => return None,
 };
-(ctor_name == ty_name).then(|| ())
+(ctor_name == ty_name).then_some(())
 }
 fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir::Type) -> bool {

@@ -111,7 +111,7 @@ fn punctuation(
 let is_raw_ptr = (|| {
 let prefix_expr = parent.and_then(ast::PrefixExpr::cast)?;
 let expr = prefix_expr.expr()?;
-sema.type_of_expr(&expr)?.original.is_raw_ptr().then(|| ())
+sema.type_of_expr(&expr)?.original.is_raw_ptr().then_some(())
 })();
 if let Some(()) = is_raw_ptr {
 HlTag::Operator(HlOperator::Other) | HlMod::Unsafe

@@ -140,7 +140,7 @@ impl Shift {
 | tt::Leaf::Punct(tt::Punct { id, .. })
 | tt::Leaf::Literal(tt::Literal { id, .. })) = leaf;
-(id != tt::TokenId::unspecified()).then(|| id.0)
+(id != tt::TokenId::unspecified()).then_some(id.0)
 }
 };
 subtree.token_trees.iter().filter_map(filter).max()

@@ -273,7 +273,7 @@ fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind),
 _ => return Err(ParseError::InvalidRepeat),
 },
 };
-return Ok((has_sep.then(|| separator), repeat_kind));
+return Ok((has_sep.then_some(separator), repeat_kind));
 }
 }
 }

@@ -228,7 +228,7 @@ fn completion_item(
 max_relevance: u32,
 item: CompletionItem,
 ) {
-let insert_replace_support = config.insert_replace_support().then(|| tdpp.position);
+let insert_replace_support = config.insert_replace_support().then_some(tdpp.position);
 let mut additional_text_edits = Vec::new();
 // LSP does not allow arbitrary edits in completion, so we have to do a
@@ -258,7 +258,7 @@ fn completion_item(
 text_edit.unwrap()
 };
-let insert_text_format = item.is_snippet().then(|| lsp_types::InsertTextFormat::SNIPPET);
+let insert_text_format = item.is_snippet().then_some(lsp_types::InsertTextFormat::SNIPPET);
 let tags = item.deprecated().then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
 let command = if item.trigger_call_info() && config.client_commands().trigger_parameter_hints {
 Some(command::trigger_parameter_hints())

@@ -3921,7 +3921,7 @@ impl AnyHasArgList {
 impl AstNode for AnyHasArgList {
 fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CALL_EXPR | METHOD_CALL_EXPR) }
 fn cast(syntax: SyntaxNode) -> Option<Self> {
-Self::can_cast(syntax.kind()).then(|| AnyHasArgList { syntax })
+Self::can_cast(syntax.kind()).then_some(AnyHasArgList { syntax })
 }
 fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
@@ -4006,7 +4006,7 @@ impl AstNode for AnyHasAttrs {
 )
 }
 fn cast(syntax: SyntaxNode) -> Option<Self> {
-Self::can_cast(syntax.kind()).then(|| AnyHasAttrs { syntax })
+Self::can_cast(syntax.kind()).then_some(AnyHasAttrs { syntax })
 }
 fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
@@ -4043,7 +4043,7 @@ impl AstNode for AnyHasDocComments {
 )
 }
 fn cast(syntax: SyntaxNode) -> Option<Self> {
-Self::can_cast(syntax.kind()).then(|| AnyHasDocComments { syntax })
+Self::can_cast(syntax.kind()).then_some(AnyHasDocComments { syntax })
 }
 fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
@@ -4058,7 +4058,7 @@ impl AstNode for AnyHasGenericParams {
 matches!(kind, ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION)
 }
 fn cast(syntax: SyntaxNode) -> Option<Self> {
-Self::can_cast(syntax.kind()).then(|| AnyHasGenericParams { syntax })
+Self::can_cast(syntax.kind()).then_some(AnyHasGenericParams { syntax })
 }
 fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
@@ -4071,7 +4071,7 @@ impl AnyHasLoopBody {
 impl AstNode for AnyHasLoopBody {
 fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FOR_EXPR | LOOP_EXPR | WHILE_EXPR) }
 fn cast(syntax: SyntaxNode) -> Option<Self> {
-Self::can_cast(syntax.kind()).then(|| AnyHasLoopBody { syntax })
+Self::can_cast(syntax.kind()).then_some(AnyHasLoopBody { syntax })
 }
 fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
@@ -4084,7 +4084,7 @@ impl AnyHasModuleItem {
 impl AstNode for AnyHasModuleItem {
 fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, MACRO_ITEMS | SOURCE_FILE | ITEM_LIST) }
 fn cast(syntax: SyntaxNode) -> Option<Self> {
-Self::can_cast(syntax.kind()).then(|| AnyHasModuleItem { syntax })
+Self::can_cast(syntax.kind()).then_some(AnyHasModuleItem { syntax })
 }
 fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
@@ -4119,7 +4119,7 @@ impl AstNode for AnyHasName {
 )
 }
 fn cast(syntax: SyntaxNode) -> Option<Self> {
-Self::can_cast(syntax.kind()).then(|| AnyHasName { syntax })
+Self::can_cast(syntax.kind()).then_some(AnyHasName { syntax })
 }
 fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
@@ -4137,7 +4137,7 @@ impl AstNode for AnyHasTypeBounds {
 )
 }
 fn cast(syntax: SyntaxNode) -> Option<Self> {
-Self::can_cast(syntax.kind()).then(|| AnyHasTypeBounds { syntax })
+Self::can_cast(syntax.kind()).then_some(AnyHasTypeBounds { syntax })
 }
 fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
@@ -4171,7 +4171,7 @@ impl AstNode for AnyHasVisibility {
 )
 }
 fn cast(syntax: SyntaxNode) -> Option<Self> {
-Self::can_cast(syntax.kind()).then(|| AnyHasVisibility { syntax })
+Self::can_cast(syntax.kind()).then_some(AnyHasVisibility { syntax })
 }
 fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }

@@ -253,7 +253,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
 matches!(kind, #(#kinds)|*)
 }
 fn cast(syntax: SyntaxNode) -> Option<Self> {
-Self::can_cast(syntax.kind()).then(|| #name { syntax })
+Self::can_cast(syntax.kind()).then_some(#name { syntax })
 }
 fn syntax(&self) -> &SyntaxNode {
 &self.syntax