mirror of
https://github.com/rust-lang/rust-analyzer
synced 2024-12-26 13:03:31 +00:00
Simplify with matches!()
parent 513924a7e0
commit e75e2ae5b6
20 changed files with 32 additions and 98 deletions
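Every hunk below applies the same mechanical rewrite: a match whose arms only map patterns to true or false is collapsed into a single matches!() invocation (the macro is stable since Rust 1.42 and expands to exactly such a match). A minimal, self-contained sketch of the before/after shape, using a locally defined Verbosity enum for illustration rather than the crate's own types:

    // Illustrative sketch only: `Verbosity` is redefined here so the example
    // compiles on its own; it is not imported from the repository.
    #[derive(Clone, Copy)]
    enum Verbosity {
        Quiet,
        Verbose,
        Spammy,
    }

    impl Verbosity {
        // Before: an explicit match that only produces booleans.
        fn is_verbose_old(self) -> bool {
            match self {
                Verbosity::Verbose | Verbosity::Spammy => true,
                _ => false,
            }
        }

        // After: the same predicate written with the `matches!` macro.
        fn is_verbose(self) -> bool {
            matches!(self, Verbosity::Verbose | Verbosity::Spammy)
        }
    }

    fn main() {
        assert_eq!(Verbosity::Spammy.is_verbose_old(), Verbosity::Spammy.is_verbose());
        assert!(!Verbosity::Quiet.is_verbose());
    }

Negated predicates (as in the ast::BlockExpr hunk) simply wrap the macro call in `!`.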
@@ -30,9 +30,8 @@ pub(crate) fn change_visibility(acc: &mut Assists, ctx: &AssistContext) -> Optio
 }
 
 fn add_vis(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
-    let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() {
-        T![const] | T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true,
-        _ => false,
+    let item_keyword = ctx.token_at_offset().find(|leaf| {
+        matches!(leaf.kind(), T![const] | T![fn] | T![mod] | T![struct] | T![enum] | T![trait])
     });
 
     let (offset, target) = if let Some(keyword) = item_keyword {
@@ -73,11 +72,7 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
 
 fn vis_offset(node: &SyntaxNode) -> TextSize {
     node.children_with_tokens()
-        .skip_while(|it| match it.kind() {
-            WHITESPACE | COMMENT | ATTR => true,
-            _ => false,
-        })
-        .next()
+        .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
         .map(|it| it.text_range().start())
         .unwrap_or_else(|| node.text_range().start())
 }
@@ -179,11 +179,7 @@ fn target_data_for_def(
 
 fn vis_offset(node: &SyntaxNode) -> TextSize {
     node.children_with_tokens()
-        .skip_while(|it| match it.kind() {
-            WHITESPACE | COMMENT | ATTR => true,
-            _ => false,
-        })
-        .next()
+        .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
         .map(|it| it.text_range().start())
         .unwrap_or_else(|| node.text_range().start())
 }
@@ -81,10 +81,7 @@ pub(crate) fn merge_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option
 }
 
 fn contains_placeholder(a: &ast::MatchArm) -> bool {
-    match a.pat() {
-        Some(ra_syntax::ast::Pat::PlaceholderPat(..)) => true,
-        _ => false,
-    }
+    matches!(a.pat(), Some(ra_syntax::ast::Pat::PlaceholderPat(..)))
 }
 
 #[cfg(test)]
@@ -137,10 +137,7 @@ impl ModuleOrigin {
     }
 
     pub fn is_inline(&self) -> bool {
-        match self {
-            ModuleOrigin::Inline { .. } => true,
-            ModuleOrigin::CrateRoot { .. } | ModuleOrigin::File { .. } => false,
-        }
+        matches!(self, ModuleOrigin::Inline { .. })
     }
 
     /// Returns a node which defines this module.
@@ -785,11 +785,7 @@ impl<'a> InferenceContext<'a> {
         for &check_closures in &[false, true] {
             let param_iter = param_tys.iter().cloned().chain(repeat(Ty::Unknown));
             for (&arg, param_ty) in args.iter().zip(param_iter) {
-                let is_closure = match &self.body[arg] {
-                    Expr::Lambda { .. } => true,
-                    _ => false,
-                };
-
+                let is_closure = matches!(&self.body[arg], Expr::Lambda { .. });
                 if is_closure != check_closures {
                     continue;
                 }
@@ -620,17 +620,11 @@ pub enum GenericPredicate {
 
 impl GenericPredicate {
     pub fn is_error(&self) -> bool {
-        match self {
-            GenericPredicate::Error => true,
-            _ => false,
-        }
+        matches!(self, GenericPredicate::Error)
     }
 
     pub fn is_implemented(&self) -> bool {
-        match self {
-            GenericPredicate::Implemented(_) => true,
-            _ => false,
-        }
+        matches!(self, GenericPredicate::Implemented(_))
     }
 
     pub fn trait_ref(&self, db: &dyn HirDatabase) -> Option<TraitRef> {
@@ -312,10 +312,8 @@ fn get_string_representation(expr: &ast::Expr) -> Option<String> {
 }
 
 fn is_obvious_param(param_name: &str) -> bool {
-    let is_obvious_param_name = match param_name {
-        "predicate" | "value" | "pat" | "rhs" | "other" => true,
-        _ => false,
-    };
+    let is_obvious_param_name =
+        matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other");
     param_name.len() == 1 || is_obvious_param_name
 }
 
@@ -165,10 +165,7 @@ fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Opti
 }
 
 fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool {
-    match (left, right) {
-        (T![,], T![')']) | (T![,], T![']']) => true,
-        _ => false,
-    }
+    matches!((left, right), (T![,], T![')']) | (T![,], T![']']))
 }
 
 #[cfg(test)]
@@ -346,10 +346,7 @@ impl Query {
 }
 
 fn is_type(kind: SyntaxKind) -> bool {
-    match kind {
-        STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF => true,
-        _ => false,
-    }
+    matches!(kind, STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF)
 }
 
 /// The actual data that is stored in the index. It should be as compact as
@@ -137,10 +137,7 @@ fn eat_fragment_kind<'a>(
 }
 
 fn is_boolean_literal(lit: &tt::Literal) -> bool {
-    match lit.text.as_str() {
-        "true" | "false" => true,
-        _ => false,
-    }
+    matches!(lit.text.as_str(), "true" | "false")
 }
 
 fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), ExpandError> {
@@ -73,10 +73,7 @@ pub(crate) mod fragments {
     // Parse a meta item , which excluded [], e.g : #[ MetaItem ]
     pub(crate) fn meta_item(p: &mut Parser) {
        fn is_delimiter(p: &mut Parser) -> bool {
-            match p.current() {
-                T!['{'] | T!['('] | T!['['] => true,
-                _ => false,
-            }
+            matches!(p.current(), T!['{'] | T!['('] | T!['['])
         }
 
         if is_delimiter(p) {
@@ -41,10 +41,7 @@ fn path(p: &mut Parser, mode: Mode) {
     path_segment(p, mode, true);
     let mut qual = path.complete(p, PATH);
     loop {
-        let use_tree = match p.nth(2) {
-            T![*] | T!['{'] => true,
-            _ => false,
-        };
+        let use_tree = matches!(p.nth(2), T![*] | T!['{']);
         if p.at(T![::]) && !use_tree {
             let path = qual.precede(p);
             p.bump(T![::]);
@@ -169,10 +169,7 @@ fn is_where_predicate(p: &mut Parser) -> bool {
 }
 
 fn is_where_clause_end(p: &mut Parser) -> bool {
-    match p.current() {
-        T!['{'] | T![;] | T![=] => true,
-        _ => false,
-    }
+    matches!(p.current(), T!['{'] | T![;] | T![=])
 }
 
 fn where_predicate(p: &mut Parser) {
@@ -20,9 +20,6 @@ impl From<SyntaxKind> for u16 {
 
 impl SyntaxKind {
     pub fn is_trivia(self) -> bool {
-        match self {
-            SyntaxKind::WHITESPACE | SyntaxKind::COMMENT => true,
-            _ => false,
-        }
+        matches!(self, SyntaxKind::WHITESPACE | SyntaxKind::COMMENT)
     }
 }
@@ -399,10 +399,7 @@ impl ast::BlockExpr {
             Some(it) => it,
             None => return true,
         };
-        match parent.kind() {
-            FN_DEF | IF_EXPR | WHILE_EXPR | LOOP_EXPR | EFFECT_EXPR => false,
-            _ => true,
-        }
+        !matches!(parent.kind(), FN_DEF | IF_EXPR | WHILE_EXPR | LOOP_EXPR | EFFECT_EXPR)
     }
 }
 
@@ -459,16 +459,16 @@ impl ast::RangePat {
 
 impl ast::TokenTree {
     pub fn left_delimiter_token(&self) -> Option<SyntaxToken> {
-        self.syntax().first_child_or_token()?.into_token().filter(|it| match it.kind() {
-            T!['{'] | T!['('] | T!['['] => true,
-            _ => false,
-        })
+        self.syntax()
+            .first_child_or_token()?
+            .into_token()
+            .filter(|it| matches!(it.kind(), T!['{'] | T!['('] | T!['[']))
     }
 
     pub fn right_delimiter_token(&self) -> Option<SyntaxToken> {
-        self.syntax().last_child_or_token()?.into_token().filter(|it| match it.kind() {
-            T!['}'] | T![')'] | T![']'] => true,
-            _ => false,
-        })
+        self.syntax()
+            .last_child_or_token()?
+            .into_token()
+            .filter(|it| matches!(it.kind(), T!['}'] | T![')'] | T![']']))
     }
 }
@@ -120,10 +120,7 @@ fn get_text_after_edit(element: SyntaxElement, edit: &Indel) -> String {
 }
 
 fn is_contextual_kw(text: &str) -> bool {
-    match text {
-        "auto" | "default" | "union" => true,
-        _ => false,
-    }
+    matches!(text, "auto" | "default" | "union")
 }
 
 fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
@@ -105,10 +105,7 @@ impl<'a> Eq for Cursor<'a> {}
 impl<'a> Cursor<'a> {
     /// Check whether it is eof
     pub fn eof(self) -> bool {
-        match self.buffer.entry(&self.ptr) {
-            None | Some(Entry::End(None)) => true,
-            _ => false,
-        }
+        matches!(self.buffer.entry(&self.ptr), None | Some(Entry::End(None)))
     }
 
     /// If the cursor is pointing at the end of a subtree, returns
@@ -28,16 +28,10 @@ pub enum Verbosity {
 
 impl Verbosity {
     pub fn is_verbose(self) -> bool {
-        match self {
-            Verbosity::Verbose | Verbosity::Spammy => true,
-            _ => false,
-        }
+        matches!(self, Verbosity::Verbose | Verbosity::Spammy)
     }
     pub fn is_spammy(self) -> bool {
-        match self {
-            Verbosity::Spammy => true,
-            _ => false,
-        }
+        matches!(self, Verbosity::Spammy)
     }
 }
 
@@ -78,10 +78,7 @@ pub fn analysis_bench(
             }
         }
         BenchWhat::Complete(pos) | BenchWhat::GotoDef(pos) => {
-            let is_completion = match what {
-                BenchWhat::Complete(..) => true,
-                _ => false,
-            };
+            let is_completion = matches!(what, BenchWhat::Complete(..));
 
             let offset = host
                 .analysis()