mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-12-25 12:33:33 +00:00

Replace if let with match where appropriate

parent f29796da61
commit 9583dd5725

44 changed files with 201 additions and 269 deletions
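Every hunk below applies the same mechanical rewrite: an `if let ... else` expression becomes a `match` over the same scrutinee. As a rough standalone sketch of the pattern (the function and names here are illustrative only, not code from this commit):

```rust
// Illustrative example of the refactoring applied throughout this commit:
// `lookup`, `describe`, and the strings are made up, not taken from the diff.
fn describe(lookup: Option<i32>) -> String {
    // Before: `if let` with an `else` fallback.
    let via_if_let = if let Some(n) = lookup {
        format!("found {}", n)
    } else {
        "missing".to_string()
    };

    // After: the equivalent `match`, which names the `None` case explicitly
    // and keeps both outcomes in a single expression.
    let via_match = match lookup {
        Some(n) => format!("found {}", n),
        None => "missing".to_string(),
    };

    assert_eq!(via_if_let, via_match);
    via_match
}

fn main() {
    println!("{}", describe(Some(3)));
    println!("{}", describe(None));
}
```

Because `match` must cover every case, several hunks spell the fallback out explicitly (for example `Some(_) | None => ...` or `Err(_) => ...`) where the old `else` branch was implicit.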
@@ -2119,10 +2119,9 @@ impl Impl {
         };
 
         let fp = TyFingerprint::for_inherent_impl(&ty);
-        let fp = if let Some(fp) = fp {
-            fp
-        } else {
-            return Vec::new();
+        let fp = match fp {
+            Some(fp) => fp,
+            None => return Vec::new(),
         };
 
         let mut all = Vec::new();

@@ -474,10 +474,9 @@ impl ExprCollector<'_> {
             }
             ast::Expr::PrefixExpr(e) => {
                 let expr = self.collect_expr_opt(e.expr());
-                if let Some(op) = e.op_kind() {
-                    self.alloc_expr(Expr::UnaryOp { expr, op }, syntax_ptr)
-                } else {
-                    self.alloc_expr(Expr::Missing, syntax_ptr)
+                match e.op_kind() {
+                    Some(op) => self.alloc_expr(Expr::UnaryOp { expr, op }, syntax_ptr),
+                    None => self.alloc_expr(Expr::Missing, syntax_ptr),
                 }
             }
             ast::Expr::ClosureExpr(e) => {

@@ -624,10 +623,9 @@ impl ExprCollector<'_> {
     }
 
     fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
-        if let Some(expr) = expr {
-            self.collect_expr(expr)
-        } else {
-            self.missing_expr()
+        match expr {
+            Some(expr) => self.collect_expr(expr),
+            None => self.missing_expr(),
         }
     }
 

@@ -724,10 +722,9 @@ impl ExprCollector<'_> {
     }
 
     fn collect_block_opt(&mut self, expr: Option<ast::BlockExpr>) -> ExprId {
-        if let Some(block) = expr {
-            self.collect_block(block)
-        } else {
-            self.missing_expr()
+        match expr {
+            Some(block) => self.collect_block(block),
+            None => self.missing_expr(),
         }
     }
 

@@ -890,10 +887,9 @@ impl ExprCollector<'_> {
     }
 
     fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId {
-        if let Some(pat) = pat {
-            self.collect_pat(pat)
-        } else {
-            self.missing_pat()
+        match pat {
+            Some(pat) => self.collect_pat(pat),
+            None => self.missing_pat(),
         }
     }
 

@@ -209,10 +209,9 @@ fn find_path_inner(
            ) {
                path.push_segment(name);
 
-                let new_path = if let Some(best_path) = best_path {
-                    select_best_path(best_path, path, prefer_no_std)
-                } else {
-                    path
+                let new_path = match best_path {
+                    Some(best_path) => select_best_path(best_path, path, prefer_no_std),
+                    None => path,
                 };
                 best_path_len = new_path.len();
                 best_path = Some(new_path);

@@ -243,10 +242,9 @@ fn find_path_inner(
         });
 
         for path in extern_paths {
-            let new_path = if let Some(best_path) = best_path {
-                select_best_path(best_path, path, prefer_no_std)
-            } else {
-                path
+            let new_path = match best_path {
+                Some(best_path) => select_best_path(best_path, path, prefer_no_std),
+                None => path,
             };
             best_path = Some(new_path);
         }

@@ -261,12 +259,11 @@ fn find_path_inner(
         }
     }
 
-    if let Some(prefix) = prefixed.map(PrefixKind::prefix) {
-        best_path.or_else(|| {
+    match prefixed.map(PrefixKind::prefix) {
+        Some(prefix) => best_path.or_else(|| {
             scope_name.map(|scope_name| ModPath::from_segments(prefix, vec![scope_name]))
-        })
-    } else {
-        best_path
+        }),
+        None => best_path,
     }
 }
 

@@ -346,15 +343,13 @@ fn find_local_import_locations(
 
         if let Some((name, vis)) = data.scope.name_of(item) {
             if vis.is_visible_from(db, from) {
-                let is_private = if let Visibility::Module(private_to) = vis {
-                    private_to.local_id == module.local_id
-                } else {
-                    false
+                let is_private = match vis {
+                    Visibility::Module(private_to) => private_to.local_id == module.local_id,
+                    Visibility::Public => false,
                 };
-                let is_original_def = if let Some(module_def_id) = item.as_module_def_id() {
-                    data.scope.declarations().any(|it| it == module_def_id)
-                } else {
-                    false
+                let is_original_def = match item.as_module_def_id() {
+                    Some(module_def_id) => data.scope.declarations().any(|it| it == module_def_id),
+                    None => false,
                 };
 
                 // Ignore private imports. these could be used if we are

@@ -475,10 +475,9 @@ macro_rules! mod_items {
             }
 
             fn id_from_mod_item(mod_item: ModItem) -> Option<FileItemTreeId<Self>> {
-                if let ModItem::$typ(id) = mod_item {
-                    Some(id)
-                } else {
-                    None
+                match mod_item {
+                    ModItem::$typ(id) => Some(id),
+                    _ => None,
                 }
             }
 

@@ -400,13 +400,10 @@ impl DefMap {
         };
         let from_scope_or_builtin = match shadow {
             BuiltinShadowMode::Module => from_scope.or(from_builtin),
-            BuiltinShadowMode::Other => {
-                if let Some(ModuleDefId::ModuleId(_)) = from_scope.take_types() {
-                    from_builtin.or(from_scope)
-                } else {
-                    from_scope.or(from_builtin)
-                }
-            }
+            BuiltinShadowMode::Other => match from_scope.take_types() {
+                Some(ModuleDefId::ModuleId(_)) => from_builtin.or(from_scope),
+                Some(_) | None => from_scope.or(from_builtin),
+            },
         };
         let from_extern_prelude = self
             .extern_prelude

@@ -18,10 +18,9 @@ pub(crate) fn convert_path(
     path: ast::Path,
     hygiene: &Hygiene,
 ) -> Option<ModPath> {
-    let prefix = if let Some(qual) = path.qualifier() {
-        Some(convert_path(db, prefix, qual, hygiene)?)
-    } else {
-        prefix
+    let prefix = match path.qualifier() {
+        Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
+        None => prefix,
     };
 
     let segment = path.segment()?;

@@ -214,10 +214,9 @@ impl TypeRef {
     }
 
     pub(crate) fn from_ast_opt(ctx: &LowerCtx, node: Option<ast::Type>) -> Self {
-        if let Some(node) = node {
-            TypeRef::from_ast(ctx, node)
-        } else {
-            TypeRef::Error
+        match node {
+            Some(node) => TypeRef::from_ast(ctx, node),
+            None => TypeRef::Error,
         }
     }
 

@@ -48,10 +48,9 @@ impl Name {
 
     /// Resolve a name from the text of token.
    fn resolve(raw_text: &str) -> Name {
-        if let Some(text) = raw_text.strip_prefix("r#") {
-            Name::new_text(SmolStr::new(text))
-        } else {
-            Name::new_text(raw_text.into())
+        match raw_text.strip_prefix("r#") {
+            Some(text) => Name::new_text(SmolStr::new(text)),
+            None => Name::new_text(raw_text.into()),
         }
     }
 

@@ -109,10 +109,9 @@ pub(crate) fn deref(
     ty: InEnvironment<&Canonical<Ty>>,
 ) -> Option<Canonical<Ty>> {
     let _p = profile::span("deref");
-    if let Some(derefed) = builtin_deref(&ty.goal.value) {
-        Some(Canonical { value: derefed, binders: ty.goal.binders.clone() })
-    } else {
-        deref_by_trait(db, krate, ty)
+    match builtin_deref(&ty.goal.value) {
+        Some(derefed) => Some(Canonical { value: derefed, binders: ty.goal.binders.clone() }),
+        None => deref_by_trait(db, krate, ty),
     }
 }
 

@@ -104,10 +104,9 @@ impl TyExt for Ty {
     }
 
     fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId> {
-        if let Some(CallableDefId::FunctionId(func)) = self.callable_def(db) {
-            Some(func)
-        } else {
-            None
+        match self.callable_def(db) {
+            Some(CallableDefId::FunctionId(func)) => Some(func),
+            Some(CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_)) | None => None,
         }
     }
     fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)> {

@@ -105,10 +105,9 @@ impl IntRange {
 
     #[inline]
     fn from_range(lo: u128, hi: u128, scalar_ty: Scalar) -> IntRange {
-        if let Scalar::Bool = scalar_ty {
-            IntRange { range: lo..=hi }
-        } else {
-            unimplemented!()
+        match scalar_ty {
+            Scalar::Bool => IntRange { range: lo..=hi },
+            _ => unimplemented!(),
         }
     }
 

@@ -167,10 +167,9 @@ impl<'a> HirFormatter<'a> {
     }
 
     pub fn should_truncate(&self) -> bool {
-        if let Some(max_size) = self.max_size {
-            self.curr_size >= max_size
-        } else {
-            false
+        match self.max_size {
+            Some(max_size) => self.curr_size >= max_size,
+            None => false,
         }
     }
 

@@ -264,10 +264,9 @@ impl<'a> InferenceContext<'a> {
 
         // collect explicitly written argument types
         for arg_type in arg_types.iter() {
-            let arg_ty = if let Some(type_ref) = arg_type {
-                self.make_ty(type_ref)
-            } else {
-                self.table.new_type_var()
+            let arg_ty = match arg_type {
+                Some(type_ref) => self.make_ty(type_ref),
+                None => self.table.new_type_var(),
             };
             sig_tys.push(arg_ty);
         }

@@ -204,10 +204,9 @@ impl<'a> InferenceContext<'a> {
                 } else {
                     BindingMode::convert(*mode)
                 };
-                let inner_ty = if let Some(subpat) = subpat {
-                    self.infer_pat(*subpat, &expected, default_bm)
-                } else {
-                    expected
+                let inner_ty = match subpat {
+                    Some(subpat) => self.infer_pat(*subpat, &expected, default_bm),
+                    None => expected,
                 };
                 let inner_ty = self.insert_type_vars_shallow(inner_ty);
 

@@ -324,10 +324,9 @@ impl<'a> InferenceTable<'a> {
 
     /// Unify two types and register new trait goals that arise from that.
     pub(crate) fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
-        let result = if let Ok(r) = self.try_unify(ty1, ty2) {
-            r
-        } else {
-            return false;
+        let result = match self.try_unify(ty1, ty2) {
+            Ok(r) => r,
+            Err(_) => return false,
         };
         self.register_infer_ok(result);
         true

@@ -368,10 +368,9 @@ impl<'a> TyLoweringContext<'a> {
             Some((it, None)) => it,
             _ => return None,
         };
-        if let TypeNs::GenericParam(param_id) = resolution {
-            Some(param_id)
-        } else {
-            None
+        match resolution {
+            TypeNs::GenericParam(param_id) => Some(param_id),
+            _ => None,
         }
     }
 

@@ -82,10 +82,9 @@ impl TyFingerprint {
             TyKind::Ref(_, _, ty) => return TyFingerprint::for_trait_impl(ty),
             TyKind::Tuple(_, subst) => {
                 let first_ty = subst.interned().get(0).map(|arg| arg.assert_ty_ref(&Interner));
-                if let Some(ty) = first_ty {
-                    return TyFingerprint::for_trait_impl(ty);
-                } else {
-                    TyFingerprint::Unit
+                match first_ty {
+                    Some(ty) => return TyFingerprint::for_trait_impl(ty),
+                    None => TyFingerprint::Unit,
                 }
             }
             TyKind::AssociatedType(_, _)

@@ -195,10 +195,9 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
             mismatch.expected.display_test(&db),
             mismatch.actual.display_test(&db)
         );
-        if let Some(annotation) = mismatches.remove(&range) {
-            assert_eq!(actual, annotation);
-        } else {
-            format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual);
+        match mismatches.remove(&range) {
+            Some(annotation) => assert_eq!(actual, annotation),
+            None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
         }
     }
     for (expr, mismatch) in inference_result.expr_type_mismatches() {

@@ -215,10 +214,9 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
             mismatch.expected.display_test(&db),
             mismatch.actual.display_test(&db)
         );
-        if let Some(annotation) = mismatches.remove(&range) {
-            assert_eq!(actual, annotation);
-        } else {
-            format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual);
+        match mismatches.remove(&range) {
+            Some(annotation) => assert_eq!(actual, annotation),
+            None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
         }
     }
 }

@@ -292,10 +292,9 @@ impl TryToNav for hir::Impl {
     fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
         let src = self.source(db)?;
         let derive_attr = self.is_builtin_derive(db);
-        let frange = if let Some(item) = &derive_attr {
-            item.syntax().original_file_range(db)
-        } else {
-            src.syntax().original_file_range(db)
+        let frange = match &derive_attr {
+            Some(item) => item.syntax().original_file_range(db),
+            None => src.syntax().original_file_range(db),
         };
         let focus_range = if derive_attr.is_some() {
             None

@@ -136,10 +136,9 @@ fn remove_newline(
         }
         T!['}'] => {
             // Removes: comma, newline (incl. surrounding whitespace)
-            let space = if let Some(left) = prev.prev_sibling_or_token() {
-                compute_ws(left.kind(), next.kind())
-            } else {
-                " "
+            let space = match prev.prev_sibling_or_token() {
+                Some(left) => compute_ws(left.kind(), next.kind()),
+                None => " ",
             };
             edit.replace(
                 TextRange::new(prev.text_range().start(), token.text_range().end()),

@@ -103,10 +103,9 @@ impl StaticIndex<'_> {
         for token in tokens {
             let range = token.text_range();
             let node = token.parent().unwrap();
-            let def = if let Some(x) = get_definition(&sema, token.clone()) {
-                x
-            } else {
-                continue;
+            let def = match get_definition(&sema, token.clone()) {
+                Some(x) => x,
+                None => continue,
             };
             let id = if let Some(x) = self.def_map.get(&def) {
                 *x

@@ -124,10 +123,9 @@ impl StaticIndex<'_> {
             let token = self.tokens.get_mut(id).unwrap();
             token.references.push(ReferenceData {
                 range: FileRange { range, file_id },
-                is_definition: if let Some(x) = def.try_to_nav(self.db) {
-                    x.file_id == file_id && x.focus_or_full_range() == range
-                } else {
-                    false
+                is_definition: match def.try_to_nav(self.db) {
+                    Some(x) => x.file_id == file_id && x.focus_or_full_range() == range,
+                    None => false,
                 },
             });
             result.tokens.push((range, id));

@@ -827,10 +827,9 @@ impl FunctionBody {
         locals
             .map(|local| (local, local.source(ctx.db())))
             .filter(|(_, src)| is_defined_outside_of_body(ctx, self, src))
-            .filter_map(|(local, src)| {
-                if let Either::Left(src) = src.value {
-                    Some((local, src))
-                } else {
+            .filter_map(|(local, src)| match src.value {
+                Either::Left(src) => Some((local, src)),
+                Either::Right(_) => {
                     stdx::never!(false, "Local::is_self returned false, but source is SelfParam");
                     None
                 }

@@ -69,10 +69,11 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
                 None => to_extract.syntax().text_range(),
             };
 
-            if let Anchor::WrapInBlock(_) = anchor {
-                format_to!(buf, "{{ let {} = ", var_name);
-            } else {
-                format_to!(buf, "let {} = ", var_name);
-            }
+            match anchor {
+                Anchor::Before(_) | Anchor::Replace(_) => {
+                    format_to!(buf, "let {} = ", var_name)
+                }
+                Anchor::WrapInBlock(_) => format_to!(buf, "{{ let {} = ", var_name),
+            };
             format_to!(buf, "{}", to_extract.syntax());
 

@@ -213,10 +213,9 @@ impl FunctionTemplate {
             Some(cap) => {
                 let cursor = if self.should_focus_return_type {
                     // Focus the return type if there is one
-                    if let Some(ref ret_type) = self.ret_type {
-                        ret_type.syntax()
-                    } else {
-                        self.tail_expr.syntax()
+                    match self.ret_type {
+                        Some(ref ret_type) => ret_type.syntax(),
+                        None => self.tail_expr.syntax(),
                     }
                 } else {
                     self.tail_expr.syntax()

@@ -447,10 +446,9 @@ fn fn_args(
         arg_types.push(match fn_arg_type(ctx, target_module, &arg) {
             Some(ty) => {
                 if !ty.is_empty() && ty.starts_with('&') {
-                    if let Some((new_ty, _)) = useless_type_special_case("", &ty[1..].to_owned()) {
-                        new_ty
-                    } else {
-                        ty
+                    match useless_type_special_case("", &ty[1..].to_owned()) {
+                        Some((new_ty, _)) => new_ty,
+                        None => ty,
                     }
                 } else {
                     ty

@@ -575,20 +573,14 @@ fn next_space_for_fn_in_module(
 ) -> Option<(FileId, GeneratedFunctionTarget)> {
     let file = module_source.file_id.original_file(db);
     let assist_item = match &module_source.value {
-        hir::ModuleSource::SourceFile(it) => {
-            if let Some(last_item) = it.items().last() {
-                GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())
-            } else {
-                GeneratedFunctionTarget::BehindItem(it.syntax().clone())
-            }
-        }
-        hir::ModuleSource::Module(it) => {
-            if let Some(last_item) = it.item_list().and_then(|it| it.items().last()) {
-                GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())
-            } else {
-                GeneratedFunctionTarget::InEmptyItemList(it.item_list()?.syntax().clone())
-            }
-        }
+        hir::ModuleSource::SourceFile(it) => match it.items().last() {
+            Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()),
+            None => GeneratedFunctionTarget::BehindItem(it.syntax().clone()),
+        },
+        hir::ModuleSource::Module(it) => match it.item_list().and_then(|it| it.items().last()) {
+            Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()),
+            None => GeneratedFunctionTarget::InEmptyItemList(it.item_list()?.syntax().clone()),
+        },
         hir::ModuleSource::BlockExpr(it) => {
             if let Some(last_item) =
                 it.statements().take_while(|stmt| matches!(stmt, ast::Stmt::Item(_))).last()

@@ -141,10 +141,9 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext) -> Opt
             for (file_id, refs) in usages.into_iter() {
                 inline_refs_for_file(file_id, refs);
             }
-            if let Some(refs) = current_file_usage {
-                inline_refs_for_file(def_file, refs);
-            } else {
-                builder.edit_file(def_file);
+            match current_file_usage {
+                Some(refs) => inline_refs_for_file(def_file, refs),
+                None => builder.edit_file(def_file),
             }
             if remove_def {
                 builder.delete(ast_func.syntax().text_range());

@@ -127,12 +127,9 @@ impl<'a> AssignmentsCollector<'a> {
         }
     }
     fn collect_block(&mut self, block: &ast::BlockExpr) -> Option<()> {
-        let last_expr = block.tail_expr().or_else(|| {
-            if let ast::Stmt::ExprStmt(stmt) = block.statements().last()? {
-                stmt.expr()
-            } else {
-                None
-            }
+        let last_expr = block.tail_expr().or_else(|| match block.statements().last()? {
+            ast::Stmt::ExprStmt(stmt) => stmt.expr(),
+            ast::Stmt::Item(_) | ast::Stmt::LetStmt(_) => None,
         })?;
 
         if let ast::Expr::BinExpr(expr) = last_expr {

@@ -181,10 +181,9 @@ fn find_trait_method(
 fn item_as_trait(db: &RootDatabase, item: hir::ItemInNs) -> Option<hir::Trait> {
     let item_module_def = item.as_module_def()?;
 
-    if let hir::ModuleDef::Trait(trait_) = item_module_def {
-        Some(trait_)
-    } else {
-        item_module_def.as_assoc_item(db)?.containing_trait(db)
+    match item_module_def {
+        hir::ModuleDef::Trait(trait_) => Some(trait_),
+        _ => item_module_def.as_assoc_item(db)?.containing_trait(db),
     }
 }
 

@@ -250,13 +250,10 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
             };
             Some(make::expr_method_call(receiver, make::name_ref(method), arg_list))
         }
-        ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::UnaryOp::Not => {
-            if let ast::Expr::ParenExpr(parexpr) = pe.expr()? {
-                parexpr.expr()
-            } else {
-                pe.expr()
-            }
-        }
+        ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::UnaryOp::Not => match pe.expr()? {
+            ast::Expr::ParenExpr(parexpr) => parexpr.expr(),
+            _ => pe.expr(),
+        },
         ast::Expr::Literal(lit) => match lit.kind() {
             ast::LiteralKind::Bool(b) => match b {
                 true => Some(ast::Expr::Literal(make::expr_literal("false"))),

@@ -276,13 +273,10 @@ pub(crate) fn does_pat_match_variant(pat: &ast::Pat, var: &ast::Pat) -> bool {
     let first_node_text = |pat: &ast::Pat| pat.syntax().first_child().map(|node| node.text());
 
     let pat_head = match pat {
-        ast::Pat::IdentPat(bind_pat) => {
-            if let Some(p) = bind_pat.pat() {
-                first_node_text(&p)
-            } else {
-                return pat.syntax().text() == var.syntax().text();
-            }
-        }
+        ast::Pat::IdentPat(bind_pat) => match bind_pat.pat() {
+            Some(p) => first_node_text(&p),
+            None => return pat.syntax().text() == var.syntax().text(),
+        },
         pat => first_node_text(pat),
     };
 

@@ -144,10 +144,9 @@ fn is_valid_name(name: &str) -> bool {
 fn is_useless_method(method: &ast::MethodCallExpr) -> bool {
     let ident = method.name_ref().and_then(|it| it.ident_token());
 
-    if let Some(ident) = ident {
-        USELESS_METHODS.contains(&ident.text())
-    } else {
-        false
+    match ident {
+        Some(ident) => USELESS_METHODS.contains(&ident.text()),
+        None => false,
     }
 }
 

@@ -509,10 +509,9 @@ impl<'a> CompletionContext<'a> {
                     .and_then(|pat| self.sema.type_of_pat(&pat))
                     .or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it)))
                     .map(TypeInfo::original);
-                let name = if let Some(ast::Pat::IdentPat(ident)) = it.pat() {
-                    ident.name().map(NameOrNameRef::Name)
-                } else {
-                    None
+                let name = match it.pat() {
+                    Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
+                    Some(_) | None => None,
                 };
 
                 (ty, name)

@@ -74,10 +74,9 @@ impl<'a> FunctionRender<'a> {
 
     fn render(self, import_to_add: Option<ImportEdit>) -> CompletionItem {
         let params = self.params();
-        let call = if let Some(receiver) = &self.receiver {
-            format!("{}.{}", receiver, &self.name)
-        } else {
-            self.name.clone()
+        let call = match &self.receiver {
+            Some(receiver) => format!("{}.{}", receiver, &self.name),
+            None => self.name.clone(),
         };
         let mut item =
             CompletionItem::new(CompletionKind::Reference, self.ctx.source_range(), call.clone());

@@ -63,10 +63,9 @@ fn build_completion(
         .set_documentation(ctx.docs(def))
         .set_deprecated(ctx.is_deprecated(def))
         .detail(&pat);
-    if let Some(snippet_cap) = ctx.snippet_cap() {
-        item.insert_snippet(snippet_cap, pat);
-    } else {
-        item.insert_text(pat);
+    match ctx.snippet_cap() {
+        Some(snippet_cap) => item.insert_snippet(snippet_cap, pat),
+        None => item.insert_text(pat),
     };
     item.build()
 }

@@ -38,10 +38,9 @@ fn build_completion(
         .set_documentation(ctx.docs(def))
         .set_deprecated(ctx.is_deprecated(def))
         .detail(&literal);
-    if let Some(snippet_cap) = ctx.snippet_cap() {
-        item.insert_snippet(snippet_cap, literal);
-    } else {
-        item.insert_text(literal);
+    match ctx.snippet_cap() {
+        Some(snippet_cap) => item.insert_snippet(snippet_cap, literal),
+        None => item.insert_text(literal),
     };
     item.build()
 }

@@ -47,10 +47,9 @@ impl ResolvedRule {
     ) -> Result<ResolvedRule, SsrError> {
         let resolver =
             Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in };
-        let resolved_template = if let Some(template) = rule.template {
-            Some(resolver.resolve_pattern_tree(template)?)
-        } else {
-            None
+        let resolved_template = match rule.template {
+            Some(template) => Some(resolver.resolve_pattern_tree(template)?),
+            None => None,
         };
         Ok(ResolvedRule {
             pattern: resolver.resolve_pattern_tree(rule.pattern)?,

@@ -497,10 +497,9 @@ impl server::Literal for Rustc {
     }
 
     fn integer(&mut self, n: &str) -> Self::Literal {
-        let n = if let Ok(n) = n.parse::<i128>() {
-            n.to_string()
-        } else {
-            n.parse::<u128>().unwrap().to_string()
+        let n = match n.parse::<i128>() {
+            Ok(n) => n.to_string(),
+            Err(_) => n.parse::<u128>().unwrap().to_string(),
         };
         Literal { text: n.into(), id: tt::TokenId::unspecified() }
     }

@@ -500,10 +500,9 @@ impl server::Literal for Rustc {
     }
 
     fn integer(&mut self, n: &str) -> Self::Literal {
-        let n = if let Ok(n) = n.parse::<i128>() {
-            n.to_string()
-        } else {
-            n.parse::<u128>().unwrap().to_string()
+        let n = match n.parse::<i128>() {
+            Ok(n) => n.to_string(),
+            Err(_) => n.parse::<u128>().unwrap().to_string(),
        };
         Literal { text: n.into(), id: tt::TokenId::unspecified() }
     }

@@ -504,10 +504,9 @@ impl server::Literal for Rustc {
     }
 
     fn integer(&mut self, n: &str) -> Self::Literal {
-        let n = if let Ok(n) = n.parse::<i128>() {
-            n.to_string()
-        } else {
-            n.parse::<u128>().unwrap().to_string()
+        let n = match n.parse::<i128>() {
+            Ok(n) => n.to_string(),
+            Err(_) => n.parse::<u128>().unwrap().to_string(),
         };
         Literal { text: n.into(), id: tt::TokenId::unspecified() }
     }

@@ -427,10 +427,9 @@ pub(crate) fn handle_workspace_symbol(
     // If no explicit marker was set, check request params. If that's also empty
     // use global config.
     if !all_symbols {
-        let search_kind = if let Some(ref search_kind) = params.search_kind {
-            search_kind
-        } else {
-            &config.search_kind
+        let search_kind = match params.search_kind {
+            Some(ref search_kind) => search_kind,
+            None => &config.search_kind,
         };
         all_symbols = match search_kind {
             lsp_ext::WorkspaceSymbolSearchKind::OnlyTypes => false,

@@ -439,10 +438,9 @@ pub(crate) fn handle_workspace_symbol(
     }
 
     if !libs {
-        let search_scope = if let Some(ref search_scope) = params.search_scope {
-            search_scope
-        } else {
-            &config.search_scope
+        let search_scope = match params.search_scope {
+            Some(ref search_scope) => search_scope,
+            None => &config.search_scope,
         };
         libs = match search_scope {
             lsp_ext::WorkspaceSymbolSearchScope::Workspace => false,

@@ -60,10 +60,9 @@ impl GenericParamsOwnerEdit for ast::Impl {
         match self.generic_param_list() {
             Some(it) => it,
             None => {
-                let position = if let Some(imp_token) = self.impl_token() {
-                    Position::after(imp_token)
-                } else {
-                    Position::last_child_of(self.syntax())
+                let position = match self.impl_token() {
+                    Some(imp_token) => Position::after(imp_token),
+                    None => Position::last_child_of(self.syntax()),
                 };
                 create_generic_param_list(position)
             }

@@ -72,10 +71,9 @@ impl GenericParamsOwnerEdit for ast::Impl {
 
     fn get_or_create_where_clause(&self) -> ast::WhereClause {
         if self.where_clause().is_none() {
-            let position = if let Some(items) = self.assoc_item_list() {
-                Position::before(items.syntax())
-            } else {
-                Position::last_child_of(self.syntax())
+            let position = match self.assoc_item_list() {
+                Some(items) => Position::before(items.syntax()),
+                None => Position::last_child_of(self.syntax()),
             };
             create_where_clause(position);
         }

@@ -102,10 +100,9 @@ impl GenericParamsOwnerEdit for ast::Trait {
 
     fn get_or_create_where_clause(&self) -> ast::WhereClause {
         if self.where_clause().is_none() {
-            let position = if let Some(items) = self.assoc_item_list() {
-                Position::before(items.syntax())
-            } else {
-                Position::last_child_of(self.syntax())
+            let position = match self.assoc_item_list() {
+                Some(items) => Position::before(items.syntax()),
+                None => Position::last_child_of(self.syntax()),
             };
             create_where_clause(position);
         }

@@ -253,12 +250,9 @@ impl ast::WhereClause {
 
 impl ast::TypeBoundList {
     pub fn remove(&self) {
-        if let Some(colon) =
-            self.syntax().siblings_with_tokens(Direction::Prev).find(|it| it.kind() == T![:])
-        {
-            ted::remove_all(colon..=self.syntax().clone().into())
-        } else {
-            ted::remove(self.syntax())
+        match self.syntax().siblings_with_tokens(Direction::Prev).find(|it| it.kind() == T![:]) {
+            Some(colon) => ted::remove_all(colon..=self.syntax().clone().into()),
+            None => ted::remove(self.syntax()),
         }
     }
 }

@@ -641,9 +641,14 @@ pub fn fn_(
     ret_type: Option<ast::RetType>,
     is_async: bool,
 ) -> ast::Fn {
-    let type_params =
-        if let Some(type_params) = type_params { format!("<{}>", type_params) } else { "".into() };
-    let ret_type = if let Some(ret_type) = ret_type { format!("{} ", ret_type) } else { "".into() };
+    let type_params = match type_params {
+        Some(type_params) => format!("<{}>", type_params),
+        None => "".into(),
+    };
+    let ret_type = match ret_type {
+        Some(ret_type) => format!("{} ", ret_type),
+        None => "".into(),
+    };
     let visibility = match visibility {
         None => String::new(),
         Some(it) => format!("{} ", it),

@@ -549,10 +549,9 @@ impl ast::FieldExpr {
     }
 
     pub fn field_access(&self) -> Option<FieldKind> {
-        if let Some(nr) = self.name_ref() {
-            Some(FieldKind::Name(nr))
-        } else {
-            self.index_token().map(FieldKind::Index)
+        match self.name_ref() {
+            Some(nr) => Some(FieldKind::Name(nr)),
+            None => self.index_token().map(FieldKind::Index),
         }
     }
 }

@@ -283,10 +283,9 @@ pub trait HasFormatSpecifier: AstToken {
     where
         F: FnMut(TextRange, FormatSpecifier),
     {
-        let char_ranges = if let Some(char_ranges) = self.char_ranges() {
-            char_ranges
-        } else {
-            return;
+        let char_ranges = match self.char_ranges() {
+            Some(char_ranges) => char_ranges,
+            None => return,
         };
         let mut chars = char_ranges.iter().peekable();
 

@@ -528,10 +527,11 @@ pub trait HasFormatSpecifier: AstToken {
                     }
                 }
 
-                if let Some((_, Ok('}'))) = chars.peek() {
+                match chars.peek() {
+                    Some((_, Ok('}'))) => {
                         skip_char_and_emit(&mut chars, FormatSpecifier::Close, &mut callback);
-                } else {
-                    continue;
+                    }
+                    Some((_, _)) | None => continue,
                 }
             }
             _ => {

@@ -227,12 +227,9 @@ where
     T: crate::AstNode,
     F: Fn(&str) -> Result<T, ()>,
 {
-    dir_tests(&test_data_dir(), ok_paths, "rast", |text, path| {
-        if let Ok(node) = f(text) {
-            format!("{:#?}", crate::ast::AstNode::syntax(&node))
-        } else {
-            panic!("Failed to parse '{:?}'", path);
-        }
+    dir_tests(&test_data_dir(), ok_paths, "rast", |text, path| match f(text) {
+        Ok(node) => format!("{:#?}", crate::ast::AstNode::syntax(&node)),
+        Err(_) => panic!("Failed to parse '{:?}'", path),
     });
     dir_tests(&test_data_dir(), err_paths, "rast", |text, path| {
         if f(text).is_ok() {

@@ -205,10 +205,9 @@ impl<'a> Cursor<'a> {
     /// Bump the cursor
     pub fn bump(self) -> Cursor<'a> {
         if let Some(Entry::End(exit)) = self.buffer.entry(&self.ptr) {
-            if let Some(exit) = exit {
-                Cursor::create(self.buffer, *exit)
-            } else {
-                self
+            match exit {
+                Some(exit) => Cursor::create(self.buffer, *exit),
+                None => self,
             }
         } else {
            Cursor::create(self.buffer, EntryPtr(self.ptr.0, self.ptr.1 + 1))