Auto merge of #13860 - danieleades:clippy, r=lnicola

fix a bunch of clippy lints

fixes a bunch of clippy lints for fun and profit

I'm aware of this repo's position on clippy. The changes are split into separate commits so they can be reviewed separately.
bors 2023-01-08 17:29:57 +00:00
commit f77b68a3cb
120 changed files with 298 additions and 375 deletions

@@ -408,8 +408,7 @@ impl CargoHandle {
             Ok(())
         } else {
             Err(io::Error::new(io::ErrorKind::Other, format!(
-                "Cargo watcher failed, the command produced no valid metadata (exit code: {:?}):\n{}",
-                exit_status, error
+                "Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}"
             )))
         }
     }
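The hunk above is clippy's `uninlined_format_args` lint: positional `{}` / `{:?}` placeholders and their trailing argument list become identifiers captured directly in the format string (stable since Rust 1.58). An illustrative, self-contained sketch with made-up values, not code from this PR:

    fn main() {
        let exit_status = 101;
        let error = "metadata parse failure";
        // Before: format!("exit code: {:?}, error: {}", exit_status, error)
        // After: the identifiers are captured directly by the format string.
        let msg = format!("exit code: {exit_status:?}, error: {error}");
        assert_eq!(msg, "exit code: 101, error: metadata parse failure");
    }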

@@ -234,8 +234,7 @@ impl TraitData {
         let item_tree = tree_id.item_tree(db);
         let tr_def = &item_tree[tree_id.value];
         let _cx = stdx::panic_context::enter(format!(
-            "trait_data_query({:?} -> {:?} -> {:?})",
-            tr, tr_loc, tr_def
+            "trait_data_query({tr:?} -> {tr_loc:?} -> {tr_def:?})"
         ));
         let name = tr_def.name.clone();
         let is_auto = tr_def.is_auto;
@@ -543,7 +542,7 @@ impl<'a> AssocItemCollector<'a> {
             if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
                 self.inactive_diagnostics.push(DefDiagnostic::unconfigured_code(
                     self.module_id.local_id,
-                    InFile::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast()),
+                    InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()),
                     attrs.cfg().unwrap(),
                     self.expander.cfg_options().clone(),
                 ));
@@ -552,7 +551,7 @@ impl<'a> AssocItemCollector<'a> {
             'attrs: for attr in &*attrs {
                 let ast_id =
-                    AstId::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast());
+                    AstId::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast());
                 let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id };
                 if let Ok(ResolvedAttr::Macro(call_id)) = self.def_map.resolve_attr_macro(
@@ -619,10 +618,8 @@ impl<'a> AssocItemCollector<'a> {
                         let ast_id_map = self.db.ast_id_map(self.expander.current_file_id());
                         let call = ast_id_map.get(call.ast_id).to_node(&root);
-                        let _cx = stdx::panic_context::enter(format!(
-                            "collect_items MacroCall: {}",
-                            call
-                        ));
+                        let _cx =
+                            stdx::panic_context::enter(format!("collect_items MacroCall: {call}"));
                         let res = self.expander.enter_expand::<ast::MacroItems>(self.db, call);
                         if let Ok(ExpandResult { value: Some((mark, _)), .. }) = res {

@@ -176,7 +176,7 @@ fn find_path_for_module(
     // - if relative paths are fine, check if we are searching for a parent
     if prefixed.filter(PrefixKind::is_absolute).is_none() {
-        if let modpath @ Some(_) = find_self_super(&def_map, module_id, from) {
+        if let modpath @ Some(_) = find_self_super(def_map, module_id, from) {
            return modpath;
        }
    }
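The `&def_map` -> `def_map` change is clippy's `needless_borrow`: the binding is already a reference, so taking another `&` only builds a `&&T` that rustc immediately re-borrows. A toy sketch (names are illustrative, not from the PR):

    struct DefMap;

    fn find_self_super(_map: &DefMap) -> Option<()> {
        Some(())
    }

    fn caller(def_map: &DefMap) -> Option<()> {
        // Before: find_self_super(&def_map) passed a &&DefMap that rustc
        // silently re-borrowed; passing the reference through is enough.
        find_self_super(def_map)
    }

    fn main() {
        assert!(caller(&DefMap).is_some());
    }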

@@ -142,8 +142,8 @@ pub enum WherePredicateTypeTarget {
 impl GenericParams {
     /// Iterator of type_or_consts field
-    pub fn iter<'a>(
-        &'a self,
+    pub fn iter(
+        &self,
     ) -> impl DoubleEndedIterator<Item = (Idx<TypeOrConstParamData>, &TypeOrConstParamData)> {
         self.type_or_consts.iter()
     }
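This is clippy's `needless_lifetimes`: with a single `&self` input, lifetime elision already ties the returned iterator (and the references it yields) to `self`, so the named `'a` was noise. A compilable sketch of the same signature change, under assumed toy types:

    struct Params {
        items: Vec<u32>,
    }

    impl Params {
        // Before: pub fn iter<'a>(&'a self) -> impl DoubleEndedIterator<Item = &'a u32> + 'a
        // Elision infers the same borrow from &self.
        fn iter(&self) -> impl DoubleEndedIterator<Item = &u32> {
            self.items.iter()
        }
    }

    fn main() {
        let p = Params { items: vec![1, 2, 3] };
        assert_eq!(p.iter().rev().next(), Some(&3));
    }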

@@ -393,8 +393,8 @@ impl Query {
 /// Searches dependencies of `krate` for an importable path matching `query`.
 ///
 /// This returns a list of items that could be imported from dependencies of `krate`.
-pub fn search_dependencies<'a>(
-    db: &'a dyn DefDatabase,
+pub fn search_dependencies(
+    db: &dyn DefDatabase,
     krate: CrateId,
     query: Query,
 ) -> FxHashSet<ItemInNs> {

@@ -96,7 +96,7 @@ pub(crate) enum BuiltinShadowMode {
 /// Legacy macros can only be accessed through special methods like `get_legacy_macros`.
 /// Other methods will only resolve values, types and module scoped macros only.
 impl ItemScope {
-    pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a {
+    pub fn entries(&self) -> impl Iterator<Item = (&Name, PerNs)> + '_ {
         // FIXME: shadowing
         self.types
             .keys()
@@ -159,18 +159,17 @@ impl ItemScope {
     pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> {
         let (def, mut iter) = match item {
             ItemInNs::Macros(def) => {
-                return self
-                    .macros
-                    .iter()
-                    .find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)));
+                return self.macros.iter().find_map(|(name, &(other_def, vis))| {
+                    (other_def == def).then_some((name, vis))
+                });
             }
             ItemInNs::Types(def) => (def, self.types.iter()),
             ItemInNs::Values(def) => (def, self.values.iter()),
         };
-        iter.find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)))
+        iter.find_map(|(name, &(other_def, vis))| (other_def == def).then_some((name, vis)))
     }
-    pub(crate) fn traits<'a>(&'a self) -> impl Iterator<Item = TraitId> + 'a {
+    pub(crate) fn traits(&self) -> impl Iterator<Item = TraitId> + '_ {
         self.types
             .values()
             .filter_map(|&(def, _)| match def {
@@ -327,7 +326,7 @@ impl ItemScope {
         changed
     }
-    pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator<Item = (Option<Name>, PerNs)> + 'a {
+    pub(crate) fn resolutions(&self) -> impl Iterator<Item = (Option<Name>, PerNs)> + '_ {
         self.entries().map(|(name, res)| (Some(name.clone()), res)).chain(
             self.unnamed_trait_imports
                 .iter()
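The `then(|| v)` -> `then_some(v)` rewrites throughout this PR are clippy's `unnecessary_lazy_evaluations`: when the success value already exists, `bool::then_some` (stable since 1.62) avoids an extra closure, while `then` still earns its keep when building the value is costly. An illustrative sketch, not from the PR:

    fn main() {
        let cond = true;
        // Value already computed: no closure needed.
        assert_eq!(cond.then_some(42), Some(42));
        // Value expensive to build: keep the lazy closure form.
        assert_eq!(cond.then(|| "x".repeat(3)), Some("xxx".to_string()));
    }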

@@ -170,7 +170,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
             }
             let pp = pretty_print_macro_expansion(
                 parse.syntax_node(),
-                show_token_ids.then(|| &*token_map),
+                show_token_ids.then_some(&*token_map),
             );
             let indent = IndentLevel::from_node(call.syntax());
             let pp = reindent(indent, pp);

@@ -67,7 +67,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
         let dep_def_map = db.crate_def_map(dep.crate_id);
         let dep_root = dep_def_map.module_id(dep_def_map.root);
-        deps.insert(dep.as_name(), dep_root.into());
+        deps.insert(dep.as_name(), dep_root);
         if dep.is_prelude() && !tree_id.is_block() {
             def_map.extern_prelude.insert(dep.as_name(), dep_root);
@@ -1094,7 +1094,7 @@ impl DefCollector<'_> {
                     ast_id,
                     *expand_to,
                     self.def_map.krate,
-                    &resolver_def_id,
+                    resolver_def_id,
                     &mut |_err| (),
                 );
                 if let Ok(Ok(call_id)) = call_id {
@@ -1110,7 +1110,7 @@ impl DefCollector<'_> {
                     *derive_attr,
                     *derive_pos as u32,
                     self.def_map.krate,
-                    &resolver,
+                    resolver,
                 );
                 if let Ok((macro_id, def_id, call_id)) = id {
@@ -2085,7 +2085,7 @@ impl ModCollector<'_, '_> {
                         .scope
                         .get_legacy_macro(name)
                        .and_then(|it| it.last())
-                        .map(|&it| macro_id_to_def_id(self.def_collector.db, it.into()))
+                        .map(|&it| macro_id_to_def_id(self.def_collector.db, it))
                 },
             )
         })
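`dep_root.into()` and the `.into()` calls removed in the following hunks fall under clippy's `useless_conversion`: calling `Into::into` when source and target types already match (or the argument coerces on its own) is an identity conversion. A deliberately tiny sketch:

    fn main() {
        let id: u32 = 7;
        // Before: let same: u32 = id.into(); -- converting u32 to u32 is a no-op.
        let same: u32 = id;
        assert_eq!(same, 7);
    }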

@@ -390,7 +390,7 @@ impl DefMap {
             .get_legacy_macro(name)
             // FIXME: shadowing
             .and_then(|it| it.last())
-            .map_or_else(PerNs::none, |&m| PerNs::macros(m.into(), Visibility::Public));
+            .map_or_else(PerNs::none, |&m| PerNs::macros(m, Visibility::Public));
         let from_scope = self[module].scope.get(name);
         let from_builtin = match self.block {
             Some(_) => {

@@ -381,7 +381,7 @@ impl Resolver {
         });
         def_map[module_id].scope.legacy_macros().for_each(|(name, macs)| {
             macs.iter().for_each(|&mac| {
-                res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))));
+                res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)));
             })
         });
         def_map.extern_prelude().for_each(|(name, &def)| {
@@ -517,10 +517,7 @@ impl Scope {
                 });
                 m.def_map[m.module_id].scope.legacy_macros().for_each(|(name, macs)| {
                     macs.iter().for_each(|&mac| {
-                        acc.add(
-                            name,
-                            ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))),
-                        );
+                        acc.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)));
                    })
                });
            }

@@ -115,7 +115,8 @@ pub fn pseudo_derive_attr_expansion(
     };
     let mut token_trees = Vec::new();
-    for tt in (&args.token_trees)
+    for tt in args
+        .token_trees
         .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
     {
         token_trees.push(mk_leaf('#'));

@@ -449,7 +449,7 @@ fn concat_bytes_expand(
         match token.kind() {
             syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()),
             syntax::SyntaxKind::BYTE_STRING => {
-                let components = unquote_byte_string(lit).unwrap_or_else(Vec::new);
+                let components = unquote_byte_string(lit).unwrap_or_default();
                 components.into_iter().for_each(|x| bytes.push(x.to_string()));
             }
             _ => {
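`unwrap_or_else(Vec::new)` -> `unwrap_or_default()` is (most likely) clippy's `unwrap_or_else_default` lint: `Vec::new()` and `Vec::default()` produce the same value, so the dedicated method states the intent directly. Sketch:

    fn main() {
        let missing: Option<Vec<u8>> = None;
        // Before: missing.unwrap_or_else(Vec::new)
        let bytes = missing.unwrap_or_default();
        assert!(bytes.is_empty());
    }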

@@ -208,7 +208,7 @@ fn eager_macro_recur(
     // Collect replacement
     for child in children {
         let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
-            Some(path) => macro_resolver(path.clone()).ok_or_else(|| UnresolvedMacro { path })?,
+            Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
            None => {
                diagnostic_sink(ExpandError::Other("malformed macro invocation".into()));
                continue;
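`ok_or_else(|| X { .. })` -> `ok_or(X { .. })` is the same laziness lint on the error side: when building the error is just moving already-owned data, the eager form is simpler. A sketch with a made-up error type:

    #[derive(Debug, PartialEq)]
    struct UnresolvedMacro {
        path: String,
    }

    fn main() {
        let resolved: Option<u32> = None;
        let path = String::from("foo::bar");
        // Before: resolved.ok_or_else(|| UnresolvedMacro { path })
        let res = resolved.ok_or(UnresolvedMacro { path });
        assert_eq!(res, Err(UnresolvedMacro { path: "foo::bar".into() }));
    }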

@@ -62,7 +62,7 @@ impl<'a> UnescapedName<'a> {
                 it.clone()
             }
         }
-        Repr::TupleField(it) => SmolStr::new(&it.to_string()),
+        Repr::TupleField(it) => SmolStr::new(it.to_string()),
         }
     }
 }
@@ -139,7 +139,7 @@ impl Name {
     pub fn to_smol_str(&self) -> SmolStr {
         match &self.0 {
             Repr::Text(it) => it.clone(),
-            Repr::TupleField(it) => SmolStr::new(&it.to_string()),
+            Repr::TupleField(it) => SmolStr::new(it.to_string()),
         }
     }

@@ -82,11 +82,11 @@ pub(crate) fn autoderef_step(
 }
 // FIXME: replace uses of this with Autoderef above
-pub fn autoderef<'a>(
-    db: &'a dyn HirDatabase,
+pub fn autoderef(
+    db: &dyn HirDatabase,
     env: Arc<TraitEnvironment>,
     ty: Canonical<Ty>,
-) -> impl Iterator<Item = Canonical<Ty>> + 'a {
+) -> impl Iterator<Item = Canonical<Ty>> + '_ {
     let mut table = InferenceTable::new(db, env);
     let ty = table.instantiate_canonical(ty);
     let mut autoderef = Autoderef::new(&mut table, ty);

@@ -131,7 +131,7 @@ fn scalar_max(scalar: &Scalar) -> i128 {
             IntTy::I16 => i16::MAX as i128,
             IntTy::I32 => i32::MAX as i128,
             IntTy::I64 => i64::MAX as i128,
-            IntTy::I128 => i128::MAX as i128,
+            IntTy::I128 => i128::MAX,
         },
         Scalar::Uint(x) => match x {
             chalk_ir::UintTy::Usize => usize::MAX as i128,
@@ -139,7 +139,7 @@ fn scalar_max(scalar: &Scalar) -> i128 {
             chalk_ir::UintTy::U16 => u16::MAX as i128,
             chalk_ir::UintTy::U32 => u32::MAX as i128,
             chalk_ir::UintTy::U64 => u64::MAX as i128,
-            chalk_ir::UintTy::U128 => i128::MAX as i128, // ignore too big u128 for now
+            chalk_ir::UintTy::U128 => i128::MAX, // ignore too big u128 for now
         },
         Scalar::Float(_) => 0,
     }
@@ -404,7 +404,7 @@ pub(crate) fn path_to_const(
     args_lazy: impl FnOnce() -> Generics,
     debruijn: DebruijnIndex,
 ) -> Option<Const> {
-    match resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
+    match resolver.resolve_path_in_value_ns_fully(db.upcast(), path) {
         Some(ValueNs::GenericParam(p)) => {
             let ty = db.const_param_ty(p);
             let args = args_lazy();
@@ -511,10 +511,10 @@ pub(crate) fn const_eval_query_variant(
     )
 }
-pub(crate) fn eval_to_const<'a>(
+pub(crate) fn eval_to_const(
     expr: Idx<Expr>,
     mode: ParamLoweringMode,
-    ctx: &mut InferenceContext<'a>,
+    ctx: &mut InferenceContext<'_>,
     args: impl FnOnce() -> Generics,
     debruijn: DebruijnIndex,
 ) -> Const {
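`i128::MAX as i128` is clippy's `unnecessary_cast`: casting a value to its own type is a no-op, while the neighbouring arms still need `as i128` to widen smaller integers. Sketch:

    fn main() {
        // Widening casts are meaningful...
        let widened = i64::MAX as i128;
        // ...but a self-cast is pure noise: `i128::MAX as i128` == i128::MAX.
        let max = i128::MAX;
        assert!(widened < max);
    }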

@@ -25,7 +25,6 @@ fn eval_goal(ra_fixture: &str) -> Result<ComputedExpr, ConstEvalError> {
     let scope = &def_map[module_id.local_id].scope;
     let const_id = scope
         .declarations()
-        .into_iter()
         .find_map(|x| match x {
             hir_def::ModuleDefId::ConstId(x) => {
                 if db.const_data(x).name.as_ref()?.to_string() == "GOAL" {
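Dropping `.into_iter()` after `.declarations()` is `useless_conversion` again, this time on iterators: `IntoIterator::into_iter` applied to something that is already an `Iterator` returns it unchanged. Illustrative sketch:

    fn main() {
        let declarations = ["a", "b", "goal"].iter();
        // Before: declarations.into_iter().find_map(..) -- into_iter() on an
        // Iterator is the identity, so the adapter can be chained directly.
        let found = declarations.find_map(|&name| (name == "goal").then_some(name));
        assert_eq!(found, Some("goal"));
    }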

@@ -961,7 +961,7 @@ impl<'a> InferenceContext<'a> {
             Expr::RecordLit { path, fields, .. } => {
                 let subs = fields.iter().map(|f| (f.name.clone(), f.expr));
-                self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs)
+                self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs, subs)
             }
             Expr::Underscore => rhs_ty.clone(),
             _ => {
@@ -1360,7 +1360,7 @@ impl<'a> InferenceContext<'a> {
                         ty,
                         c,
                         ParamLoweringMode::Placeholder,
-                        || generics(this.db.upcast(), (&this.resolver).generic_def().unwrap()),
+                        || generics(this.db.upcast(), this.resolver.generic_def().unwrap()),
                         DebruijnIndex::INNERMOST,
                     )
                 },

@@ -153,7 +153,7 @@ impl<'a> InferenceContext<'a> {
     ) -> Ty {
         let mut expected = self.resolve_ty_shallow(expected);
-        if is_non_ref_pat(&self.body, pat) {
+        if is_non_ref_pat(self.body, pat) {
             let mut pat_adjustments = Vec::new();
             while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
                 pat_adjustments.push(expected.clone());
@@ -220,7 +220,7 @@ impl<'a> InferenceContext<'a> {
             ),
             Pat::Record { path: p, args: fields, ellipsis: _ } => {
                 let subs = fields.iter().map(|f| (f.name.clone(), f.pat));
-                self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat.into(), subs)
+                self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs)
             }
             Pat::Path(path) => {
                 // FIXME use correct resolver for the surrounding expression

@@ -228,7 +228,7 @@ impl chalk_ir::interner::Interner for Interner {
         Interned::new(InternedWrapper(chalk_ir::TyData { kind, flags }))
     }
-    fn ty_data<'a>(self, ty: &'a Self::InternedType) -> &'a chalk_ir::TyData<Self> {
+    fn ty_data(self, ty: &Self::InternedType) -> &chalk_ir::TyData<Self> {
         &ty.0
     }
@@ -236,10 +236,7 @@ impl chalk_ir::interner::Interner for Interner {
         Interned::new(InternedWrapper(lifetime))
     }
-    fn lifetime_data<'a>(
-        self,
-        lifetime: &'a Self::InternedLifetime,
-    ) -> &'a chalk_ir::LifetimeData<Self> {
+    fn lifetime_data(self, lifetime: &Self::InternedLifetime) -> &chalk_ir::LifetimeData<Self> {
         &lifetime.0
     }
@@ -247,7 +244,7 @@ impl chalk_ir::interner::Interner for Interner {
         Interned::new(InternedWrapper(constant))
     }
-    fn const_data<'a>(self, constant: &'a Self::InternedConst) -> &'a chalk_ir::ConstData<Self> {
+    fn const_data(self, constant: &Self::InternedConst) -> &chalk_ir::ConstData<Self> {
         &constant.0
     }
@@ -267,10 +264,10 @@ impl chalk_ir::interner::Interner for Interner {
         parameter
     }
-    fn generic_arg_data<'a>(
+    fn generic_arg_data(
         self,
-        parameter: &'a Self::InternedGenericArg,
-    ) -> &'a chalk_ir::GenericArgData<Self> {
+        parameter: &Self::InternedGenericArg,
+    ) -> &chalk_ir::GenericArgData<Self> {
         parameter
     }
@@ -285,11 +282,11 @@ impl chalk_ir::interner::Interner for Interner {
         data.into_iter().collect()
     }
-    fn goal_data<'a>(self, goal: &'a Self::InternedGoal) -> &'a GoalData<Self> {
+    fn goal_data(self, goal: &Self::InternedGoal) -> &GoalData<Self> {
         goal
     }
-    fn goals_data<'a>(self, goals: &'a Self::InternedGoals) -> &'a [Goal<Interner>] {
+    fn goals_data(self, goals: &Self::InternedGoals) -> &[Goal<Interner>] {
         goals
     }
@@ -300,10 +297,7 @@ impl chalk_ir::interner::Interner for Interner {
         Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
     }
-    fn substitution_data<'a>(
-        self,
-        substitution: &'a Self::InternedSubstitution,
-    ) -> &'a [GenericArg] {
+    fn substitution_data(self, substitution: &Self::InternedSubstitution) -> &[GenericArg] {
         &substitution.as_ref().0
     }
@@ -314,10 +308,10 @@ impl chalk_ir::interner::Interner for Interner {
         data
     }
-    fn program_clause_data<'a>(
+    fn program_clause_data(
         self,
-        clause: &'a Self::InternedProgramClause,
-    ) -> &'a chalk_ir::ProgramClauseData<Self> {
+        clause: &Self::InternedProgramClause,
+    ) -> &chalk_ir::ProgramClauseData<Self> {
         clause
     }
@@ -328,10 +322,10 @@ impl chalk_ir::interner::Interner for Interner {
         Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
     }
-    fn program_clauses_data<'a>(
+    fn program_clauses_data(
         self,
-        clauses: &'a Self::InternedProgramClauses,
-    ) -> &'a [chalk_ir::ProgramClause<Self>] {
+        clauses: &Self::InternedProgramClauses,
+    ) -> &[chalk_ir::ProgramClause<Self>] {
         clauses
     }
@@ -342,10 +336,10 @@ impl chalk_ir::interner::Interner for Interner {
         Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
     }
-    fn quantified_where_clauses_data<'a>(
+    fn quantified_where_clauses_data(
         self,
-        clauses: &'a Self::InternedQuantifiedWhereClauses,
-    ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] {
+        clauses: &Self::InternedQuantifiedWhereClauses,
+    ) -> &[chalk_ir::QuantifiedWhereClause<Self>] {
         clauses
     }
@@ -356,10 +350,10 @@ impl chalk_ir::interner::Interner for Interner {
         Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
     }
-    fn variable_kinds_data<'a>(
+    fn variable_kinds_data(
         self,
-        parameter_kinds: &'a Self::InternedVariableKinds,
-    ) -> &'a [chalk_ir::VariableKind<Self>] {
+        parameter_kinds: &Self::InternedVariableKinds,
+    ) -> &[chalk_ir::VariableKind<Self>] {
         &parameter_kinds.as_ref().0
     }
@@ -370,10 +364,10 @@ impl chalk_ir::interner::Interner for Interner {
         Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
     }
-    fn canonical_var_kinds_data<'a>(
+    fn canonical_var_kinds_data(
        self,
-        canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
-    ) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
+        canonical_var_kinds: &Self::InternedCanonicalVarKinds,
+    ) -> &[chalk_ir::CanonicalVarKind<Self>] {
         canonical_var_kinds
     }
@@ -384,10 +378,10 @@ impl chalk_ir::interner::Interner for Interner {
         data.into_iter().collect()
     }
-    fn constraints_data<'a>(
+    fn constraints_data(
         self,
-        constraints: &'a Self::InternedConstraints,
-    ) -> &'a [chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] {
+        constraints: &Self::InternedConstraints,
+    ) -> &[chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] {
         constraints
     }
     fn debug_closure_id(
@@ -410,10 +404,7 @@ impl chalk_ir::interner::Interner for Interner {
         Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
     }
-    fn variances_data<'a>(
-        self,
-        variances: &'a Self::InternedVariances,
-    ) -> &'a [chalk_ir::Variance] {
+    fn variances_data(self, variances: &Self::InternedVariances) -> &[chalk_ir::Variance] {
         variances
     }
 }

@@ -241,7 +241,7 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
 fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result<Layout, LayoutError> {
     cx.univariant::<RustcEnumVariantIdx, &&Layout>(
-        &dl,
+        dl,
         &[],
         &ReprOptions::default(),
         StructKind::AlwaysSized,

@@ -12,8 +12,7 @@ use super::layout_of_ty;
 fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
     // using unstable cargo features failed, fall back to using plain rustc
     let mut cmd = std::process::Command::new("rustc");
-    cmd.args(&["-Z", "unstable-options", "--print", "target-spec-json"])
-        .env("RUSTC_BOOTSTRAP", "1");
+    cmd.args(["-Z", "unstable-options", "--print", "target-spec-json"]).env("RUSTC_BOOTSTRAP", "1");
     let output = cmd.output().unwrap();
     assert!(output.status.success(), "{}", output.status);
     let stdout = String::from_utf8(output.stdout).unwrap();
@@ -30,7 +29,6 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
     let scope = &def_map[module_id.local_id].scope;
     let adt_id = scope
         .declarations()
-        .into_iter()
         .find_map(|x| match x {
             hir_def::ModuleDefId::AdtId(x) => {
                 let name = match x {
@@ -38,7 +36,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
                     hir_def::AdtId::UnionId(x) => db.union_data(x).name.to_smol_str(),
                     hir_def::AdtId::EnumId(x) => db.enum_data(x).name.to_smol_str(),
                 };
-                (name == "Goal").then(|| x)
+                (name == "Goal").then_some(x)
             }
             _ => None,
         })
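`cmd.args(&[...])` -> `cmd.args([...])` works because arrays are `IntoIterator` by value since the 2021 edition, so the borrow flagged by `needless_borrow` adds nothing. A runnable sketch that does not assume `rustc` is installed:

    use std::process::Command;

    fn main() {
        let mut cmd = Command::new("rustc");
        // Before: cmd.args(&["--version"]) -- the & is unnecessary since
        // arrays implement IntoIterator by value in edition 2021.
        cmd.args(["--version"]);
        let available = cmd.output().map(|o| o.status.success()).unwrap_or(false);
        println!("rustc available: {available}");
    }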

@@ -780,7 +780,7 @@ impl<'a> TyLoweringContext<'a> {
             |_, c, ty| {
                 const_or_path_to_chalk(
                     self.db,
-                    &self.resolver,
+                    self.resolver,
                     ty,
                     c,
                     self.type_param_mode,
@@ -1796,8 +1796,7 @@ pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binde
     let impl_data = db.impl_data(impl_id);
     let resolver = impl_id.resolver(db.upcast());
     let _cx = stdx::panic_context::enter(format!(
-        "impl_self_ty_query({:?} -> {:?} -> {:?})",
-        impl_id, impl_loc, impl_data
+        "impl_self_ty_query({impl_id:?} -> {impl_loc:?} -> {impl_data:?})"
     ));
     let generics = generics(db.upcast(), impl_id.into());
     let ctx =
@@ -1834,8 +1833,7 @@ pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<
     let impl_data = db.impl_data(impl_id);
     let resolver = impl_id.resolver(db.upcast());
     let _cx = stdx::panic_context::enter(format!(
-        "impl_trait_query({:?} -> {:?} -> {:?})",
-        impl_id, impl_loc, impl_data
+        "impl_trait_query({impl_id:?} -> {impl_loc:?} -> {impl_data:?})"
     ));
     let ctx =
         TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
@@ -1854,7 +1852,7 @@ pub(crate) fn return_type_impl_traits(
     let ctx_ret = TyLoweringContext::new(db, &resolver)
         .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
         .with_type_param_mode(ParamLoweringMode::Variable);
-    let _ret = (&ctx_ret).lower_ty(&data.ret_type);
+    let _ret = ctx_ret.lower_ty(&data.ret_type);
     let generics = generics(db.upcast(), def.into());
     let return_type_impl_traits =
         ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() };

@@ -714,7 +714,7 @@ fn lookup_impl_assoc_item_for_trait_ref(
     let impl_data = find_matching_impl(impls, table, trait_ref)?;
     impl_data.items.iter().find_map(|it| match it {
         AssocItemId::FunctionId(f) => {
-            (db.function_data(*f).name == *name).then(|| AssocItemId::FunctionId(*f))
+            (db.function_data(*f).name == *name).then_some(AssocItemId::FunctionId(*f))
         }
         AssocItemId::ConstId(c) => db
             .const_data(*c)
@@ -926,7 +926,7 @@ fn iterate_method_candidates_by_receiver(
     while let Some((self_ty, _)) = autoderef.next() {
         iterate_inherent_methods(
             &self_ty,
-            &mut autoderef.table,
+            autoderef.table,
             name,
             Some(&receiver_ty),
             Some(receiver_adjustments.clone()),
@@ -941,7 +941,7 @@ fn iterate_method_candidates_by_receiver(
     while let Some((self_ty, _)) = autoderef.next() {
         iterate_trait_method_candidates(
             &self_ty,
-            &mut autoderef.table,
+            autoderef.table,
             traits_in_scope,
             name,
             Some(&receiver_ty),
@@ -1246,7 +1246,7 @@ fn is_valid_candidate(
         let expected_self_ty = TyBuilder::impl_self_ty(db, impl_id)
             .fill_with_inference_vars(table)
             .build();
-        table.unify(&expected_self_ty, &self_ty)
+        table.unify(&expected_self_ty, self_ty)
     });
     if !self_ty_matches {
         cov_mark::hit!(const_candidate_self_type_mismatch);

@@ -55,13 +55,10 @@ impl TraitEnvironment {
         }
     }
-    pub fn traits_in_scope_from_clauses<'a>(
-        &'a self,
-        ty: Ty,
-    ) -> impl Iterator<Item = TraitId> + 'a {
+    pub fn traits_in_scope_from_clauses(&self, ty: Ty) -> impl Iterator<Item = TraitId> + '_ {
         self.traits_from_clauses
             .iter()
-            .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then(|| *trait_id))
+            .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then_some(*trait_id))
     }
 }

@@ -184,9 +184,7 @@ pub(crate) struct Generics {
 }
 impl Generics {
-    pub(crate) fn iter_id<'a>(
-        &'a self,
-    ) -> impl Iterator<Item = Either<TypeParamId, ConstParamId>> + 'a {
+    pub(crate) fn iter_id(&self) -> impl Iterator<Item = Either<TypeParamId, ConstParamId>> + '_ {
         self.iter().map(|(id, data)| match data {
             TypeOrConstParamData::TypeParamData(_) => Either::Left(TypeParamId::from_unchecked(id)),
             TypeOrConstParamData::ConstParamData(_) => {
@@ -216,9 +214,9 @@ impl Generics {
     }
     /// Iterator over types and const params of parent.
-    pub(crate) fn iter_parent<'a>(
-        &'a self,
-    ) -> impl DoubleEndedIterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
+    pub(crate) fn iter_parent(
+        &self,
+    ) -> impl DoubleEndedIterator<Item = (TypeOrConstParamId, &TypeOrConstParamData)> {
         self.parent_generics().into_iter().flat_map(|it| {
             let to_toc_id =
                 move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p);

@@ -608,7 +608,7 @@ impl Module {
     pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec<Macro> {
         let def_map = self.id.def_map(db.upcast());
         let scope = &def_map[self.id.local_id].scope;
-        scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| MacroId::from(it).into()).collect()
+        scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| it.into()).collect()
     }
     pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<Impl> {
@@ -1559,7 +1559,7 @@ impl Function {
     }
     pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
-        self.has_self_param(db).then(|| SelfParam { func: self.id })
+        self.has_self_param(db).then_some(SelfParam { func: self.id })
     }
     pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param> {
@@ -2411,7 +2411,7 @@ pub struct DeriveHelper {
 impl DeriveHelper {
     pub fn derive(&self) -> Macro {
-        Macro { id: self.derive.into() }
+        Macro { id: self.derive }
     }
     pub fn name(&self, db: &dyn HirDatabase) -> Name {
@@ -2781,7 +2781,7 @@ impl Impl {
     pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> {
         let krate = trait_.module(db).krate();
         let mut all = Vec::new();
-        for Crate { id } in krate.transitive_reverse_dependencies(db).into_iter() {
+        for Crate { id } in krate.transitive_reverse_dependencies(db) {
             let impls = db.trait_impls_in_crate(id);
             all.extend(impls.for_trait(trait_.id).map(Self::from))
         }

@@ -795,7 +795,7 @@ impl<'db> SemanticsImpl<'db> {
             // requeue the tokens we got from mapping our current token down
             stack.extend(mapped_tokens);
             // if the length changed we have found a mapping for the token
-            (stack.len() != len).then(|| ())
+            (stack.len() != len).then_some(())
         };
         // Remap the next token in the queue into a macro call its in, if it is not being remapped
@@ -1221,7 +1221,7 @@ impl<'db> SemanticsImpl<'db> {
         krate
             .dependencies(self.db)
             .into_iter()
-            .find_map(|dep| (dep.name == name).then(|| dep.krate))
+            .find_map(|dep| (dep.name == name).then_some(dep.krate))
     }
     fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
@@ -1252,7 +1252,7 @@ impl<'db> SemanticsImpl<'db> {
     fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
         let src = self.find_file(src.syntax()).with_value(src).cloned();
-        T::to_def(&self, src)
+        T::to_def(self, src)
     }
     fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {

@@ -228,7 +228,7 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         pat: &ast::Pat,
     ) -> Option<SmallVec<[Type; 1]>> {
-        let pat_id = self.pat_id(&pat)?;
+        let pat_id = self.pat_id(pat)?;
         let infer = self.infer.as_ref()?;
         Some(
             infer
@@ -270,7 +270,7 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         await_expr: &ast::AwaitExpr,
     ) -> Option<FunctionId> {
-        let mut ty = self.ty_of_expr(db, &await_expr.expr()?.into())?.clone();
+        let mut ty = self.ty_of_expr(db, &await_expr.expr()?)?.clone();
         let into_future_trait = self
             .resolver
@@ -316,7 +316,7 @@ impl SourceAnalyzer {
             ast::UnaryOp::Not => name![not],
             ast::UnaryOp::Neg => name![neg],
         };
-        let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?;
+        let ty = self.ty_of_expr(db, &prefix_expr.expr()?)?;
         let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
         // HACK: subst for all methods coincides with that for their trait because the methods
@@ -331,8 +331,8 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         index_expr: &ast::IndexExpr,
     ) -> Option<FunctionId> {
-        let base_ty = self.ty_of_expr(db, &index_expr.base()?.into())?;
-        let index_ty = self.ty_of_expr(db, &index_expr.index()?.into())?;
+        let base_ty = self.ty_of_expr(db, &index_expr.base()?)?;
+        let index_ty = self.ty_of_expr(db, &index_expr.index()?)?;
         let lang_item_name = name![index];
@@ -352,8 +352,8 @@ impl SourceAnalyzer {
         binop_expr: &ast::BinExpr,
     ) -> Option<FunctionId> {
         let op = binop_expr.op_kind()?;
-        let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?;
-        let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?;
+        let lhs = self.ty_of_expr(db, &binop_expr.lhs()?)?;
+        let rhs = self.ty_of_expr(db, &binop_expr.rhs()?)?;
         let (op_trait, op_fn) = lang_names_for_bin_op(op)
             .and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?;
@@ -372,7 +372,7 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         try_expr: &ast::TryExpr,
     ) -> Option<FunctionId> {
-        let ty = self.ty_of_expr(db, &try_expr.expr()?.into())?;
+        let ty = self.ty_of_expr(db, &try_expr.expr()?)?;
         let op_fn =
             db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?;
@@ -824,7 +824,7 @@ impl SourceAnalyzer {
     }
     fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> {
-        self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, &expr)?)
+        self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, expr)?)
     }
 }
@@ -987,7 +987,7 @@ fn resolve_hir_path_(
                             db,
                             def,
                             res.in_type_ns()?,
-                            |name, id| (name == unresolved.name).then(|| id),
+                            |name, id| (name == unresolved.name).then_some(id),
                         )
                     })
                     .map(TypeAlias::from)

@@ -326,7 +326,7 @@ impl ExtendedEnum {
 fn resolve_enum_def(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> {
     sema.type_of_expr(expr)?.adjusted().autoderef(sema.db).find_map(|ty| match ty.as_adt() {
         Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
-        _ => ty.is_bool().then(|| ExtendedEnum::Bool),
+        _ => ty.is_bool().then_some(ExtendedEnum::Bool),
     })
 }
@@ -344,7 +344,7 @@ fn resolve_tuple_of_enum_def(
                 // For now we only handle expansion for a tuple of enums. Here
                 // we map non-enum items to None and rely on `collect` to
                 // convert Vec<Option<hir::Enum>> into Option<Vec<hir::Enum>>.
-                _ => ty.is_bool().then(|| ExtendedEnum::Bool),
+                _ => ty.is_bool().then_some(ExtendedEnum::Bool),
             })
         })
         .collect()

@@ -35,16 +35,16 @@ pub(crate) fn add_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
             match builder_edit_pos {
                 InsertOrReplace::Insert(insert_pos, needs_whitespace) => {
                     let preceeding_whitespace = if needs_whitespace { " " } else { "" };
-                    builder.insert(insert_pos, &format!("{preceeding_whitespace}-> {ty} "))
+                    builder.insert(insert_pos, format!("{preceeding_whitespace}-> {ty} "))
                 }
                 InsertOrReplace::Replace(text_range) => {
-                    builder.replace(text_range, &format!("-> {ty}"))
+                    builder.replace(text_range, format!("-> {ty}"))
                 }
             }
             if let FnType::Closure { wrap_expr: true } = fn_type {
                 cov_mark::hit!(wrap_closure_non_block_expr);
                 // `|x| x` becomes `|x| -> T x` which is invalid, so wrap it in a block
-                builder.replace(tail_expr.syntax().text_range(), &format!("{{{tail_expr}}}"));
+                builder.replace(tail_expr.syntax().text_range(), format!("{{{tail_expr}}}"));
             }
         },
     )

@@ -203,7 +203,7 @@ fn relevance_score(
         // get the distance between the imported path and the current module
         // (prefer items that are more local)
         Some((item_module, current_module)) => {
-            score -= module_distance_hueristic(db, &current_module, &item_module) as i32;
+            score -= module_distance_hueristic(db, current_module, &item_module) as i32;
        }
        // could not find relevant modules, so just use the length of the path as an estimate

@@ -216,7 +216,7 @@ fn validate_method_call_expr(
     let krate = module.krate();
     let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
-    it_type.impls_trait(sema.db, iter_trait, &[]).then(|| (expr, receiver))
+    it_type.impls_trait(sema.db, iter_trait, &[]).then_some((expr, receiver))
 }
 #[cfg(test)]

@@ -588,7 +588,7 @@ impl FunctionBody {
             FunctionBody::Expr(expr) => Some(expr.clone()),
             FunctionBody::Span { parent, text_range } => {
                 let tail_expr = parent.tail_expr()?;
-                text_range.contains_range(tail_expr.syntax().text_range()).then(|| tail_expr)
+                text_range.contains_range(tail_expr.syntax().text_range()).then_some(tail_expr)
             }
         }
     }

@@ -118,13 +118,13 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
     let mut body_items: Vec<String> = Vec::new();
     let mut items_to_be_processed: Vec<ast::Item> = module.body_items.clone();
-    let mut new_item_indent = old_item_indent + 1;
-    if impl_parent.is_some() {
-        new_item_indent = old_item_indent + 2;
+    let new_item_indent = if impl_parent.is_some() {
+        old_item_indent + 2
     } else {
         items_to_be_processed = [module.use_items.clone(), items_to_be_processed].concat();
-    }
+        old_item_indent + 1
+    };
 
     for item in items_to_be_processed {
         let item = item.indent(IndentLevel(1));
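The `extract_module` hunk replaces "declare `mut`, overwrite in one branch" with a single `let` bound to an `if`/`else` expression (the style clippy's `useless_let_if_seq` lint nudges toward); as a bonus the binding no longer needs `mut`. A reduced sketch with stand-in values:

    fn main() {
        let impl_parent = true;
        let old_item_indent = 1;
        // Before:
        //     let mut new_item_indent = old_item_indent + 1;
        //     if impl_parent { new_item_indent = old_item_indent + 2; }
        let new_item_indent =
            if impl_parent { old_item_indent + 2 } else { old_item_indent + 1 };
        assert_eq!(new_item_indent, 3);
    }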

@@ -178,7 +178,7 @@ fn extract_generic_params(
             .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged),
     };
-    let generics = generics.into_iter().filter_map(|(param, tag)| tag.then(|| param));
+    let generics = generics.into_iter().filter_map(|(param, tag)| tag.then_some(param));
     tagged_one.then(|| make::generic_param_list(generics))
 }

@@ -53,7 +53,7 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
         return None;
     }
-    let impl_ = fn_node.syntax().ancestors().into_iter().find_map(ast::Impl::cast)?;
+    let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?;
     if is_default_implemented(ctx, &impl_) {
         cov_mark::hit!(default_block_is_already_present);
         cov_mark::hit!(struct_in_module_with_default);

@@ -85,8 +85,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
     let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
     let field = ctx.find_node_at_offset::<ast::TupleField>()?;
     let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
-    let field_list_index =
-        field_list.syntax().children().into_iter().position(|s| &s == field.syntax())?;
+    let field_list_index = field_list.syntax().children().position(|s| &s == field.syntax())?;
     let deref_type_to_generate = match existing_deref_impl(&ctx.sema, &strukt) {
         None => DerefType::Deref,

@@ -180,7 +180,7 @@ fn make_tuple_field_list(
 ) -> Option<ast::FieldList> {
     let args = call_expr.arg_list()?.args();
     let tuple_fields = args.map(|arg| {
-        let ty = expr_ty(ctx, arg, &scope).unwrap_or_else(make::ty_placeholder);
+        let ty = expr_ty(ctx, arg, scope).unwrap_or_else(make::ty_placeholder);
         make::tuple_field(None, ty)
     });
     Some(make::tuple_field_list(tuple_fields).into())

@@ -514,7 +514,7 @@ fn fn_args(
 /// vec!["foo_1".into(), "foo_2".into(), "bar_1".into(), "baz".into(), "bar_2".into()];
 /// assert_eq!(names, expected);
 /// ```
-fn deduplicate_arg_names(arg_names: &mut Vec<String>) {
+fn deduplicate_arg_names(arg_names: &mut [String]) {
     let mut arg_name_counts = FxHashMap::default();
     for name in arg_names.iter() {
         *arg_name_counts.entry(name).or_insert(0) += 1;
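`&mut Vec<String>` -> `&mut [String]` is clippy's `ptr_arg`: the function only edits elements in place, so a mutable slice is the more general contract (it also accepts arrays and boxed slices) and does not advertise resizing it never does. A sketch whose body is a stand-in, not the real helper:

    fn deduplicate_arg_names(arg_names: &mut [String]) {
        // Editing elements needs only a slice, not the owning Vec.
        for (i, name) in arg_names.iter_mut().enumerate() {
            name.push_str(&format!("_{i}"));
        }
    }

    fn main() {
        let mut names = vec!["foo".to_string(), "foo".to_string()];
        deduplicate_arg_names(&mut names);
        assert_eq!(names, ["foo_0", "foo_1"]);
    }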

@@ -176,7 +176,7 @@ pub(crate) fn generate_getter_impl(
             // for separating it from other assoc items, that needs
             // to be handled spearately
             let mut getter_buf =
-                generate_getter_from_info(ctx, &getter_info, &record_field_info);
+                generate_getter_from_info(ctx, &getter_info, record_field_info);
             // Insert `$0` only for last getter we generate
             if i == record_fields_count - 1 {
@@ -271,7 +271,7 @@ fn generate_getter_from_info(
         }}",
         vis,
         record_field_info.fn_name,
-        info.mutable.then(|| "mut ").unwrap_or_default(),
+        info.mutable.then_some("mut ").unwrap_or_default(),
         ty,
         body,
     );

@@ -70,7 +70,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
         )?;
         let expr = use_trivial_constructor(
-            &ctx.sema.db,
+            ctx.sema.db,
             ide_db::helpers::mod_path_to_ast(&type_path),
             &ty,
         )?;

@@ -138,7 +138,7 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
             replacement = Replacement::Plain;
         }
         _ => {
-            let alias = get_type_alias(&ctx, &alias_instance)?;
+            let alias = get_type_alias(ctx, &alias_instance)?;
             concrete_type = alias.ty()?;
             replacement = inline(&alias, &alias_instance)?;
         }
@@ -158,7 +158,7 @@ impl Replacement {
     fn to_text(&self, concrete_type: &ast::Type) -> String {
         match self {
             Replacement::Generic { lifetime_map, const_and_type_map } => {
-                create_replacement(&lifetime_map, &const_and_type_map, &concrete_type)
+                create_replacement(lifetime_map, const_and_type_map, concrete_type)
             }
             Replacement::Plain => concrete_type.to_string(),
         }
@@ -240,7 +240,7 @@ impl ConstAndTypeMap {
     ) -> Option<Self> {
         let mut inner = HashMap::new();
         let instance_generics = generic_args_to_const_and_type_generics(instance_args);
-        let alias_generics = generic_param_list_to_const_and_type_generics(&alias_generics);
+        let alias_generics = generic_param_list_to_const_and_type_generics(alias_generics);
         if instance_generics.len() > alias_generics.len() {
             cov_mark::hit!(too_many_generic_args);

@@ -64,7 +64,7 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
     let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]);
     let input_expressions = input_expressions
         .into_iter()
-        .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+        .filter_map(|(is_sep, group)| (!is_sep).then_some(group))
         .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
         .collect::<Option<Vec<ast::Expr>>>()?;

@@ -42,7 +42,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
     let r_angle = generic_args.r_angle_token()?;
     let turbofish_range = TextRange::new(colon2.text_range().start(), r_angle.text_range().end());
-    let turbofish_args: Vec<GenericArg> = generic_args.generic_args().into_iter().collect();
+    let turbofish_args: Vec<GenericArg> = generic_args.generic_args().collect();
     // Find type of ::<_>
     if turbofish_args.len() != 1 {

@@ -107,7 +107,7 @@ fn find_all_references(
 /// If no await expression is found, returns None.
 fn find_await_expression(ctx: &AssistContext<'_>, nameref: &NameRef) -> Option<ast::AwaitExpr> {
     // From the nameref, walk up the tree to the await expression.
-    let await_expr = if let Some(path) = full_path_of_name_ref(&nameref) {
+    let await_expr = if let Some(path) = full_path_of_name_ref(nameref) {
         // Function calls.
         path.syntax()
             .parent()

@@ -95,8 +95,7 @@ impl Assist {
                 let id = block.id;
                 assert!(
                     id.chars().all(|it| it.is_ascii_lowercase() || it == '_'),
-                    "invalid assist id: {:?}",
-                    id
+                    "invalid assist id: {id:?}"
                 );
                 let mut lines = block.contents.iter().peekable();
                 let location = sourcegen::Location { file: path.to_path_buf(), line: block.line };

@@ -613,7 +613,7 @@ pub(crate) fn convert_reference_type(
 }
 fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option<ReferenceConversionType> {
-    ty.is_copy(db).then(|| ReferenceConversionType::Copy)
+    ty.is_copy(db).then_some(ReferenceConversionType::Copy)
 }
 fn handle_as_ref_str(
@@ -624,7 +624,7 @@ fn handle_as_ref_str(
     let str_type = hir::BuiltinType::str().ty(db);
     ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type])
-        .then(|| ReferenceConversionType::AsRefStr)
+        .then_some(ReferenceConversionType::AsRefStr)
 }
 fn handle_as_ref_slice(
@@ -636,7 +636,7 @@ fn handle_as_ref_slice(
     let slice_type = hir::Type::new_slice(type_argument);
     ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type])
-        .then(|| ReferenceConversionType::AsRefSlice)
+        .then_some(ReferenceConversionType::AsRefSlice)
 }
 fn handle_dereferenced(
@@ -647,7 +647,7 @@ fn handle_dereferenced(
     let type_argument = ty.type_arguments().next()?;
     ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument])
-        .then(|| ReferenceConversionType::Dereferenced)
+        .then_some(ReferenceConversionType::Dereferenced)
 }
 fn handle_option_as_ref(

@@ -419,7 +419,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
     make::ext::path_from_idents(["Self", &variant.name()?.to_string()])
 }
-fn gen_tuple_field(field_name: &String) -> ast::Pat {
+fn gen_tuple_field(field_name: &str) -> ast::Pat {
     ast::Pat::IdentPat(make::ident_pat(false, false, make::name(field_name)))
 }
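`&String` -> `&str` is the read-only side of `ptr_arg`: `&str` accepts string literals, slices of a `String`, and (by deref coercion) `&String`, so the narrower parameter type costs callers nothing. Illustrative sketch, with a simplified return type:

    fn gen_tuple_field(field_name: &str) -> String {
        format!("{field_name}: _")
    }

    fn main() {
        assert_eq!(gen_tuple_field("x"), "x: _");
        let owned = String::from("y");
        assert_eq!(gen_tuple_field(&owned), "y: _"); // &String coerces to &str
    }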

@@ -357,7 +357,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
     Some(
         input_expressions
             .into_iter()
-            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+            .filter_map(|(is_sep, group)| (!is_sep).then_some(group))
             .filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
             .collect::<Vec<ast::Expr>>(),
     )
@@ -371,9 +371,7 @@ fn attributes_are_sorted() {
     attrs.for_each(|next| {
         assert!(
             prev < next,
-            r#"ATTRIBUTES array is not sorted, "{}" should come after "{}""#,
-            prev,
-            next
+            r#"ATTRIBUTES array is not sorted, "{prev}" should come after "{next}""#
         );
         prev = next;
     });

@@ -32,12 +32,12 @@ pub(crate) fn complete_dot(
         complete_fields(
             acc,
             ctx,
-            &receiver_ty,
+            receiver_ty,
             |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
             |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
         );
     }
-    complete_methods(ctx, &receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
+    complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
 }
 pub(crate) fn complete_undotted_self(

@@ -68,28 +68,26 @@ mod tests {
             &format!(
                 r#"
 #[rustc_builtin_macro]
-macro_rules! {} {{
+macro_rules! {macro_name} {{
     ($var:literal) => {{ 0 }}
 }}
 fn main() {{
-    let foo = {}!("CAR$0");
+    let foo = {macro_name}!("CAR$0");
 }}
-            "#,
-                macro_name, macro_name
+            "#
             ),
             &format!(
                 r#"
 #[rustc_builtin_macro]
-macro_rules! {} {{
+macro_rules! {macro_name} {{
     ($var:literal) => {{ 0 }}
 }}
 fn main() {{
-    let foo = {}!("CARGO_BIN_NAME");
+    let foo = {macro_name}!("CARGO_BIN_NAME");
 }}
-            "#,
-                macro_name, macro_name
+            "#
             ),
         );
     }

View file
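
Hunks like the one above apply clippy's `uninlined_format_args`: since Rust 1.58, identifiers can be captured directly in format strings, so repeated positional arguments disappear. A self-contained sketch (the string content is invented):

fn main() {
    let macro_name = "env";
    // Positional arguments, duplicated once per `{}` placeholder...
    let old = format!("macro_rules! {} {{ /* ... */ }} // {}", macro_name, macro_name);
    // ...versus the identifier captured inline (note `{{`/`}}` escape braces).
    let new = format!("macro_rules! {macro_name} {{ /* ... */ }} // {macro_name}");
    assert_eq!(old, new);
}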

@@ -64,7 +64,7 @@ pub(crate) fn complete_expr_path(
             acc.add_enum_variants(ctx, path_ctx, e);
         }
-        ctx.iterate_path_candidates(&ty, |item| {
+        ctx.iterate_path_candidates(ty, |item| {
             add_assoc_item(acc, item);
         });

View file

@@ -13,7 +13,7 @@ pub(crate) fn format_string(
     original: &ast::String,
     expanded: &ast::String,
 ) {
-    if !is_format_string(&expanded) {
+    if !is_format_string(expanded) {
         return;
     }
     let cursor = ctx.position.offset;

View file

@@ -845,11 +845,10 @@ trait Test {{
 struct T;
 impl Test for T {{
-{}
-{}
+{hint}
+{next_sibling}
 }}
-"#,
-            hint, next_sibling
+"#
         ),
         &format!(
             r#"
@@ -861,11 +860,10 @@ trait Test {{
 struct T;
 impl Test for T {{
-{}
-{}
+{completed}
+{next_sibling}
 }}
-"#,
-            completed, next_sibling
+"#
         ),
     )
 };
@@ -905,10 +903,9 @@ struct T;
 impl Foo for T {{
     // Comment
     #[bar]
-{}
+{hint}
 }}
-"#,
-            hint
+"#
         ),
         &format!(
             r#"
@@ -922,10 +919,9 @@ struct T;
 impl Foo for T {{
     // Comment
     #[bar]
-{}
+{completed}
 }}
-"#,
-            completed
+"#
         ),
     )
 };

View file

@@ -153,8 +153,7 @@ pub(crate) fn complete_postfix(
         "match",
         "match expr {}",
         &format!(
-            "match {} {{\n    Some(${{1:_}}) => {{$2}},\n    None => {{$0}},\n}}",
-            receiver_text
+            "match {receiver_text} {{\n    Some(${{1:_}}) => {{$2}},\n    None => {{$0}},\n}}"
         ),
     )
     .add_to(acc);

View file

@@ -58,7 +58,7 @@ pub(crate) fn complete_type_path(
             trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item))
         }
         Qualified::TypeAnchor { ty: Some(ty), trait_: None } => {
-            ctx.iterate_path_candidates(&ty, |item| {
+            ctx.iterate_path_candidates(ty, |item| {
                 add_assoc_item(acc, item);
             });

View file

@@ -226,7 +226,7 @@ fn analyze(
             find_node_at_offset(&file_with_fake_ident, offset)
         {
             let parent = name_ref.syntax().parent()?;
-            let (mut nameref_ctx, _) = classify_name_ref(&sema, &original_file, name_ref, parent)?;
+            let (mut nameref_ctx, _) = classify_name_ref(sema, &original_file, name_ref, parent)?;
             if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
                 path_ctx.kind = PathKind::Derive {
                     existing_derives: sema
@@ -277,7 +277,7 @@ fn analyze(
             return Some((analysis, (None, None), QualifierCtx::default()));
         }
     };
-    let expected = expected_type_and_name(sema, &self_token, &name_like);
+    let expected = expected_type_and_name(sema, self_token, &name_like);
     let mut qual_ctx = QualifierCtx::default();
     let analysis = match name_like {
         ast::NameLike::Lifetime(lifetime) => {
@@ -374,7 +374,7 @@ fn expected_type_and_name(
             ast::ArgList(_) => {
                 cov_mark::hit!(expected_type_fn_param);
                 ActiveParameter::at_token(
-                    &sema,
+                    sema,
                     token.clone(),
                 ).map(|ap| {
                     let name = ap.ident().map(NameOrNameRef::Name);
@@ -507,7 +507,7 @@ fn classify_lifetime(
             _ => LifetimeKind::Lifetime,
         }
     };
-    let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
+    let lifetime = find_node_at_offset(original_file, lifetime.syntax().text_range().start());
     Some(LifetimeContext { lifetime, kind })
 }
@@ -548,7 +548,7 @@ fn classify_name(
             _ => return None,
         }
     };
-    let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
+    let name = find_node_at_offset(original_file, name.syntax().text_range().start());
     Some(NameContext { name, kind })
 }
@@ -558,7 +558,7 @@ fn classify_name_ref(
     name_ref: ast::NameRef,
     parent: SyntaxNode,
 ) -> Option<(NameRefContext, QualifierCtx)> {
-    let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
+    let nameref = find_node_at_offset(original_file, name_ref.syntax().text_range().start());
     let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());

View file

@@ -68,7 +68,7 @@ pub(crate) fn render_union_literal(
     item.set_documentation(ctx.docs(un))
         .set_deprecated(ctx.is_deprecated(un))
-        .detail(&detail)
+        .detail(detail)
         .set_relevance(ctx.completion_relevance());
     match ctx.snippet_cap() {

View file

@@ -183,7 +183,7 @@ pub(crate) fn check_edit_with_config(
     let ra_fixture_after = trim_indent(ra_fixture_after);
     let (db, position) = position(ra_fixture_before);
     let completions: Vec<CompletionItem> =
-        crate::completions(&db, &config, position, None).unwrap().into();
+        crate::completions(&db, &config, position, None).unwrap();
     let (completion,) = completions
         .iter()
         .filter(|it| it.lookup() == what)

View file

@@ -91,7 +91,7 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior)
         .flat_map(|list| list.use_trees())
         // We use Option here to early return from this function(this is not the
         // same as a `filter` op).
-        .map(|tree| merge.is_tree_allowed(&tree).then(|| tree))
+        .map(|tree| merge.is_tree_allowed(&tree).then_some(tree))
         .collect::<Option<_>>()?;
     use_trees.sort_unstable_by(|a, b| path_cmp_for_sort(a.path(), b.path()));
     for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) {

View file

@@ -608,7 +608,7 @@ impl<'a> FindUsages<'a> {
                 let reference = FileReference {
                     range,
                     name: ast::NameLike::NameRef(name_ref.clone()),
-                    category: is_name_ref_in_import(name_ref).then(|| ReferenceCategory::Import),
+                    category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import),
                 };
                 sink(file_id, reference)
             }
@@ -787,7 +787,7 @@ impl ReferenceCategory {
     fn new(def: &Definition, r: &ast::NameRef) -> Option<ReferenceCategory> {
         // Only Locals and Fields have accesses for now.
         if !matches!(def, Definition::Local(_) | Definition::Field(_)) {
-            return is_name_ref_in_import(r).then(|| ReferenceCategory::Import);
+            return is_name_ref_in_import(r).then_some(ReferenceCategory::Import);
         }
         let mode = r.syntax().ancestors().find_map(|node| {

View file

@@ -452,7 +452,7 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Pat
     let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
     let paths = input_expressions
         .into_iter()
-        .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+        .filter_map(|(is_sep, group)| (!is_sep).then_some(group))
         .filter_map(|mut tokens| {
             syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
                 ast::Expr::PathExpr(it) => it.path(),

View file

@@ -128,9 +128,9 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
         )?;
         use_trivial_constructor(
-            &ctx.sema.db,
+            ctx.sema.db,
             ide_db::helpers::mod_path_to_ast(&type_path),
-            &ty,
+            ty,
         )
     })();

View file

@@ -68,7 +68,7 @@ fn missing_record_expr_field_fixes(
     }
     let new_field = make::record_field(
         None,
-        make::name(&record_expr_field.field_name()?.ident_token()?.text()),
+        make::name(record_expr_field.field_name()?.ident_token()?.text()),
         make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?),
     );

View file

@@ -229,7 +229,7 @@ pub fn diagnostics(
     for node in parse.syntax().descendants() {
         handlers::useless_braces::useless_braces(&mut res, file_id, &node);
         handlers::field_shorthand::field_shorthand(&mut res, file_id, &node);
-        handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, &config);
+        handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, config);
     }
     let module = sema.to_module_def(file_id);

View file

@@ -11,7 +11,7 @@ fn sourcegen_diagnostic_docs() {
         diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
     let contents = sourcegen::add_preamble("sourcegen_diagnostic_docs", contents);
     let dst = project_root().join("docs/user/generated_diagnostic.adoc");
-    fs::write(&dst, &contents).unwrap();
+    fs::write(dst, contents).unwrap();
 }
 #[derive(Debug)]

View file

@@ -273,7 +273,7 @@ impl DocCommentToken {
             let (in_expansion_range, link, ns) =
                 extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
                     let mapped = doc_mapping.map(range)?;
-                    (mapped.value.contains(abs_in_expansion_offset)).then(|| (mapped.value, link, ns))
+                    (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns))
                 })?;
             // get the relative range to the doc/attribute in the expansion
             let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;
@@ -285,7 +285,7 @@ impl DocCommentToken {
     }
 }
-fn broken_link_clone_cb<'a>(link: BrokenLink<'a>) -> Option<(CowStr<'a>, CowStr<'a>)> {
+fn broken_link_clone_cb(link: BrokenLink<'_>) -> Option<(CowStr<'_>, CowStr<'_>)> {
     Some((/*url*/ link.reference.clone(), /*title*/ link.reference))
 }

View file

@@ -163,7 +163,7 @@ fn _format(
 ) -> Option<String> {
     use ide_db::base_db::{FileLoader, SourceDatabase};
     // hack until we get hygiene working (same character amount to preserve formatting as much as possible)
-    const DOLLAR_CRATE_REPLACE: &str = &"__r_a_";
+    const DOLLAR_CRATE_REPLACE: &str = "__r_a_";
     let expansion = expansion.replace("$crate", DOLLAR_CRATE_REPLACE);
     let (prefix, suffix) = match kind {
         SyntaxKind::MACRO_PAT => ("fn __(", ": u32);"),

View file

@@ -205,7 +205,7 @@ fn extend_single_word_in_comment_or_string(
     }
     let start_idx = before.rfind(non_word_char)? as u32;
-    let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32;
+    let end_idx = after.find(non_word_char).unwrap_or(after.len()) as u32;
     let from: TextSize = (start_idx + 1).into();
     let to: TextSize = (cursor_position + end_idx).into();

View file
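
The `unwrap_or_else(|| after.len())` to `unwrap_or(after.len())` change above is clippy's `unnecessary_lazy_evaluations` again: a closure buys nothing when the fallback is a cheap, already-available value. A sketch with invented input:

fn main() {
    let after = "word-boundary";
    // Find the end of the current word; fall back to the string length.
    // `unwrap_or`'s argument is evaluated eagerly, which is fine here.
    let end_idx = after.find(|c: char| !c.is_alphanumeric()).unwrap_or(after.len());
    assert_eq!(end_idx, 4); // "word" ends before the '-'
}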

@@ -110,7 +110,7 @@ fn impls_for_trait_item(
         .filter_map(|imp| {
             let item = imp.items(sema.db).iter().find_map(|itm| {
                 let itm_name = itm.name(sema.db)?;
-                (itm_name == fun_name).then(|| *itm)
+                (itm_name == fun_name).then_some(*itm)
             })?;
             item.try_to_nav(sema.db)
         })

View file

@@ -110,7 +110,7 @@ fn highlight_references(
         .and_then(|decl| decl.focus_range)
         .map(|range| {
             let category =
-                references::decl_mutability(&def, node, range).then(|| ReferenceCategory::Write);
+                references::decl_mutability(&def, node, range).then_some(ReferenceCategory::Write);
             HighlightedRange { range, category }
         });
     if let Some(hl_range) = hl_range {
@@ -365,7 +365,7 @@ mod tests {
         let mut expected = annotations
             .into_iter()
-            .map(|(r, access)| (r.range, (!access.is_empty()).then(|| access)))
+            .map(|(r, access)| (r.range, (!access.is_empty()).then_some(access)))
             .collect::<Vec<_>>();
         let mut actual = hls

View file

@@ -195,7 +195,7 @@ pub(crate) fn hover(
         // fallback to type hover if there aren't any other suggestions
         // this finds its own range instead of using the closest token's range
         .or_else(|| {
-            descended.iter().find_map(|token| hover_type_fallback(sema, config, token, &token))
+            descended.iter().find_map(|token| hover_type_fallback(sema, config, token, token))
         })
 }

View file

@@ -276,7 +276,7 @@ pub(super) fn struct_rest_pat(
         }
     };
     for (_, t) in &missing_fields {
-        walk_and_push_ty(sema.db, &t, &mut push_new_def);
+        walk_and_push_ty(sema.db, t, &mut push_new_def);
     }
     res.markup = {
View file

@@ -253,7 +253,7 @@ fn label_of_ty(
     ty: hir::Type,
     label_builder: &mut InlayHintLabelBuilder<'_>,
 ) {
-    let iter_item_type = hint_iterator(sema, &famous_defs, &ty);
+    let iter_item_type = hint_iterator(sema, famous_defs, &ty);
     match iter_item_type {
         Some(ty) => {
             const LABEL_START: &str = "impl Iterator<Item = ";
@@ -279,7 +279,7 @@ fn label_of_ty(
         location_link_enabled: config.location_links,
         result: InlayHintLabel::default(),
     };
-    rec(sema, &famous_defs, config.max_length, ty, &mut label_builder);
+    rec(sema, famous_defs, config.max_length, ty, &mut label_builder);
     let r = label_builder.finish();
     Some(r)
 }
@@ -315,7 +315,7 @@ pub(crate) fn inlay_hints(
     let mut acc = Vec::new();
-    if let Some(scope) = sema.scope(&file) {
+    if let Some(scope) = sema.scope(file) {
         let famous_defs = FamousDefs(&sema, scope.krate());
         let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);

View file

@@ -167,7 +167,7 @@ fn is_named_constructor(
         ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(),
         _ => return None,
     };
-    (ctor_name == ty_name).then(|| ())
+    (ctor_name == ty_name).then_some(())
 }
 fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir::Type) -> bool {

View file

@@ -31,7 +31,7 @@ pub(super) fn hints(
         .last();
     let range =
         outer_paren_pat.as_ref().map_or_else(|| pat.syntax(), |it| it.syntax()).text_range();
-    let pattern_adjustments = sema.pattern_adjustments(&pat);
+    let pattern_adjustments = sema.pattern_adjustments(pat);
     pattern_adjustments.iter().for_each(|ty| {
         let reference = ty.is_reference();
         let mut_reference = ty.is_mutable_reference();

View file

@@ -364,11 +364,8 @@ mod tests {
             }
             Err(err) => {
                 if ra_fixture_after.starts_with("error:") {
-                    let error_message = ra_fixture_after
-                        .chars()
-                        .into_iter()
-                        .skip("error:".len())
-                        .collect::<String>();
+                    let error_message =
+                        ra_fixture_after.chars().skip("error:".len()).collect::<String>();
                     assert_eq!(error_message.trim(), err.to_string());
                 } else {
                     panic!("Rename to '{new_name}' failed unexpectedly: {err}")

View file
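
The dropped `.into_iter()` above is clippy's `useless_conversion`: `str::chars` already yields an iterator, so converting it again is a no-op. A standalone sketch with an invented fixture string:

fn main() {
    let fixture = "error: no such field";
    // Skip the "error:" prefix and collect the remainder directly.
    let message: String = fixture.chars().skip("error:".len()).collect();
    assert_eq!(message.trim(), "no such field");
}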

@@ -111,7 +111,7 @@ fn punctuation(
     let is_raw_ptr = (|| {
         let prefix_expr = parent.and_then(ast::PrefixExpr::cast)?;
         let expr = prefix_expr.expr()?;
-        sema.type_of_expr(&expr)?.original.is_raw_ptr().then(|| ())
+        sema.type_of_expr(&expr)?.original.is_raw_ptr().then_some(())
     })();
     if let Some(()) = is_raw_ptr {
         HlTag::Operator(HlOperator::Other) | HlMod::Unsafe

View file

@@ -140,7 +140,7 @@ impl Shift {
                 | tt::Leaf::Punct(tt::Punct { id, .. })
                 | tt::Leaf::Literal(tt::Literal { id, .. })) = leaf;
-                (id != tt::TokenId::unspecified()).then(|| id.0)
+                (id != tt::TokenId::unspecified()).then_some(id.0)
             }
         };
         subtree.token_trees.iter().filter_map(filter).max()

View file

@@ -116,9 +116,9 @@ enum Mode {
     Template,
 }
-fn next_op<'a>(
+fn next_op(
     first_peeked: &tt::TokenTree,
-    src: &mut TtIter<'a>,
+    src: &mut TtIter<'_>,
     mode: Mode,
 ) -> Result<Op, ParseError> {
     let res = match first_peeked {
@@ -273,7 +273,7 @@ fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind),
                 _ => return Err(ParseError::InvalidRepeat),
             },
         };
-        return Ok((has_sep.then(|| separator), repeat_kind));
+        return Ok((has_sep.then_some(separator), repeat_kind));
     }
 }

View file
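
Eliding the `<'a>` on `next_op` above follows clippy's `needless_lifetimes`: a lifetime parameter used in exactly one position can be replaced with `'_` without changing the signature's meaning. A sketch with hypothetical types:

struct Iter<'a> {
    items: &'a [u32],
}

// The two signatures below are equivalent; the second avoids naming `'a`.
fn sum_explicit<'a>(it: &mut Iter<'a>) -> u32 {
    it.items.iter().sum()
}

fn sum_elided(it: &mut Iter<'_>) -> u32 {
    it.items.iter().sum()
}

fn main() {
    let mut it = Iter { items: &[1, 2, 3] };
    assert_eq!(sum_explicit(&mut it), sum_elided(&mut it));
}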

@@ -145,7 +145,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     }
     if iter.peek_n(0).is_some() {
-        res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
+        res.push(tt::Subtree { delimiter: None, token_trees: iter.cloned().collect() });
     }
     res

View file

@@ -30,7 +30,7 @@ fn check_punct_spacing(fixture: &str) {
     while !cursor.eof() {
         while let Some(token_tree) = cursor.token_tree() {
             if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, id, .. }), _) = token_tree {
-                if let Some(expected) = annotations.remove(&id) {
+                if let Some(expected) = annotations.remove(id) {
                     assert_eq!(expected, *spacing);
                 }
             }

View file

@@ -83,11 +83,12 @@ fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) {
         }
         p.expect(T![>]);
     } else {
-        let mut empty = true;
-        if first {
+        let empty = if first {
             p.eat(T![::]);
-            empty = false;
-        }
+            false
+        } else {
+            true
+        };
         match p.current() {
             IDENT => {
                 name_ref(p);

View file
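
The parser change above replaces a `let mut` flag plus conditional reassignment with a single `if` expression initializing an immutable binding. A standalone sketch; the `log` side effect is hypothetical, standing in for `p.eat(T![::])`:

fn segment_is_empty(first: bool, log: &mut Vec<&'static str>) -> bool {
    let empty = if first {
        log.push("ate leading `::`"); // the side effect still runs in the branch
        false
    } else {
        true
    };
    empty
}

fn main() {
    let mut log = Vec::new();
    assert!(!segment_is_empty(true, &mut log));
    assert!(segment_is_empty(false, &mut log));
    assert_eq!(log.len(), 1);
}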

@@ -54,7 +54,7 @@ impl Output {
     }
     pub(crate) fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
-        let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | (0 << 4) | 1;
+        let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | 1;
         self.event.push(e)
     }

View file
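
Dropping `| (0 << 4)` above (and `| 0b00` in the next hunk) is clippy's `identity_op`: ORing in zero never changes the result, so the term only obscured the bit layout. A quick check with made-up field values:

fn main() {
    let kind: u32 = 7;
    let n_tokens: u32 = 2;
    // Both encodings produce the same packed event word.
    #[allow(clippy::identity_op)]
    let with_noop = (kind << 16) | (n_tokens << 8) | (0 << 4) | 1;
    let without = (kind << 16) | (n_tokens << 8) | 1;
    assert_eq!(with_noop, without);
}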

@@ -210,7 +210,7 @@ impl<'a> Writer<'a> {
             let idx_tag = match child {
                 tt::TokenTree::Subtree(it) => {
                     let idx = self.enqueue(it);
-                    idx << 2 | 0b00
+                    idx << 2
                 }
                 tt::TokenTree::Leaf(leaf) => match leaf {
                     tt::Leaf::Literal(lit) => {

View file

@@ -182,7 +182,7 @@ impl server::TokenStream for RustAnalyzer {
             .map(|tree| match tree {
                 tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
                     bridge::TokenTree::Ident(bridge::Ident {
-                        sym: Symbol::intern(&ident.text.trim_start_matches("r#")),
+                        sym: Symbol::intern(ident.text.trim_start_matches("r#")),
                         is_raw: ident.text.starts_with("r#"),
                         span: ident.id,
                     })

View file

@@ -30,12 +30,12 @@ fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect:
     let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
     let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap();
-    expect.assert_eq(&format!("{:?}", res));
+    expect.assert_eq(&format!("{res:?}"));
 }
 pub(crate) fn list() -> Vec<String> {
     let dylib_path = proc_macro_test_dylib_path();
     let mut srv = ProcMacroSrv::default();
     let res = srv.list_macros(&dylib_path).unwrap();
-    res.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()
+    res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect()
 }

View file

@@ -114,11 +114,11 @@ impl Drop for CpuSpan {
                 match out {
                     Ok(out) if out.status.success() => {
                         let svg = profile_data.with_extension("svg");
-                        std::fs::write(&svg, &out.stdout).unwrap();
+                        std::fs::write(&svg, out.stdout).unwrap();
                         eprintln!("Profile rendered to:\n\n    {}\n", svg.display());
                     }
                     _ => {
-                        eprintln!("Failed to run:\n\n   {:?}\n", cmd);
+                        eprintln!("Failed to run:\n\n   {cmd:?}\n");
                     }
                 }
             }

View file

@@ -66,7 +66,7 @@ impl WorkspaceBuildScripts {
             _ => {
                 let mut cmd = Command::new(toolchain::cargo());
-                cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]);
+                cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
                 // --all-targets includes tests, benches and examples in addition to the
                 // default lib and bins. This is an independent concept from the --target
@@ -74,7 +74,7 @@ impl WorkspaceBuildScripts {
                 cmd.arg("--all-targets");
                 if let Some(target) = &config.target {
-                    cmd.args(&["--target", target]);
+                    cmd.args(["--target", target]);
                 }
                 match &config.features {
@@ -122,7 +122,7 @@ impl WorkspaceBuildScripts {
            InvocationLocation::Root(root) if config.run_build_script_command.is_some() => {
                root.as_path()
            }
-            _ => &workspace.workspace_root(),
+            _ => workspace.workspace_root(),
        }
        .as_ref();
@@ -133,7 +133,7 @@ impl WorkspaceBuildScripts {
                // building build scripts failed, attempt to build with --keep-going so
                // that we potentially get more build data
                let mut cmd = Self::build_command(config)?;
-                cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1");
+                cmd.args(["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1");
                let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?;
                res.error = Some(error);
                Ok(res)
@@ -303,8 +303,7 @@ impl WorkspaceBuildScripts {
                            Ok(it) => acc.push(it),
                            Err(err) => {
                                push_err(&format!(
-                                    "invalid cfg from cargo-metadata: {}",
-                                    err
+                                    "invalid cfg from cargo-metadata: {err}"
                                ));
                                return;
                            }
View file
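
The recurring `cmd.args(&[..])` to `cmd.args([..])` cleanups above work because `Command::args` accepts any `IntoIterator` of `AsRef<OsStr>` items, and arrays iterate by value since Rust 1.53, so the borrow is redundant. A minimal sketch (the command and flags are illustrative only, not run here):

use std::process::Command;

fn main() {
    let mut cmd = Command::new("cargo");
    cmd.args(["check", "--quiet", "--workspace"]); // no `&` needed
    println!("{cmd:?}");
}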

@@ -411,7 +411,7 @@ impl CargoWorkspace {
         CargoWorkspace { packages, targets, workspace_root }
     }
-    pub fn packages<'a>(&'a self) -> impl Iterator<Item = Package> + ExactSizeIterator + 'a {
+    pub fn packages(&self) -> impl Iterator<Item = Package> + ExactSizeIterator + '_ {
         self.packages.iter().map(|(id, _pkg)| id)
     }
@@ -517,7 +517,7 @@ fn cargo_config_build_target(
     cargo_config.envs(extra_env);
     cargo_config
         .current_dir(cargo_toml.parent())
-        .args(&["-Z", "unstable-options", "config", "get", "build.target"])
+        .args(["-Z", "unstable-options", "config", "get", "build.target"])
         .env("RUSTC_BOOTSTRAP", "1");
     // if successful we receive `build.target = "target-triple"`
     // or `build.target = ["<target 1>", ..]`

View file

@@ -50,10 +50,10 @@ fn get_rust_cfgs(
     cargo_config.envs(extra_env);
     cargo_config
         .current_dir(cargo_toml.parent())
-        .args(&["rustc", "-Z", "unstable-options", "--print", "cfg"])
+        .args(["rustc", "-Z", "unstable-options", "--print", "cfg"])
         .env("RUSTC_BOOTSTRAP", "1");
     if let Some(target) = target {
-        cargo_config.args(&["--target", target]);
+        cargo_config.args(["--target", target]);
     }
     match utf8_stdout(cargo_config) {
         Ok(it) => return Ok(it),
@@ -63,9 +63,9 @@ fn get_rust_cfgs(
     // using unstable cargo features failed, fall back to using plain rustc
     let mut cmd = Command::new(toolchain::rustc());
     cmd.envs(extra_env);
-    cmd.args(&["--print", "cfg", "-O"]);
+    cmd.args(["--print", "cfg", "-O"]);
     if let Some(target) = target {
-        cmd.args(&["--target", target]);
+        cmd.args(["--target", target]);
     }
     utf8_stdout(cmd)
 }

View file

@@ -171,7 +171,7 @@ fn discover_sysroot_dir(
 ) -> Result<AbsPathBuf> {
     let mut rustc = Command::new(toolchain::rustc());
     rustc.envs(extra_env);
-    rustc.current_dir(current_dir).args(&["--print", "sysroot"]);
+    rustc.current_dir(current_dir).args(["--print", "sysroot"]);
     tracing::debug!("Discovering sysroot by {:?}", rustc);
     let stdout = utf8_stdout(rustc)?;
     Ok(AbsPathBuf::assert(PathBuf::from(stdout)))
@@ -203,7 +203,7 @@ fn discover_sysroot_src_dir_or_add_component(
         .or_else(|| {
             let mut rustup = Command::new(toolchain::rustup());
             rustup.envs(extra_env);
-            rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]);
+            rustup.current_dir(current_dir).args(["component", "add", "rust-src"]);
             tracing::info!("adding rust-src component by {:?}", rustup);
             utf8_stdout(rustup).ok()?;
             get_rust_src(sysroot_path)

View file

@@ -15,10 +15,10 @@ pub(super) fn get(
     let mut cmd = Command::new(toolchain::rustc());
     cmd.envs(extra_env);
     cmd.current_dir(cargo_toml.parent())
-        .args(&["-Z", "unstable-options", "rustc", "--print", "target-spec-json"])
+        .args(["-Z", "unstable-options", "rustc", "--print", "target-spec-json"])
         .env("RUSTC_BOOTSTRAP", "1");
     if let Some(target) = target {
-        cmd.args(&["--target", target]);
+        cmd.args(["--target", target]);
     }
     match utf8_stdout(cmd) {
         Ok(it) => return Ok(it),
@@ -28,10 +28,10 @@ pub(super) fn get(
     // using unstable cargo features failed, fall back to using plain rustc
     let mut cmd = Command::new(toolchain::rustc());
     cmd.envs(extra_env)
-        .args(&["-Z", "unstable-options", "rustc", "--print", "target-spec-json"])
+        .args(["-Z", "unstable-options", "rustc", "--print", "target-spec-json"])
         .env("RUSTC_BOOTSTRAP", "1");
     if let Some(target) = target {
-        cmd.args(&["--target", target]);
+        cmd.args(["--target", target]);
     }
     utf8_stdout(cmd)
 })()

View file

@@ -407,7 +407,7 @@ impl ProjectWorkspace {
                 ["libexec", "lib"]
                     .into_iter()
                     .map(|segment| sysroot.root().join(segment).join(&standalone_server_name))
-                    .find(|server_path| std::fs::metadata(&server_path).is_ok())
+                    .find(|server_path| std::fs::metadata(server_path).is_ok())
             }
             _ => None,
         }

View file

@@ -42,7 +42,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
                 "(".to_string(),
             ]),
             all_commit_characters: None,
-            completion_item: completion_item(&config),
+            completion_item: completion_item(config),
             work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
         }),
         signature_help_provider: Some(SignatureHelpOptions {
@@ -67,7 +67,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
         },
         document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions {
             first_trigger_character: "=".to_string(),
-            more_trigger_character: Some(more_trigger_character(&config)),
+            more_trigger_character: Some(more_trigger_character(config)),
         }),
         selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
         folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),

View file

@@ -102,7 +102,7 @@ impl flags::Scip {
                     let symbol = tokens_to_symbol
                         .entry(id)
                         .or_insert_with(|| {
-                            let symbol = token_to_symbol(&token).unwrap_or_else(&mut new_local_symbol);
+                            let symbol = token_to_symbol(token).unwrap_or_else(&mut new_local_symbol);
                             scip::symbol::format_symbol(symbol)
                         })
                         .clone();
@@ -176,7 +176,7 @@ fn get_relative_filepath(
     rootpath: &vfs::AbsPathBuf,
     file_id: ide::FileId,
 ) -> Option<String> {
-    Some(vfs.file_path(file_id).as_path()?.strip_prefix(&rootpath)?.as_ref().to_str()?.to_string())
+    Some(vfs.file_path(file_id).as_path()?.strip_prefix(rootpath)?.as_ref().to_str()?.to_string())
 }
 // SCIP Ranges have a (very large) optimization that ranges if they are on the same line

View file

@@ -1908,9 +1908,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
     let doc = doc.trim_end_matches('\n');
     assert!(
         doc.ends_with('.') && doc.starts_with(char::is_uppercase),
-        "bad docs for {}: {:?}",
-        field,
-        doc
+        "bad docs for {field}: {doc:?}"
     );
     let default = default.parse::<serde_json::Value>().unwrap();
@@ -2213,17 +2211,16 @@ fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
             let doc = doc_comment_to_string(doc);
             if default.contains('\n') {
                 format!(
-                    r#"[[{}]]{}::
+                    r#"[[{name}]]{name}::
 +
 --
 Default:
 ----
-{}
+{default}
 ----
-{}
+{doc}
 --
-"#,
-                    name, name, default, doc
+"#
                 )
             } else {
                 format!("[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n")

View file

@@ -101,8 +101,7 @@ impl DiagnosticCollection {
         file_id: FileId,
     ) -> impl Iterator<Item = &lsp_types::Diagnostic> {
         let native = self.native.get(&file_id).into_iter().flatten();
-        let check =
-            self.check.values().filter_map(move |it| it.get(&file_id)).into_iter().flatten();
+        let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten();
         native.chain(check)
     }

View file

@@ -505,8 +505,7 @@ fn rustc_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescripti
 fn clippy_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescription> {
     code.and_then(|code| {
         lsp_types::Url::parse(&format!(
-            "https://rust-lang.github.io/rust-clippy/master/index.html#{}",
-            code
+            "https://rust-lang.github.io/rust-clippy/master/index.html#{code}"
         ))
         .ok()
        .map(|href| lsp_types::CodeDescription { href })

Some files were not shown because too many files have changed in this diff Show more