Auto merge of #13860 - danieleades:clippy, r=lnicola

fix a bunch of clippy lints

Fixes a bunch of clippy lints, for fun and profit.

I'm aware of this repo's position on clippy. The changes are split into separate commits so they can be reviewed independently.
bors 2023-01-08 17:29:57 +00:00
commit f77b68a3cb
120 changed files with 298 additions and 375 deletions

View file

@ -408,8 +408,7 @@ impl CargoHandle {
Ok(())
} else {
Err(io::Error::new(io::ErrorKind::Other, format!(
"Cargo watcher failed, the command produced no valid metadata (exit code: {:?}):\n{}",
exit_status, error
"Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}"
)))
}
}
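
This hunk fixes clippy's `uninlined_format_args` lint: format arguments are captured inline in the format string instead of being passed positionally. A minimal sketch of the before/after, with hypothetical values:

    fn main() {
        let exit_status = 1;
        let error = "no metadata";
        // Before: positional arguments.
        let old = format!("exit code: {:?}: {}", exit_status, error);
        // After: inline captured identifiers (Rust 2021 format capture).
        let new = format!("exit code: {exit_status:?}: {error}");
        assert_eq!(old, new);
    }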

View file

@ -234,8 +234,7 @@ impl TraitData {
let item_tree = tree_id.item_tree(db);
let tr_def = &item_tree[tree_id.value];
let _cx = stdx::panic_context::enter(format!(
"trait_data_query({:?} -> {:?} -> {:?})",
tr, tr_loc, tr_def
"trait_data_query({tr:?} -> {tr_loc:?} -> {tr_def:?})"
));
let name = tr_def.name.clone();
let is_auto = tr_def.is_auto;
@ -543,7 +542,7 @@ impl<'a> AssocItemCollector<'a> {
if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
self.inactive_diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id.local_id,
InFile::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast()),
InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()),
attrs.cfg().unwrap(),
self.expander.cfg_options().clone(),
));
@ -552,7 +551,7 @@ impl<'a> AssocItemCollector<'a> {
'attrs: for attr in &*attrs {
let ast_id =
AstId::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast());
AstId::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast());
let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id };
if let Ok(ResolvedAttr::Macro(call_id)) = self.def_map.resolve_attr_macro(
@ -619,10 +618,8 @@ impl<'a> AssocItemCollector<'a> {
let ast_id_map = self.db.ast_id_map(self.expander.current_file_id());
let call = ast_id_map.get(call.ast_id).to_node(&root);
let _cx = stdx::panic_context::enter(format!(
"collect_items MacroCall: {}",
call
));
let _cx =
stdx::panic_context::enter(format!("collect_items MacroCall: {call}"));
let res = self.expander.enter_expand::<ast::MacroItems>(self.db, call);
if let Ok(ExpandResult { value: Some((mark, _)), .. }) = res {

View file

@ -176,7 +176,7 @@ fn find_path_for_module(
// - if relative paths are fine, check if we are searching for a parent
if prefixed.filter(PrefixKind::is_absolute).is_none() {
if let modpath @ Some(_) = find_self_super(&def_map, module_id, from) {
if let modpath @ Some(_) = find_self_super(def_map, module_id, from) {
return modpath;
}
}
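
This hunk, like several others in the PR (`item.ast_id(&item_tree)` above, for instance), removes a redundant `&` where the value is already a reference — clippy's `needless_borrow`. The extra borrow creates a `&&T` that deref coercion immediately unwraps. A small sketch with hypothetical names:

    fn len_of(s: &str) -> usize {
        s.len()
    }

    fn main() {
        let s: &str = "def_map";
        // Before: len_of(&s) passed a needless extra borrow (&&str) that
        // deref coercion immediately unwrapped; clippy::needless_borrow flags it.
        assert_eq!(len_of(s), 7);
    }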

View file

@ -142,8 +142,8 @@ pub enum WherePredicateTypeTarget {
impl GenericParams {
/// Iterator of type_or_consts field
pub fn iter<'a>(
&'a self,
pub fn iter(
&self,
) -> impl DoubleEndedIterator<Item = (Idx<TypeOrConstParamData>, &TypeOrConstParamData)> {
self.type_or_consts.iter()
}
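
The signature change here is clippy's `needless_lifetimes`: the explicit `'a` on `&'a self` can be elided. One common shape, sketched under an assumed wrapper type, uses `'_` to tie the returned iterator to the borrow of `self`:

    struct Params(Vec<u32>);

    impl Params {
        // Before: fn iter<'a>(&'a self) -> impl Iterator<Item = &'a u32> + 'a
        // After: the named lifetime is elided.
        fn iter(&self) -> impl Iterator<Item = &u32> + '_ {
            self.0.iter()
        }
    }

    fn main() {
        let p = Params(vec![1, 2, 3]);
        assert_eq!(p.iter().count(), 3);
    }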

View file

@ -393,8 +393,8 @@ impl Query {
/// Searches dependencies of `krate` for an importable path matching `query`.
///
/// This returns a list of items that could be imported from dependencies of `krate`.
pub fn search_dependencies<'a>(
db: &'a dyn DefDatabase,
pub fn search_dependencies(
db: &dyn DefDatabase,
krate: CrateId,
query: Query,
) -> FxHashSet<ItemInNs> {

View file

@ -96,7 +96,7 @@ pub(crate) enum BuiltinShadowMode {
/// Legacy macros can only be accessed through special methods like `get_legacy_macros`.
/// Other methods will only resolve values, types and module scoped macros only.
impl ItemScope {
pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a {
pub fn entries(&self) -> impl Iterator<Item = (&Name, PerNs)> + '_ {
// FIXME: shadowing
self.types
.keys()
@ -159,18 +159,17 @@ impl ItemScope {
pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> {
let (def, mut iter) = match item {
ItemInNs::Macros(def) => {
return self
.macros
.iter()
.find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)));
return self.macros.iter().find_map(|(name, &(other_def, vis))| {
(other_def == def).then_some((name, vis))
});
}
ItemInNs::Types(def) => (def, self.types.iter()),
ItemInNs::Values(def) => (def, self.values.iter()),
};
iter.find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)))
iter.find_map(|(name, &(other_def, vis))| (other_def == def).then_some((name, vis)))
}
pub(crate) fn traits<'a>(&'a self) -> impl Iterator<Item = TraitId> + 'a {
pub(crate) fn traits(&self) -> impl Iterator<Item = TraitId> + '_ {
self.types
.values()
.filter_map(|&(def, _)| match def {
@ -327,7 +326,7 @@ impl ItemScope {
changed
}
pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator<Item = (Option<Name>, PerNs)> + 'a {
pub(crate) fn resolutions(&self) -> impl Iterator<Item = (Option<Name>, PerNs)> + '_ {
self.entries().map(|(name, res)| (Some(name.clone()), res)).chain(
self.unnamed_trait_imports
.iter()
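
This hunk and many below swap `bool::then(|| v)` for `bool::then_some(v)` — clippy's `unnecessary_lazy_evaluations`. The closure buys nothing when the value is cheap; note that `then_some` evaluates its argument eagerly, so the rewrite is only safe for cheap, side-effect-free values. A minimal sketch:

    fn main() {
        let cond = true;
        // Before: a closure wrapping a value that needs no laziness.
        let a = cond.then(|| 42);
        // After: the eager variant, same result.
        let b = cond.then_some(42);
        assert_eq!(a, b);
        assert_eq!(b, Some(42));
    }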

View file

@ -170,7 +170,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
let pp = pretty_print_macro_expansion(
parse.syntax_node(),
show_token_ids.then(|| &*token_map),
show_token_ids.then_some(&*token_map),
);
let indent = IndentLevel::from_node(call.syntax());
let pp = reindent(indent, pp);

View file

@ -67,7 +67,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
let dep_def_map = db.crate_def_map(dep.crate_id);
let dep_root = dep_def_map.module_id(dep_def_map.root);
deps.insert(dep.as_name(), dep_root.into());
deps.insert(dep.as_name(), dep_root);
if dep.is_prelude() && !tree_id.is_block() {
def_map.extern_prelude.insert(dep.as_name(), dep_root);
@ -1094,7 +1094,7 @@ impl DefCollector<'_> {
ast_id,
*expand_to,
self.def_map.krate,
&resolver_def_id,
resolver_def_id,
&mut |_err| (),
);
if let Ok(Ok(call_id)) = call_id {
@ -1110,7 +1110,7 @@ impl DefCollector<'_> {
*derive_attr,
*derive_pos as u32,
self.def_map.krate,
&resolver,
resolver,
);
if let Ok((macro_id, def_id, call_id)) = id {
@ -2085,7 +2085,7 @@ impl ModCollector<'_, '_> {
.scope
.get_legacy_macro(name)
.and_then(|it| it.last())
.map(|&it| macro_id_to_def_id(self.def_collector.db, it.into()))
.map(|&it| macro_id_to_def_id(self.def_collector.db, it))
},
)
})

View file

@ -390,7 +390,7 @@ impl DefMap {
.get_legacy_macro(name)
// FIXME: shadowing
.and_then(|it| it.last())
.map_or_else(PerNs::none, |&m| PerNs::macros(m.into(), Visibility::Public));
.map_or_else(PerNs::none, |&m| PerNs::macros(m, Visibility::Public));
let from_scope = self[module].scope.get(name);
let from_builtin = match self.block {
Some(_) => {

View file

@ -381,7 +381,7 @@ impl Resolver {
});
def_map[module_id].scope.legacy_macros().for_each(|(name, macs)| {
macs.iter().for_each(|&mac| {
res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))));
res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)));
})
});
def_map.extern_prelude().for_each(|(name, &def)| {
@ -517,10 +517,7 @@ impl Scope {
});
m.def_map[m.module_id].scope.legacy_macros().for_each(|(name, macs)| {
macs.iter().for_each(|&mac| {
acc.add(
name,
ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))),
);
acc.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)));
})
});
}
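
These resolver hunks drop `.into()` and `From::from` calls that convert a value to its own type — clippy's `useless_conversion`. A sketch (the blanket `impl<T> From<T> for T` makes such calls the identity):

    fn main() {
        let id: u32 = 7;
        // Before: a conversion from u32 to u32.
        let a: u32 = u32::from(id);
        // After: the value already has the expected type.
        let b: u32 = id;
        assert_eq!(a, b);
    }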

View file

@ -115,7 +115,8 @@ pub fn pseudo_derive_attr_expansion(
};
let mut token_trees = Vec::new();
for tt in (&args.token_trees)
for tt in args
.token_trees
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
{
token_trees.push(mk_leaf('#'));

View file

@ -449,7 +449,7 @@ fn concat_bytes_expand(
match token.kind() {
syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()),
syntax::SyntaxKind::BYTE_STRING => {
let components = unquote_byte_string(lit).unwrap_or_else(Vec::new);
let components = unquote_byte_string(lit).unwrap_or_default();
components.into_iter().for_each(|x| bytes.push(x.to_string()));
}
_ => {
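
`unwrap_or_else(Vec::new)` becomes `unwrap_or_default()`: both yield an empty `Vec` for `None`, and clippy prefers the form that names the default directly. A sketch with a hypothetical helper:

    fn components(input: Option<Vec<String>>) -> Vec<String> {
        // Before: input.unwrap_or_else(Vec::new)
        // After: defers to Default::default(), which is an empty Vec.
        input.unwrap_or_default()
    }

    fn main() {
        assert!(components(None).is_empty());
        assert_eq!(components(Some(vec!["b".into()])).len(), 1);
    }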

View file

@ -208,7 +208,7 @@ fn eager_macro_recur(
// Collect replacement
for child in children {
let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
Some(path) => macro_resolver(path.clone()).ok_or_else(|| UnresolvedMacro { path })?,
Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
None => {
diagnostic_sink(ExpandError::Other("malformed macro invocation".into()));
continue;
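
Likewise, `ok_or_else(|| err)` becomes `ok_or(err)` when the error is an already-built value and the closure only moves it. Sketch:

    fn main() {
        let path: Option<String> = None;
        // Before: path.ok_or_else(|| "unresolved macro")
        // After: the error value is ready, so no closure is needed.
        let res: Result<String, &str> = path.ok_or("unresolved macro");
        assert!(res.is_err());
    }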

View file

@ -62,7 +62,7 @@ impl<'a> UnescapedName<'a> {
it.clone()
}
}
Repr::TupleField(it) => SmolStr::new(&it.to_string()),
Repr::TupleField(it) => SmolStr::new(it.to_string()),
}
}
}
@ -139,7 +139,7 @@ impl Name {
pub fn to_smol_str(&self) -> SmolStr {
match &self.0 {
Repr::Text(it) => it.clone(),
Repr::TupleField(it) => SmolStr::new(&it.to_string()),
Repr::TupleField(it) => SmolStr::new(it.to_string()),
}
}

View file

@ -82,11 +82,11 @@ pub(crate) fn autoderef_step(
}
// FIXME: replace uses of this with Autoderef above
pub fn autoderef<'a>(
db: &'a dyn HirDatabase,
pub fn autoderef(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
ty: Canonical<Ty>,
) -> impl Iterator<Item = Canonical<Ty>> + 'a {
) -> impl Iterator<Item = Canonical<Ty>> + '_ {
let mut table = InferenceTable::new(db, env);
let ty = table.instantiate_canonical(ty);
let mut autoderef = Autoderef::new(&mut table, ty);

View file

@ -131,7 +131,7 @@ fn scalar_max(scalar: &Scalar) -> i128 {
IntTy::I16 => i16::MAX as i128,
IntTy::I32 => i32::MAX as i128,
IntTy::I64 => i64::MAX as i128,
IntTy::I128 => i128::MAX as i128,
IntTy::I128 => i128::MAX,
},
Scalar::Uint(x) => match x {
chalk_ir::UintTy::Usize => usize::MAX as i128,
@ -139,7 +139,7 @@ fn scalar_max(scalar: &Scalar) -> i128 {
chalk_ir::UintTy::U16 => u16::MAX as i128,
chalk_ir::UintTy::U32 => u32::MAX as i128,
chalk_ir::UintTy::U64 => u64::MAX as i128,
chalk_ir::UintTy::U128 => i128::MAX as i128, // ignore too big u128 for now
chalk_ir::UintTy::U128 => i128::MAX, // ignore too big u128 for now
},
Scalar::Float(_) => 0,
}
@ -404,7 +404,7 @@ pub(crate) fn path_to_const(
args_lazy: impl FnOnce() -> Generics,
debruijn: DebruijnIndex,
) -> Option<Const> {
match resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
match resolver.resolve_path_in_value_ns_fully(db.upcast(), path) {
Some(ValueNs::GenericParam(p)) => {
let ty = db.const_param_ty(p);
let args = args_lazy();
@ -511,10 +511,10 @@ pub(crate) fn const_eval_query_variant(
)
}
pub(crate) fn eval_to_const<'a>(
pub(crate) fn eval_to_const(
expr: Idx<Expr>,
mode: ParamLoweringMode,
ctx: &mut InferenceContext<'a>,
ctx: &mut InferenceContext<'_>,
args: impl FnOnce() -> Generics,
debruijn: DebruijnIndex,
) -> Const {
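
Besides more elided lifetimes and dropped borrows, this hunk removes no-op casts such as `i128::MAX as i128` — clippy's `unnecessary_cast`, since the expression already has the target type. Sketch:

    fn main() {
        // Before: the cast restates the expression's own type.
        // let max = i128::MAX as i128;
        let max = i128::MAX;
        assert_eq!(max, 170_141_183_460_469_231_731_687_303_715_884_105_727);
    }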

View file

@ -25,7 +25,6 @@ fn eval_goal(ra_fixture: &str) -> Result<ComputedExpr, ConstEvalError> {
let scope = &def_map[module_id.local_id].scope;
let const_id = scope
.declarations()
.into_iter()
.find_map(|x| match x {
hir_def::ModuleDefId::ConstId(x) => {
if db.const_data(x).name.as_ref()?.to_string() == "GOAL" {
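
Here `.into_iter()` is dropped because `declarations()` evidently already returns an iterator, making the call an identity conversion (clippy's `useless_conversion` again, in its iterator form). Sketch:

    fn main() {
        let scope = vec![1, 2, 3];
        // Before: scope.iter().into_iter().find_map(...) — into_iter() on
        // something that is already an iterator is the identity.
        let goal = scope.iter().find_map(|&x| (x == 2).then_some(x));
        assert_eq!(goal, Some(2));
    }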

View file

@ -961,7 +961,7 @@ impl<'a> InferenceContext<'a> {
Expr::RecordLit { path, fields, .. } => {
let subs = fields.iter().map(|f| (f.name.clone(), f.expr));
self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs)
self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs, subs)
}
Expr::Underscore => rhs_ty.clone(),
_ => {
@ -1360,7 +1360,7 @@ impl<'a> InferenceContext<'a> {
ty,
c,
ParamLoweringMode::Placeholder,
|| generics(this.db.upcast(), (&this.resolver).generic_def().unwrap()),
|| generics(this.db.upcast(), this.resolver.generic_def().unwrap()),
DebruijnIndex::INNERMOST,
)
},

View file

@ -153,7 +153,7 @@ impl<'a> InferenceContext<'a> {
) -> Ty {
let mut expected = self.resolve_ty_shallow(expected);
if is_non_ref_pat(&self.body, pat) {
if is_non_ref_pat(self.body, pat) {
let mut pat_adjustments = Vec::new();
while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
pat_adjustments.push(expected.clone());
@ -220,7 +220,7 @@ impl<'a> InferenceContext<'a> {
),
Pat::Record { path: p, args: fields, ellipsis: _ } => {
let subs = fields.iter().map(|f| (f.name.clone(), f.pat));
self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat.into(), subs)
self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs)
}
Pat::Path(path) => {
// FIXME use correct resolver for the surrounding expression

View file

@ -228,7 +228,7 @@ impl chalk_ir::interner::Interner for Interner {
Interned::new(InternedWrapper(chalk_ir::TyData { kind, flags }))
}
fn ty_data<'a>(self, ty: &'a Self::InternedType) -> &'a chalk_ir::TyData<Self> {
fn ty_data(self, ty: &Self::InternedType) -> &chalk_ir::TyData<Self> {
&ty.0
}
@ -236,10 +236,7 @@ impl chalk_ir::interner::Interner for Interner {
Interned::new(InternedWrapper(lifetime))
}
fn lifetime_data<'a>(
self,
lifetime: &'a Self::InternedLifetime,
) -> &'a chalk_ir::LifetimeData<Self> {
fn lifetime_data(self, lifetime: &Self::InternedLifetime) -> &chalk_ir::LifetimeData<Self> {
&lifetime.0
}
@ -247,7 +244,7 @@ impl chalk_ir::interner::Interner for Interner {
Interned::new(InternedWrapper(constant))
}
fn const_data<'a>(self, constant: &'a Self::InternedConst) -> &'a chalk_ir::ConstData<Self> {
fn const_data(self, constant: &Self::InternedConst) -> &chalk_ir::ConstData<Self> {
&constant.0
}
@ -267,10 +264,10 @@ impl chalk_ir::interner::Interner for Interner {
parameter
}
fn generic_arg_data<'a>(
fn generic_arg_data(
self,
parameter: &'a Self::InternedGenericArg,
) -> &'a chalk_ir::GenericArgData<Self> {
parameter: &Self::InternedGenericArg,
) -> &chalk_ir::GenericArgData<Self> {
parameter
}
@ -285,11 +282,11 @@ impl chalk_ir::interner::Interner for Interner {
data.into_iter().collect()
}
fn goal_data<'a>(self, goal: &'a Self::InternedGoal) -> &'a GoalData<Self> {
fn goal_data(self, goal: &Self::InternedGoal) -> &GoalData<Self> {
goal
}
fn goals_data<'a>(self, goals: &'a Self::InternedGoals) -> &'a [Goal<Interner>] {
fn goals_data(self, goals: &Self::InternedGoals) -> &[Goal<Interner>] {
goals
}
@ -300,10 +297,7 @@ impl chalk_ir::interner::Interner for Interner {
Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
}
fn substitution_data<'a>(
self,
substitution: &'a Self::InternedSubstitution,
) -> &'a [GenericArg] {
fn substitution_data(self, substitution: &Self::InternedSubstitution) -> &[GenericArg] {
&substitution.as_ref().0
}
@ -314,10 +308,10 @@ impl chalk_ir::interner::Interner for Interner {
data
}
fn program_clause_data<'a>(
fn program_clause_data(
self,
clause: &'a Self::InternedProgramClause,
) -> &'a chalk_ir::ProgramClauseData<Self> {
clause: &Self::InternedProgramClause,
) -> &chalk_ir::ProgramClauseData<Self> {
clause
}
@ -328,10 +322,10 @@ impl chalk_ir::interner::Interner for Interner {
Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
}
fn program_clauses_data<'a>(
fn program_clauses_data(
self,
clauses: &'a Self::InternedProgramClauses,
) -> &'a [chalk_ir::ProgramClause<Self>] {
clauses: &Self::InternedProgramClauses,
) -> &[chalk_ir::ProgramClause<Self>] {
clauses
}
@ -342,10 +336,10 @@ impl chalk_ir::interner::Interner for Interner {
Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
}
fn quantified_where_clauses_data<'a>(
fn quantified_where_clauses_data(
self,
clauses: &'a Self::InternedQuantifiedWhereClauses,
) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] {
clauses: &Self::InternedQuantifiedWhereClauses,
) -> &[chalk_ir::QuantifiedWhereClause<Self>] {
clauses
}
@ -356,10 +350,10 @@ impl chalk_ir::interner::Interner for Interner {
Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
}
fn variable_kinds_data<'a>(
fn variable_kinds_data(
self,
parameter_kinds: &'a Self::InternedVariableKinds,
) -> &'a [chalk_ir::VariableKind<Self>] {
parameter_kinds: &Self::InternedVariableKinds,
) -> &[chalk_ir::VariableKind<Self>] {
&parameter_kinds.as_ref().0
}
@ -370,10 +364,10 @@ impl chalk_ir::interner::Interner for Interner {
Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
}
fn canonical_var_kinds_data<'a>(
fn canonical_var_kinds_data(
self,
canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
canonical_var_kinds: &Self::InternedCanonicalVarKinds,
) -> &[chalk_ir::CanonicalVarKind<Self>] {
canonical_var_kinds
}
@ -384,10 +378,10 @@ impl chalk_ir::interner::Interner for Interner {
data.into_iter().collect()
}
fn constraints_data<'a>(
fn constraints_data(
self,
constraints: &'a Self::InternedConstraints,
) -> &'a [chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] {
constraints: &Self::InternedConstraints,
) -> &[chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] {
constraints
}
fn debug_closure_id(
@ -410,10 +404,7 @@ impl chalk_ir::interner::Interner for Interner {
Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
}
fn variances_data<'a>(
self,
variances: &'a Self::InternedVariances,
) -> &'a [chalk_ir::Variance] {
fn variances_data(self, variances: &Self::InternedVariances) -> &[chalk_ir::Variance] {
variances
}
}

View file

@ -241,7 +241,7 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result<Layout, LayoutError> {
cx.univariant::<RustcEnumVariantIdx, &&Layout>(
&dl,
dl,
&[],
&ReprOptions::default(),
StructKind::AlwaysSized,

View file

@ -12,8 +12,7 @@ use super::layout_of_ty;
fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
// using unstable cargo features failed, fall back to using plain rustc
let mut cmd = std::process::Command::new("rustc");
cmd.args(&["-Z", "unstable-options", "--print", "target-spec-json"])
.env("RUSTC_BOOTSTRAP", "1");
cmd.args(["-Z", "unstable-options", "--print", "target-spec-json"]).env("RUSTC_BOOTSTRAP", "1");
let output = cmd.output().unwrap();
assert!(output.status.success(), "{}", output.status);
let stdout = String::from_utf8(output.stdout).unwrap();
@ -30,7 +29,6 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
let scope = &def_map[module_id.local_id].scope;
let adt_id = scope
.declarations()
.into_iter()
.find_map(|x| match x {
hir_def::ModuleDefId::AdtId(x) => {
let name = match x {
@ -38,7 +36,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
hir_def::AdtId::UnionId(x) => db.union_data(x).name.to_smol_str(),
hir_def::AdtId::EnumId(x) => db.enum_data(x).name.to_smol_str(),
};
(name == "Goal").then(|| x)
(name == "Goal").then_some(x)
}
_ => None,
})
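
The `cmd.args(&[...])` changes pass the argument array by value: `Command::args` takes `impl IntoIterator`, and since Rust 1.53 arrays implement `IntoIterator` by value, so the borrow is needless. Sketch:

    use std::process::Command;

    fn main() {
        let mut cmd = Command::new("rustc");
        // Before: cmd.args(&["--print", "cfg"]);
        // After: the array is consumed directly.
        cmd.args(["--print", "cfg"]);
        // (the command is only built here, not spawned)
    }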

View file

@ -780,7 +780,7 @@ impl<'a> TyLoweringContext<'a> {
|_, c, ty| {
const_or_path_to_chalk(
self.db,
&self.resolver,
self.resolver,
ty,
c,
self.type_param_mode,
@ -1796,8 +1796,7 @@ pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binde
let impl_data = db.impl_data(impl_id);
let resolver = impl_id.resolver(db.upcast());
let _cx = stdx::panic_context::enter(format!(
"impl_self_ty_query({:?} -> {:?} -> {:?})",
impl_id, impl_loc, impl_data
"impl_self_ty_query({impl_id:?} -> {impl_loc:?} -> {impl_data:?})"
));
let generics = generics(db.upcast(), impl_id.into());
let ctx =
@ -1834,8 +1833,7 @@ pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<
let impl_data = db.impl_data(impl_id);
let resolver = impl_id.resolver(db.upcast());
let _cx = stdx::panic_context::enter(format!(
"impl_trait_query({:?} -> {:?} -> {:?})",
impl_id, impl_loc, impl_data
"impl_trait_query({impl_id:?} -> {impl_loc:?} -> {impl_data:?})"
));
let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
@ -1854,7 +1852,7 @@ pub(crate) fn return_type_impl_traits(
let ctx_ret = TyLoweringContext::new(db, &resolver)
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
.with_type_param_mode(ParamLoweringMode::Variable);
let _ret = (&ctx_ret).lower_ty(&data.ret_type);
let _ret = ctx_ret.lower_ty(&data.ret_type);
let generics = generics(db.upcast(), def.into());
let return_type_impl_traits =
ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() };

View file

@ -714,7 +714,7 @@ fn lookup_impl_assoc_item_for_trait_ref(
let impl_data = find_matching_impl(impls, table, trait_ref)?;
impl_data.items.iter().find_map(|it| match it {
AssocItemId::FunctionId(f) => {
(db.function_data(*f).name == *name).then(|| AssocItemId::FunctionId(*f))
(db.function_data(*f).name == *name).then_some(AssocItemId::FunctionId(*f))
}
AssocItemId::ConstId(c) => db
.const_data(*c)
@ -926,7 +926,7 @@ fn iterate_method_candidates_by_receiver(
while let Some((self_ty, _)) = autoderef.next() {
iterate_inherent_methods(
&self_ty,
&mut autoderef.table,
autoderef.table,
name,
Some(&receiver_ty),
Some(receiver_adjustments.clone()),
@ -941,7 +941,7 @@ fn iterate_method_candidates_by_receiver(
while let Some((self_ty, _)) = autoderef.next() {
iterate_trait_method_candidates(
&self_ty,
&mut autoderef.table,
autoderef.table,
traits_in_scope,
name,
Some(&receiver_ty),
@ -1246,7 +1246,7 @@ fn is_valid_candidate(
let expected_self_ty = TyBuilder::impl_self_ty(db, impl_id)
.fill_with_inference_vars(table)
.build();
table.unify(&expected_self_ty, &self_ty)
table.unify(&expected_self_ty, self_ty)
});
if !self_ty_matches {
cov_mark::hit!(const_candidate_self_type_mismatch);

View file

@ -55,13 +55,10 @@ impl TraitEnvironment {
}
}
pub fn traits_in_scope_from_clauses<'a>(
&'a self,
ty: Ty,
) -> impl Iterator<Item = TraitId> + 'a {
pub fn traits_in_scope_from_clauses(&self, ty: Ty) -> impl Iterator<Item = TraitId> + '_ {
self.traits_from_clauses
.iter()
.filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then(|| *trait_id))
.filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then_some(*trait_id))
}
}

View file

@ -184,9 +184,7 @@ pub(crate) struct Generics {
}
impl Generics {
pub(crate) fn iter_id<'a>(
&'a self,
) -> impl Iterator<Item = Either<TypeParamId, ConstParamId>> + 'a {
pub(crate) fn iter_id(&self) -> impl Iterator<Item = Either<TypeParamId, ConstParamId>> + '_ {
self.iter().map(|(id, data)| match data {
TypeOrConstParamData::TypeParamData(_) => Either::Left(TypeParamId::from_unchecked(id)),
TypeOrConstParamData::ConstParamData(_) => {
@ -216,9 +214,9 @@ impl Generics {
}
/// Iterator over types and const params of parent.
pub(crate) fn iter_parent<'a>(
&'a self,
) -> impl DoubleEndedIterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
pub(crate) fn iter_parent(
&self,
) -> impl DoubleEndedIterator<Item = (TypeOrConstParamId, &TypeOrConstParamData)> {
self.parent_generics().into_iter().flat_map(|it| {
let to_toc_id =
move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p);

View file

@ -608,7 +608,7 @@ impl Module {
pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec<Macro> {
let def_map = self.id.def_map(db.upcast());
let scope = &def_map[self.id.local_id].scope;
scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| MacroId::from(it).into()).collect()
scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| it.into()).collect()
}
pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<Impl> {
@ -1559,7 +1559,7 @@ impl Function {
}
pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
self.has_self_param(db).then(|| SelfParam { func: self.id })
self.has_self_param(db).then_some(SelfParam { func: self.id })
}
pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param> {
@ -2411,7 +2411,7 @@ pub struct DeriveHelper {
impl DeriveHelper {
pub fn derive(&self) -> Macro {
Macro { id: self.derive.into() }
Macro { id: self.derive }
}
pub fn name(&self, db: &dyn HirDatabase) -> Name {
@ -2781,7 +2781,7 @@ impl Impl {
pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> {
let krate = trait_.module(db).krate();
let mut all = Vec::new();
for Crate { id } in krate.transitive_reverse_dependencies(db).into_iter() {
for Crate { id } in krate.transitive_reverse_dependencies(db) {
let impls = db.trait_impls_in_crate(id);
all.extend(impls.for_trait(trait_.id).map(Self::from))
}

View file

@ -795,7 +795,7 @@ impl<'db> SemanticsImpl<'db> {
// requeue the tokens we got from mapping our current token down
stack.extend(mapped_tokens);
// if the length changed we have found a mapping for the token
(stack.len() != len).then(|| ())
(stack.len() != len).then_some(())
};
// Remap the next token in the queue into a macro call its in, if it is not being remapped
@ -1221,7 +1221,7 @@ impl<'db> SemanticsImpl<'db> {
krate
.dependencies(self.db)
.into_iter()
.find_map(|dep| (dep.name == name).then(|| dep.krate))
.find_map(|dep| (dep.name == name).then_some(dep.krate))
}
fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
@ -1252,7 +1252,7 @@ impl<'db> SemanticsImpl<'db> {
fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
let src = self.find_file(src.syntax()).with_value(src).cloned();
T::to_def(&self, src)
T::to_def(self, src)
}
fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {

View file

@ -228,7 +228,7 @@ impl SourceAnalyzer {
db: &dyn HirDatabase,
pat: &ast::Pat,
) -> Option<SmallVec<[Type; 1]>> {
let pat_id = self.pat_id(&pat)?;
let pat_id = self.pat_id(pat)?;
let infer = self.infer.as_ref()?;
Some(
infer
@ -270,7 +270,7 @@ impl SourceAnalyzer {
db: &dyn HirDatabase,
await_expr: &ast::AwaitExpr,
) -> Option<FunctionId> {
let mut ty = self.ty_of_expr(db, &await_expr.expr()?.into())?.clone();
let mut ty = self.ty_of_expr(db, &await_expr.expr()?)?.clone();
let into_future_trait = self
.resolver
@ -316,7 +316,7 @@ impl SourceAnalyzer {
ast::UnaryOp::Not => name![not],
ast::UnaryOp::Neg => name![neg],
};
let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?;
let ty = self.ty_of_expr(db, &prefix_expr.expr()?)?;
let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
// HACK: subst for all methods coincides with that for their trait because the methods
@ -331,8 +331,8 @@ impl SourceAnalyzer {
db: &dyn HirDatabase,
index_expr: &ast::IndexExpr,
) -> Option<FunctionId> {
let base_ty = self.ty_of_expr(db, &index_expr.base()?.into())?;
let index_ty = self.ty_of_expr(db, &index_expr.index()?.into())?;
let base_ty = self.ty_of_expr(db, &index_expr.base()?)?;
let index_ty = self.ty_of_expr(db, &index_expr.index()?)?;
let lang_item_name = name![index];
@ -352,8 +352,8 @@ impl SourceAnalyzer {
binop_expr: &ast::BinExpr,
) -> Option<FunctionId> {
let op = binop_expr.op_kind()?;
let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?;
let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?;
let lhs = self.ty_of_expr(db, &binop_expr.lhs()?)?;
let rhs = self.ty_of_expr(db, &binop_expr.rhs()?)?;
let (op_trait, op_fn) = lang_names_for_bin_op(op)
.and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?;
@ -372,7 +372,7 @@ impl SourceAnalyzer {
db: &dyn HirDatabase,
try_expr: &ast::TryExpr,
) -> Option<FunctionId> {
let ty = self.ty_of_expr(db, &try_expr.expr()?.into())?;
let ty = self.ty_of_expr(db, &try_expr.expr()?)?;
let op_fn =
db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?;
@ -824,7 +824,7 @@ impl SourceAnalyzer {
}
fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> {
self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, &expr)?)
self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, expr)?)
}
}
@ -987,7 +987,7 @@ fn resolve_hir_path_(
db,
def,
res.in_type_ns()?,
|name, id| (name == unresolved.name).then(|| id),
|name, id| (name == unresolved.name).then_some(id),
)
})
.map(TypeAlias::from)

View file

@ -326,7 +326,7 @@ impl ExtendedEnum {
fn resolve_enum_def(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> {
sema.type_of_expr(expr)?.adjusted().autoderef(sema.db).find_map(|ty| match ty.as_adt() {
Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
_ => ty.is_bool().then(|| ExtendedEnum::Bool),
_ => ty.is_bool().then_some(ExtendedEnum::Bool),
})
}
@ -344,7 +344,7 @@ fn resolve_tuple_of_enum_def(
// For now we only handle expansion for a tuple of enums. Here
// we map non-enum items to None and rely on `collect` to
// convert Vec<Option<hir::Enum>> into Option<Vec<hir::Enum>>.
_ => ty.is_bool().then(|| ExtendedEnum::Bool),
_ => ty.is_bool().then_some(ExtendedEnum::Bool),
})
})
.collect()

View file

@ -35,16 +35,16 @@ pub(crate) fn add_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
match builder_edit_pos {
InsertOrReplace::Insert(insert_pos, needs_whitespace) => {
let preceeding_whitespace = if needs_whitespace { " " } else { "" };
builder.insert(insert_pos, &format!("{preceeding_whitespace}-> {ty} "))
builder.insert(insert_pos, format!("{preceeding_whitespace}-> {ty} "))
}
InsertOrReplace::Replace(text_range) => {
builder.replace(text_range, &format!("-> {ty}"))
builder.replace(text_range, format!("-> {ty}"))
}
}
if let FnType::Closure { wrap_expr: true } = fn_type {
cov_mark::hit!(wrap_closure_non_block_expr);
// `|x| x` becomes `|x| -> T x` which is invalid, so wrap it in a block
builder.replace(tail_expr.syntax().text_range(), &format!("{{{tail_expr}}}"));
builder.replace(tail_expr.syntax().text_range(), format!("{{{tail_expr}}}"));
}
},
)

View file

@ -203,7 +203,7 @@ fn relevance_score(
// get the distance between the imported path and the current module
// (prefer items that are more local)
Some((item_module, current_module)) => {
score -= module_distance_hueristic(db, &current_module, &item_module) as i32;
score -= module_distance_hueristic(db, current_module, &item_module) as i32;
}
// could not find relevant modules, so just use the length of the path as an estimate

View file

@ -216,7 +216,7 @@ fn validate_method_call_expr(
let krate = module.krate();
let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
it_type.impls_trait(sema.db, iter_trait, &[]).then(|| (expr, receiver))
it_type.impls_trait(sema.db, iter_trait, &[]).then_some((expr, receiver))
}
#[cfg(test)]

View file

@ -588,7 +588,7 @@ impl FunctionBody {
FunctionBody::Expr(expr) => Some(expr.clone()),
FunctionBody::Span { parent, text_range } => {
let tail_expr = parent.tail_expr()?;
text_range.contains_range(tail_expr.syntax().text_range()).then(|| tail_expr)
text_range.contains_range(tail_expr.syntax().text_range()).then_some(tail_expr)
}
}
}

View file

@ -118,13 +118,13 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let mut body_items: Vec<String> = Vec::new();
let mut items_to_be_processed: Vec<ast::Item> = module.body_items.clone();
let mut new_item_indent = old_item_indent + 1;
if impl_parent.is_some() {
new_item_indent = old_item_indent + 2;
let new_item_indent = if impl_parent.is_some() {
old_item_indent + 2
} else {
items_to_be_processed = [module.use_items.clone(), items_to_be_processed].concat();
}
old_item_indent + 1
};
for item in items_to_be_processed {
let item = item.indent(IndentLevel(1));
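
This rewrite replaces a mutable variable that was initialized and then conditionally reassigned with a single `let x = if ... else ...` expression — likely clippy's `useless_let_if_seq`. A sketch with hypothetical names:

    fn new_indent(has_impl_parent: bool, old: u8) -> u8 {
        // Before:
        //   let mut new = old + 1;
        //   if has_impl_parent { new = old + 2; } else { /* other work */ }
        // After: initialize once from a single if/else expression.
        if has_impl_parent {
            old + 2
        } else {
            old + 1
        }
    }

    fn main() {
        assert_eq!(new_indent(true, 1), 3);
        assert_eq!(new_indent(false, 1), 2);
    }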

View file

@ -178,7 +178,7 @@ fn extract_generic_params(
.fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged),
};
let generics = generics.into_iter().filter_map(|(param, tag)| tag.then(|| param));
let generics = generics.into_iter().filter_map(|(param, tag)| tag.then_some(param));
tagged_one.then(|| make::generic_param_list(generics))
}

View file

@ -53,7 +53,7 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
return None;
}
let impl_ = fn_node.syntax().ancestors().into_iter().find_map(ast::Impl::cast)?;
let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?;
if is_default_implemented(ctx, &impl_) {
cov_mark::hit!(default_block_is_already_present);
cov_mark::hit!(struct_in_module_with_default);

View file

@ -85,8 +85,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
let field = ctx.find_node_at_offset::<ast::TupleField>()?;
let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
let field_list_index =
field_list.syntax().children().into_iter().position(|s| &s == field.syntax())?;
let field_list_index = field_list.syntax().children().position(|s| &s == field.syntax())?;
let deref_type_to_generate = match existing_deref_impl(&ctx.sema, &strukt) {
None => DerefType::Deref,

View file

@ -180,7 +180,7 @@ fn make_tuple_field_list(
) -> Option<ast::FieldList> {
let args = call_expr.arg_list()?.args();
let tuple_fields = args.map(|arg| {
let ty = expr_ty(ctx, arg, &scope).unwrap_or_else(make::ty_placeholder);
let ty = expr_ty(ctx, arg, scope).unwrap_or_else(make::ty_placeholder);
make::tuple_field(None, ty)
});
Some(make::tuple_field_list(tuple_fields).into())

View file

@ -514,7 +514,7 @@ fn fn_args(
/// vec!["foo_1".into(), "foo_2".into(), "bar_1".into(), "baz".into(), "bar_2".into()];
/// assert_eq!(names, expected);
/// ```
fn deduplicate_arg_names(arg_names: &mut Vec<String>) {
fn deduplicate_arg_names(arg_names: &mut [String]) {
let mut arg_name_counts = FxHashMap::default();
for name in arg_names.iter() {
*arg_name_counts.entry(name).or_insert(0) += 1;
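
Changing `&mut Vec<String>` to `&mut [String]` is clippy's `ptr_arg`: a function that edits elements but never changes the length only needs a slice, which callers with arrays or boxed slices can also provide. Sketch:

    fn number_names(names: &mut [String]) {
        // Edits elements in place; the collection never grows or shrinks,
        // so a mutable slice is all the function needs.
        for (i, name) in names.iter_mut().enumerate() {
            name.push_str(&i.to_string());
        }
    }

    fn main() {
        let mut v = vec!["a".to_string(), "b".to_string()];
        number_names(&mut v); // &mut Vec<String> coerces to &mut [String]
        assert_eq!(v, ["a0", "b1"]);
    }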

View file

@ -176,7 +176,7 @@ pub(crate) fn generate_getter_impl(
// for separating it from other assoc items, that needs
// to be handled spearately
let mut getter_buf =
generate_getter_from_info(ctx, &getter_info, &record_field_info);
generate_getter_from_info(ctx, &getter_info, record_field_info);
// Insert `$0` only for last getter we generate
if i == record_fields_count - 1 {
@ -271,7 +271,7 @@ fn generate_getter_from_info(
}}",
vis,
record_field_info.fn_name,
info.mutable.then(|| "mut ").unwrap_or_default(),
info.mutable.then_some("mut ").unwrap_or_default(),
ty,
body,
);

View file

@ -70,7 +70,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
)?;
let expr = use_trivial_constructor(
&ctx.sema.db,
ctx.sema.db,
ide_db::helpers::mod_path_to_ast(&type_path),
&ty,
)?;

View file

@ -138,7 +138,7 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
replacement = Replacement::Plain;
}
_ => {
let alias = get_type_alias(&ctx, &alias_instance)?;
let alias = get_type_alias(ctx, &alias_instance)?;
concrete_type = alias.ty()?;
replacement = inline(&alias, &alias_instance)?;
}
@ -158,7 +158,7 @@ impl Replacement {
fn to_text(&self, concrete_type: &ast::Type) -> String {
match self {
Replacement::Generic { lifetime_map, const_and_type_map } => {
create_replacement(&lifetime_map, &const_and_type_map, &concrete_type)
create_replacement(lifetime_map, const_and_type_map, concrete_type)
}
Replacement::Plain => concrete_type.to_string(),
}
@ -240,7 +240,7 @@ impl ConstAndTypeMap {
) -> Option<Self> {
let mut inner = HashMap::new();
let instance_generics = generic_args_to_const_and_type_generics(instance_args);
let alias_generics = generic_param_list_to_const_and_type_generics(&alias_generics);
let alias_generics = generic_param_list_to_const_and_type_generics(alias_generics);
if instance_generics.len() > alias_generics.len() {
cov_mark::hit!(too_many_generic_args);

View file

@ -64,7 +64,7 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]);
let input_expressions = input_expressions
.into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then(|| group))
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
.map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
.collect::<Option<Vec<ast::Expr>>>()?;

View file

@ -42,7 +42,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
let r_angle = generic_args.r_angle_token()?;
let turbofish_range = TextRange::new(colon2.text_range().start(), r_angle.text_range().end());
let turbofish_args: Vec<GenericArg> = generic_args.generic_args().into_iter().collect();
let turbofish_args: Vec<GenericArg> = generic_args.generic_args().collect();
// Find type of ::<_>
if turbofish_args.len() != 1 {

View file

@ -107,7 +107,7 @@ fn find_all_references(
/// If no await expression is found, returns None.
fn find_await_expression(ctx: &AssistContext<'_>, nameref: &NameRef) -> Option<ast::AwaitExpr> {
// From the nameref, walk up the tree to the await expression.
let await_expr = if let Some(path) = full_path_of_name_ref(&nameref) {
let await_expr = if let Some(path) = full_path_of_name_ref(nameref) {
// Function calls.
path.syntax()
.parent()

View file

@ -95,8 +95,7 @@ impl Assist {
let id = block.id;
assert!(
id.chars().all(|it| it.is_ascii_lowercase() || it == '_'),
"invalid assist id: {:?}",
id
"invalid assist id: {id:?}"
);
let mut lines = block.contents.iter().peekable();
let location = sourcegen::Location { file: path.to_path_buf(), line: block.line };

View file

@ -613,7 +613,7 @@ pub(crate) fn convert_reference_type(
}
fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option<ReferenceConversionType> {
ty.is_copy(db).then(|| ReferenceConversionType::Copy)
ty.is_copy(db).then_some(ReferenceConversionType::Copy)
}
fn handle_as_ref_str(
@ -624,7 +624,7 @@ fn handle_as_ref_str(
let str_type = hir::BuiltinType::str().ty(db);
ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type])
.then(|| ReferenceConversionType::AsRefStr)
.then_some(ReferenceConversionType::AsRefStr)
}
fn handle_as_ref_slice(
@ -636,7 +636,7 @@ fn handle_as_ref_slice(
let slice_type = hir::Type::new_slice(type_argument);
ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type])
.then(|| ReferenceConversionType::AsRefSlice)
.then_some(ReferenceConversionType::AsRefSlice)
}
fn handle_dereferenced(
@ -647,7 +647,7 @@ fn handle_dereferenced(
let type_argument = ty.type_arguments().next()?;
ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument])
.then(|| ReferenceConversionType::Dereferenced)
.then_some(ReferenceConversionType::Dereferenced)
}
fn handle_option_as_ref(

View file

@ -419,7 +419,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
make::ext::path_from_idents(["Self", &variant.name()?.to_string()])
}
fn gen_tuple_field(field_name: &String) -> ast::Pat {
fn gen_tuple_field(field_name: &str) -> ast::Pat {
ast::Pat::IdentPat(make::ident_pat(false, false, make::name(field_name)))
}

View file

@ -357,7 +357,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
Some(
input_expressions
.into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then(|| group))
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
.filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
.collect::<Vec<ast::Expr>>(),
)
@ -371,9 +371,7 @@ fn attributes_are_sorted() {
attrs.for_each(|next| {
assert!(
prev < next,
r#"ATTRIBUTES array is not sorted, "{}" should come after "{}""#,
prev,
next
r#"ATTRIBUTES array is not sorted, "{prev}" should come after "{next}""#
);
prev = next;
});

View file

@ -32,12 +32,12 @@ pub(crate) fn complete_dot(
complete_fields(
acc,
ctx,
&receiver_ty,
receiver_ty,
|acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
|acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
);
}
complete_methods(ctx, &receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
}
pub(crate) fn complete_undotted_self(

View file

@ -68,28 +68,26 @@ mod tests {
&format!(
r#"
#[rustc_builtin_macro]
macro_rules! {} {{
macro_rules! {macro_name} {{
($var:literal) => {{ 0 }}
}}
fn main() {{
let foo = {}!("CAR$0");
let foo = {macro_name}!("CAR$0");
}}
"#,
macro_name, macro_name
"#
),
&format!(
r#"
#[rustc_builtin_macro]
macro_rules! {} {{
macro_rules! {macro_name} {{
($var:literal) => {{ 0 }}
}}
fn main() {{
let foo = {}!("CARGO_BIN_NAME");
let foo = {macro_name}!("CARGO_BIN_NAME");
}}
"#,
macro_name, macro_name
"#
),
);
}

View file

@ -64,7 +64,7 @@ pub(crate) fn complete_expr_path(
acc.add_enum_variants(ctx, path_ctx, e);
}
ctx.iterate_path_candidates(&ty, |item| {
ctx.iterate_path_candidates(ty, |item| {
add_assoc_item(acc, item);
});

View file

@ -13,7 +13,7 @@ pub(crate) fn format_string(
original: &ast::String,
expanded: &ast::String,
) {
if !is_format_string(&expanded) {
if !is_format_string(expanded) {
return;
}
let cursor = ctx.position.offset;

View file

@ -845,11 +845,10 @@ trait Test {{
struct T;
impl Test for T {{
{}
{}
{hint}
{next_sibling}
}}
"#,
hint, next_sibling
"#
),
&format!(
r#"
@ -861,11 +860,10 @@ trait Test {{
struct T;
impl Test for T {{
{}
{}
{completed}
{next_sibling}
}}
"#,
completed, next_sibling
"#
),
)
};
@ -905,10 +903,9 @@ struct T;
impl Foo for T {{
// Comment
#[bar]
{}
{hint}
}}
"#,
hint
"#
),
&format!(
r#"
@ -922,10 +919,9 @@ struct T;
impl Foo for T {{
// Comment
#[bar]
{}
{completed}
}}
"#,
completed
"#
),
)
};

View file

@ -153,8 +153,7 @@ pub(crate) fn complete_postfix(
"match",
"match expr {}",
&format!(
"match {} {{\n Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}",
receiver_text
"match {receiver_text} {{\n Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}"
),
)
.add_to(acc);

View file

@ -58,7 +58,7 @@ pub(crate) fn complete_type_path(
trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item))
}
Qualified::TypeAnchor { ty: Some(ty), trait_: None } => {
ctx.iterate_path_candidates(&ty, |item| {
ctx.iterate_path_candidates(ty, |item| {
add_assoc_item(acc, item);
});

View file

@ -226,7 +226,7 @@ fn analyze(
find_node_at_offset(&file_with_fake_ident, offset)
{
let parent = name_ref.syntax().parent()?;
let (mut nameref_ctx, _) = classify_name_ref(&sema, &original_file, name_ref, parent)?;
let (mut nameref_ctx, _) = classify_name_ref(sema, &original_file, name_ref, parent)?;
if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
path_ctx.kind = PathKind::Derive {
existing_derives: sema
@ -277,7 +277,7 @@ fn analyze(
return Some((analysis, (None, None), QualifierCtx::default()));
}
};
let expected = expected_type_and_name(sema, &self_token, &name_like);
let expected = expected_type_and_name(sema, self_token, &name_like);
let mut qual_ctx = QualifierCtx::default();
let analysis = match name_like {
ast::NameLike::Lifetime(lifetime) => {
@ -374,7 +374,7 @@ fn expected_type_and_name(
ast::ArgList(_) => {
cov_mark::hit!(expected_type_fn_param);
ActiveParameter::at_token(
&sema,
sema,
token.clone(),
).map(|ap| {
let name = ap.ident().map(NameOrNameRef::Name);
@ -507,7 +507,7 @@ fn classify_lifetime(
_ => LifetimeKind::Lifetime,
}
};
let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
let lifetime = find_node_at_offset(original_file, lifetime.syntax().text_range().start());
Some(LifetimeContext { lifetime, kind })
}
@ -548,7 +548,7 @@ fn classify_name(
_ => return None,
}
};
let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
let name = find_node_at_offset(original_file, name.syntax().text_range().start());
Some(NameContext { name, kind })
}
@ -558,7 +558,7 @@ fn classify_name_ref(
name_ref: ast::NameRef,
parent: SyntaxNode,
) -> Option<(NameRefContext, QualifierCtx)> {
let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
let nameref = find_node_at_offset(original_file, name_ref.syntax().text_range().start());
let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());

View file

@ -68,7 +68,7 @@ pub(crate) fn render_union_literal(
item.set_documentation(ctx.docs(un))
.set_deprecated(ctx.is_deprecated(un))
.detail(&detail)
.detail(detail)
.set_relevance(ctx.completion_relevance());
match ctx.snippet_cap() {

View file

@ -183,7 +183,7 @@ pub(crate) fn check_edit_with_config(
let ra_fixture_after = trim_indent(ra_fixture_after);
let (db, position) = position(ra_fixture_before);
let completions: Vec<CompletionItem> =
crate::completions(&db, &config, position, None).unwrap().into();
crate::completions(&db, &config, position, None).unwrap();
let (completion,) = completions
.iter()
.filter(|it| it.lookup() == what)

View file

@ -91,7 +91,7 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior)
.flat_map(|list| list.use_trees())
// We use Option here to early return from this function(this is not the
// same as a `filter` op).
.map(|tree| merge.is_tree_allowed(&tree).then(|| tree))
.map(|tree| merge.is_tree_allowed(&tree).then_some(tree))
.collect::<Option<_>>()?;
use_trees.sort_unstable_by(|a, b| path_cmp_for_sort(a.path(), b.path()));
for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) {

View file

@ -608,7 +608,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: ast::NameLike::NameRef(name_ref.clone()),
category: is_name_ref_in_import(name_ref).then(|| ReferenceCategory::Import),
category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import),
};
sink(file_id, reference)
}
@ -787,7 +787,7 @@ impl ReferenceCategory {
fn new(def: &Definition, r: &ast::NameRef) -> Option<ReferenceCategory> {
// Only Locals and Fields have accesses for now.
if !matches!(def, Definition::Local(_) | Definition::Field(_)) {
return is_name_ref_in_import(r).then(|| ReferenceCategory::Import);
return is_name_ref_in_import(r).then_some(ReferenceCategory::Import);
}
let mode = r.syntax().ancestors().find_map(|node| {

View file

@ -452,7 +452,7 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Pat
let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
let paths = input_expressions
.into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then(|| group))
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
.filter_map(|mut tokens| {
syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
ast::Expr::PathExpr(it) => it.path(),

View file

@ -128,9 +128,9 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
)?;
use_trivial_constructor(
&ctx.sema.db,
ctx.sema.db,
ide_db::helpers::mod_path_to_ast(&type_path),
&ty,
ty,
)
})();

View file

@ -68,7 +68,7 @@ fn missing_record_expr_field_fixes(
}
let new_field = make::record_field(
None,
make::name(&record_expr_field.field_name()?.ident_token()?.text()),
make::name(record_expr_field.field_name()?.ident_token()?.text()),
make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?),
);

View file

@ -229,7 +229,7 @@ pub fn diagnostics(
for node in parse.syntax().descendants() {
handlers::useless_braces::useless_braces(&mut res, file_id, &node);
handlers::field_shorthand::field_shorthand(&mut res, file_id, &node);
handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, &config);
handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, config);
}
let module = sema.to_module_def(file_id);

View file

@ -11,7 +11,7 @@ fn sourcegen_diagnostic_docs() {
diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
let contents = sourcegen::add_preamble("sourcegen_diagnostic_docs", contents);
let dst = project_root().join("docs/user/generated_diagnostic.adoc");
fs::write(&dst, &contents).unwrap();
fs::write(dst, contents).unwrap();
}
#[derive(Debug)]

View file

@ -273,7 +273,7 @@ impl DocCommentToken {
let (in_expansion_range, link, ns) =
extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
let mapped = doc_mapping.map(range)?;
(mapped.value.contains(abs_in_expansion_offset)).then(|| (mapped.value, link, ns))
(mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns))
})?;
// get the relative range to the doc/attribute in the expansion
let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;
@ -285,7 +285,7 @@ impl DocCommentToken {
}
}
fn broken_link_clone_cb<'a>(link: BrokenLink<'a>) -> Option<(CowStr<'a>, CowStr<'a>)> {
fn broken_link_clone_cb(link: BrokenLink<'_>) -> Option<(CowStr<'_>, CowStr<'_>)> {
Some((/*url*/ link.reference.clone(), /*title*/ link.reference))
}

View file

@ -163,7 +163,7 @@ fn _format(
) -> Option<String> {
use ide_db::base_db::{FileLoader, SourceDatabase};
// hack until we get hygiene working (same character amount to preserve formatting as much as possible)
const DOLLAR_CRATE_REPLACE: &str = &"__r_a_";
const DOLLAR_CRATE_REPLACE: &str = "__r_a_";
let expansion = expansion.replace("$crate", DOLLAR_CRATE_REPLACE);
let (prefix, suffix) = match kind {
SyntaxKind::MACRO_PAT => ("fn __(", ": u32);"),

View file

@ -205,7 +205,7 @@ fn extend_single_word_in_comment_or_string(
}
let start_idx = before.rfind(non_word_char)? as u32;
let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32;
let end_idx = after.find(non_word_char).unwrap_or(after.len()) as u32;
let from: TextSize = (start_idx + 1).into();
let to: TextSize = (cursor_position + end_idx).into();

View file

@ -110,7 +110,7 @@ fn impls_for_trait_item(
.filter_map(|imp| {
let item = imp.items(sema.db).iter().find_map(|itm| {
let itm_name = itm.name(sema.db)?;
(itm_name == fun_name).then(|| *itm)
(itm_name == fun_name).then_some(*itm)
})?;
item.try_to_nav(sema.db)
})

View file

@ -110,7 +110,7 @@ fn highlight_references(
.and_then(|decl| decl.focus_range)
.map(|range| {
let category =
references::decl_mutability(&def, node, range).then(|| ReferenceCategory::Write);
references::decl_mutability(&def, node, range).then_some(ReferenceCategory::Write);
HighlightedRange { range, category }
});
if let Some(hl_range) = hl_range {
@ -365,7 +365,7 @@ mod tests {
let mut expected = annotations
.into_iter()
.map(|(r, access)| (r.range, (!access.is_empty()).then(|| access)))
.map(|(r, access)| (r.range, (!access.is_empty()).then_some(access)))
.collect::<Vec<_>>();
let mut actual = hls

View file

@ -195,7 +195,7 @@ pub(crate) fn hover(
// fallback to type hover if there aren't any other suggestions
// this finds its own range instead of using the closest token's range
.or_else(|| {
descended.iter().find_map(|token| hover_type_fallback(sema, config, token, &token))
descended.iter().find_map(|token| hover_type_fallback(sema, config, token, token))
})
}

View file

@ -276,7 +276,7 @@ pub(super) fn struct_rest_pat(
}
};
for (_, t) in &missing_fields {
walk_and_push_ty(sema.db, &t, &mut push_new_def);
walk_and_push_ty(sema.db, t, &mut push_new_def);
}
res.markup = {

View file

@ -253,7 +253,7 @@ fn label_of_ty(
ty: hir::Type,
label_builder: &mut InlayHintLabelBuilder<'_>,
) {
let iter_item_type = hint_iterator(sema, &famous_defs, &ty);
let iter_item_type = hint_iterator(sema, famous_defs, &ty);
match iter_item_type {
Some(ty) => {
const LABEL_START: &str = "impl Iterator<Item = ";
@ -279,7 +279,7 @@ fn label_of_ty(
location_link_enabled: config.location_links,
result: InlayHintLabel::default(),
};
rec(sema, &famous_defs, config.max_length, ty, &mut label_builder);
rec(sema, famous_defs, config.max_length, ty, &mut label_builder);
let r = label_builder.finish();
Some(r)
}
@ -315,7 +315,7 @@ pub(crate) fn inlay_hints(
let mut acc = Vec::new();
if let Some(scope) = sema.scope(&file) {
if let Some(scope) = sema.scope(file) {
let famous_defs = FamousDefs(&sema, scope.krate());
let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);

View file

@ -167,7 +167,7 @@ fn is_named_constructor(
ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(),
_ => return None,
};
(ctor_name == ty_name).then(|| ())
(ctor_name == ty_name).then_some(())
}
fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir::Type) -> bool {

View file

@ -31,7 +31,7 @@ pub(super) fn hints(
.last();
let range =
outer_paren_pat.as_ref().map_or_else(|| pat.syntax(), |it| it.syntax()).text_range();
let pattern_adjustments = sema.pattern_adjustments(&pat);
let pattern_adjustments = sema.pattern_adjustments(pat);
pattern_adjustments.iter().for_each(|ty| {
let reference = ty.is_reference();
let mut_reference = ty.is_mutable_reference();

View file

@ -364,11 +364,8 @@ mod tests {
}
Err(err) => {
if ra_fixture_after.starts_with("error:") {
let error_message = ra_fixture_after
.chars()
.into_iter()
.skip("error:".len())
.collect::<String>();
let error_message =
ra_fixture_after.chars().skip("error:".len()).collect::<String>();
assert_eq!(error_message.trim(), err.to_string());
} else {
panic!("Rename to '{new_name}' failed unexpectedly: {err}")

View file

@ -111,7 +111,7 @@ fn punctuation(
let is_raw_ptr = (|| {
let prefix_expr = parent.and_then(ast::PrefixExpr::cast)?;
let expr = prefix_expr.expr()?;
sema.type_of_expr(&expr)?.original.is_raw_ptr().then(|| ())
sema.type_of_expr(&expr)?.original.is_raw_ptr().then_some(())
})();
if let Some(()) = is_raw_ptr {
HlTag::Operator(HlOperator::Other) | HlMod::Unsafe

View file

@ -140,7 +140,7 @@ impl Shift {
| tt::Leaf::Punct(tt::Punct { id, .. })
| tt::Leaf::Literal(tt::Literal { id, .. })) = leaf;
(id != tt::TokenId::unspecified()).then(|| id.0)
(id != tt::TokenId::unspecified()).then_some(id.0)
}
};
subtree.token_trees.iter().filter_map(filter).max()

View file

@ -116,9 +116,9 @@ enum Mode {
Template,
}
fn next_op<'a>(
fn next_op(
first_peeked: &tt::TokenTree,
src: &mut TtIter<'a>,
src: &mut TtIter<'_>,
mode: Mode,
) -> Result<Op, ParseError> {
let res = match first_peeked {
@ -273,7 +273,7 @@ fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind),
_ => return Err(ParseError::InvalidRepeat),
},
};
return Ok((has_sep.then(|| separator), repeat_kind));
return Ok((has_sep.then_some(separator), repeat_kind));
}
}
}

View file

@ -145,7 +145,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
}
if iter.peek_n(0).is_some() {
res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
res.push(tt::Subtree { delimiter: None, token_trees: iter.cloned().collect() });
}
res

View file

@ -30,7 +30,7 @@ fn check_punct_spacing(fixture: &str) {
while !cursor.eof() {
while let Some(token_tree) = cursor.token_tree() {
if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, id, .. }), _) = token_tree {
if let Some(expected) = annotations.remove(&id) {
if let Some(expected) = annotations.remove(id) {
assert_eq!(expected, *spacing);
}
}

View file

@ -83,11 +83,12 @@ fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) {
}
p.expect(T![>]);
} else {
let mut empty = true;
if first {
let empty = if first {
p.eat(T![::]);
empty = false;
}
false
} else {
true
};
match p.current() {
IDENT => {
name_ref(p);

View file

@ -54,7 +54,7 @@ impl Output {
}
pub(crate) fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | (0 << 4) | 1;
let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | 1;
self.event.push(e)
}
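
Dropping `| (0 << 4)` is clippy's `identity_op`: OR-ing in zero changes nothing, even if the term documented a reserved bit-field position. Sketch:

    fn main() {
        let kind: u32 = 5;
        let n_tokens: u32 = 2;
        // Before: the (0 << 4) term is a no-op in the OR chain.
        let old = (kind << 16) | (n_tokens << 8) | (0 << 4) | 1;
        let new = (kind << 16) | (n_tokens << 8) | 1;
        assert_eq!(old, new);
    }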

View file

@ -210,7 +210,7 @@ impl<'a> Writer<'a> {
let idx_tag = match child {
tt::TokenTree::Subtree(it) => {
let idx = self.enqueue(it);
idx << 2 | 0b00
idx << 2
}
tt::TokenTree::Leaf(leaf) => match leaf {
tt::Leaf::Literal(lit) => {

View file

@ -182,7 +182,7 @@ impl server::TokenStream for RustAnalyzer {
.map(|tree| match tree {
tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
bridge::TokenTree::Ident(bridge::Ident {
sym: Symbol::intern(&ident.text.trim_start_matches("r#")),
sym: Symbol::intern(ident.text.trim_start_matches("r#")),
is_raw: ident.text.starts_with("r#"),
span: ident.id,
})

View file

@ -30,12 +30,12 @@ fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect:
let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap();
expect.assert_eq(&format!("{:?}", res));
expect.assert_eq(&format!("{res:?}"));
}
pub(crate) fn list() -> Vec<String> {
let dylib_path = proc_macro_test_dylib_path();
let mut srv = ProcMacroSrv::default();
let res = srv.list_macros(&dylib_path).unwrap();
res.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()
res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect()
}

View file

@ -114,11 +114,11 @@ impl Drop for CpuSpan {
match out {
Ok(out) if out.status.success() => {
let svg = profile_data.with_extension("svg");
std::fs::write(&svg, &out.stdout).unwrap();
std::fs::write(&svg, out.stdout).unwrap();
eprintln!("Profile rendered to:\n\n {}\n", svg.display());
}
_ => {
eprintln!("Failed to run:\n\n {:?}\n", cmd);
eprintln!("Failed to run:\n\n {cmd:?}\n");
}
}
}

View file

@ -66,7 +66,7 @@ impl WorkspaceBuildScripts {
_ => {
let mut cmd = Command::new(toolchain::cargo());
cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]);
cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
// --all-targets includes tests, benches and examples in addition to the
// default lib and bins. This is an independent concept from the --target
@@ -74,7 +74,7 @@ impl WorkspaceBuildScripts {
cmd.arg("--all-targets");
if let Some(target) = &config.target {
cmd.args(&["--target", target]);
cmd.args(["--target", target]);
}
match &config.features {
@@ -122,7 +122,7 @@ impl WorkspaceBuildScripts {
InvocationLocation::Root(root) if config.run_build_script_command.is_some() => {
root.as_path()
}
_ => &workspace.workspace_root(),
_ => workspace.workspace_root(),
}
.as_ref();
@@ -133,7 +133,7 @@ impl WorkspaceBuildScripts {
// building build scripts failed, attempt to build with --keep-going so
// that we potentially get more build data
let mut cmd = Self::build_command(config)?;
cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1");
cmd.args(["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1");
let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?;
res.error = Some(error);
Ok(res)
@@ -303,8 +303,7 @@ impl WorkspaceBuildScripts {
Ok(it) => acc.push(it),
Err(err) => {
push_err(&format!(
"invalid cfg from cargo-metadata: {}",
err
"invalid cfg from cargo-metadata: {err}"
));
return;
}
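
The repeated `args(&[...])` → `args([...])` edits in this file rely on arrays implementing `IntoIterator` by value (Rust 1.53), which makes the borrow redundant; clippy reports it as `needless_borrow`. A runnable sketch that only builds the command without spawning it:

    use std::process::Command;

    fn main() {
        let mut cmd = Command::new("cargo");
        // Arrays are `IntoIterator` by value, so no `&` is needed:
        cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
        println!("{cmd:?}"); // we only construct the command here, nothing runs
    }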

View file

@@ -411,7 +411,7 @@ impl CargoWorkspace {
CargoWorkspace { packages, targets, workspace_root }
}
pub fn packages<'a>(&'a self) -> impl Iterator<Item = Package> + ExactSizeIterator + 'a {
pub fn packages(&self) -> impl Iterator<Item = Package> + ExactSizeIterator + '_ {
self.packages.iter().map(|(id, _pkg)| id)
}
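
The signature change drops an explicit lifetime that elision already covers, clippy's `needless_lifetimes`: `&self` plus `+ '_` says the same thing the named `'a` did. A sketch with a made-up struct:

    struct Packages {
        names: Vec<String>,
    }

    impl Packages {
        // `fn iter<'a>(&'a self) -> impl Iterator<Item = &'a str> + 'a` spelled
        // the lifetime out; the anonymous `'_` lets elision do the same job.
        fn iter(&self) -> impl Iterator<Item = &str> + '_ {
            self.names.iter().map(|s| s.as_str())
        }
    }

    fn main() {
        let pkgs = Packages { names: vec!["ide".into(), "vfs".into()] };
        assert_eq!(pkgs.iter().count(), 2);
    }
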
@@ -517,7 +517,7 @@ fn cargo_config_build_target(
cargo_config.envs(extra_env);
cargo_config
.current_dir(cargo_toml.parent())
.args(&["-Z", "unstable-options", "config", "get", "build.target"])
.args(["-Z", "unstable-options", "config", "get", "build.target"])
.env("RUSTC_BOOTSTRAP", "1");
// if successful we receive `build.target = "target-triple"`
// or `build.target = ["<target 1>", ..]`

View file

@@ -50,10 +50,10 @@ fn get_rust_cfgs(
cargo_config.envs(extra_env);
cargo_config
.current_dir(cargo_toml.parent())
.args(&["rustc", "-Z", "unstable-options", "--print", "cfg"])
.args(["rustc", "-Z", "unstable-options", "--print", "cfg"])
.env("RUSTC_BOOTSTRAP", "1");
if let Some(target) = target {
cargo_config.args(&["--target", target]);
cargo_config.args(["--target", target]);
}
match utf8_stdout(cargo_config) {
Ok(it) => return Ok(it),
@@ -63,9 +63,9 @@ fn get_rust_cfgs(
// using unstable cargo features failed, fall back to using plain rustc
let mut cmd = Command::new(toolchain::rustc());
cmd.envs(extra_env);
cmd.args(&["--print", "cfg", "-O"]);
cmd.args(["--print", "cfg", "-O"]);
if let Some(target) = target {
cmd.args(&["--target", target]);
cmd.args(["--target", target]);
}
utf8_stdout(cmd)
}

View file

@@ -171,7 +171,7 @@ fn discover_sysroot_dir(
) -> Result<AbsPathBuf> {
let mut rustc = Command::new(toolchain::rustc());
rustc.envs(extra_env);
rustc.current_dir(current_dir).args(&["--print", "sysroot"]);
rustc.current_dir(current_dir).args(["--print", "sysroot"]);
tracing::debug!("Discovering sysroot by {:?}", rustc);
let stdout = utf8_stdout(rustc)?;
Ok(AbsPathBuf::assert(PathBuf::from(stdout)))
@@ -203,7 +203,7 @@ fn discover_sysroot_src_dir_or_add_component(
.or_else(|| {
let mut rustup = Command::new(toolchain::rustup());
rustup.envs(extra_env);
rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]);
rustup.current_dir(current_dir).args(["component", "add", "rust-src"]);
tracing::info!("adding rust-src component by {:?}", rustup);
utf8_stdout(rustup).ok()?;
get_rust_src(sysroot_path)

View file

@@ -15,10 +15,10 @@ pub(super) fn get(
let mut cmd = Command::new(toolchain::rustc());
cmd.envs(extra_env);
cmd.current_dir(cargo_toml.parent())
.args(&["-Z", "unstable-options", "rustc", "--print", "target-spec-json"])
.args(["-Z", "unstable-options", "rustc", "--print", "target-spec-json"])
.env("RUSTC_BOOTSTRAP", "1");
if let Some(target) = target {
cmd.args(&["--target", target]);
cmd.args(["--target", target]);
}
match utf8_stdout(cmd) {
Ok(it) => return Ok(it),
@@ -28,10 +28,10 @@ pub(super) fn get(
// using unstable cargo features failed, fall back to using plain rustc
let mut cmd = Command::new(toolchain::rustc());
cmd.envs(extra_env)
.args(&["-Z", "unstable-options", "rustc", "--print", "target-spec-json"])
.args(["-Z", "unstable-options", "rustc", "--print", "target-spec-json"])
.env("RUSTC_BOOTSTRAP", "1");
if let Some(target) = target {
cmd.args(&["--target", target]);
cmd.args(["--target", target]);
}
utf8_stdout(cmd)
})()

View file

@@ -407,7 +407,7 @@ impl ProjectWorkspace {
["libexec", "lib"]
.into_iter()
.map(|segment| sysroot.root().join(segment).join(&standalone_server_name))
.find(|server_path| std::fs::metadata(&server_path).is_ok())
.find(|server_path| std::fs::metadata(server_path).is_ok())
}
_ => None,
}

View file

@@ -42,7 +42,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
"(".to_string(),
]),
all_commit_characters: None,
completion_item: completion_item(&config),
completion_item: completion_item(config),
work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
}),
signature_help_provider: Some(SignatureHelpOptions {
@@ -67,7 +67,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
},
document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions {
first_trigger_character: "=".to_string(),
more_trigger_character: Some(more_trigger_character(&config)),
more_trigger_character: Some(more_trigger_character(config)),
}),
selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),

View file

@@ -102,7 +102,7 @@ impl flags::Scip {
let symbol = tokens_to_symbol
.entry(id)
.or_insert_with(|| {
let symbol = token_to_symbol(&token).unwrap_or_else(&mut new_local_symbol);
let symbol = token_to_symbol(token).unwrap_or_else(&mut new_local_symbol);
scip::symbol::format_symbol(symbol)
})
.clone();
@@ -176,7 +176,7 @@ fn get_relative_filepath(
rootpath: &vfs::AbsPathBuf,
file_id: ide::FileId,
) -> Option<String> {
Some(vfs.file_path(file_id).as_path()?.strip_prefix(&rootpath)?.as_ref().to_str()?.to_string())
Some(vfs.file_path(file_id).as_path()?.strip_prefix(rootpath)?.as_ref().to_str()?.to_string())
}
// SCIP Ranges have a (very large) optimization that ranges if they are on the same line

View file

@@ -1908,9 +1908,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
let doc = doc.trim_end_matches('\n');
assert!(
doc.ends_with('.') && doc.starts_with(char::is_uppercase),
"bad docs for {}: {:?}",
field,
doc
"bad docs for {field}: {doc:?}"
);
let default = default.parse::<serde_json::Value>().unwrap();
@@ -2213,17 +2211,16 @@ fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
let doc = doc_comment_to_string(doc);
if default.contains('\n') {
format!(
r#"[[{}]]{}::
r#"[[{name}]]{name}::
+
--
Default:
----
{}
{default}
----
{}
{doc}
--
"#,
name, name, default, doc
"#
)
} else {
format!("[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n")

View file

@@ -101,8 +101,7 @@ impl DiagnosticCollection {
file_id: FileId,
) -> impl Iterator<Item = &lsp_types::Diagnostic> {
let native = self.native.get(&file_id).into_iter().flatten();
let check =
self.check.values().filter_map(move |it| it.get(&file_id)).into_iter().flatten();
let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten();
native.chain(check)
}

View file

@@ -505,8 +505,7 @@ fn rustc_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescripti
fn clippy_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescription> {
code.and_then(|code| {
lsp_types::Url::parse(&format!(
"https://rust-lang.github.io/rust-clippy/master/index.html#{}",
code
"https://rust-lang.github.io/rust-clippy/master/index.html#{code}"
))
.ok()
.map(|href| lsp_types::CodeDescription { href })

Some files were not shown because too many files have changed in this diff.