diff --git a/Cargo.toml b/Cargo.toml
index 583c7bbe33..18c9bb5ce0 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,9 +10,7 @@ license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer team"]
 
 [profile.dev]
-# Disabling debug info speeds up builds a bunch,
-# and we don't rely on it for debugging that much.
-debug = 0
+debug = 1
 
 [profile.dev.package]
 # These speed up local tests.
diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs
index 8cf1e15f31..55043fdc4b 100644
--- a/crates/hir-def/src/data.rs
+++ b/crates/hir-def/src/data.rs
@@ -637,10 +637,6 @@ impl<'a> AssocItemCollector<'a> {
                         attr,
                     ) {
                         Ok(ResolvedAttr::Macro(call_id)) => {
-                            // If proc attribute macro expansion is disabled, skip expanding it here
-                            if !self.db.expand_proc_attr_macros() {
-                                continue 'attrs;
-                            }
                             let loc = self.db.lookup_intern_macro_call(call_id);
                             if let MacroDefKind::ProcMacro(_, exp, _) = loc.def.kind {
                                 // If there's no expander for the proc macro (e.g. the
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 6d2eb71549..b5045efb62 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -83,7 +83,9 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
             let name = Name::new_text_dont_use(it.name.clone());
             (
                 name,
-                if it.disabled {
+                if !db.expand_proc_attr_macros() {
+                    CustomProcMacroExpander::dummy()
+                } else if it.disabled {
                     CustomProcMacroExpander::disabled()
                 } else {
                     CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId::new(
@@ -1331,16 +1333,6 @@ impl DefCollector<'_> {
             let call_id = call_id();
 
             if let MacroDefKind::ProcMacro(_, exp, _) = def.kind {
-                // If proc attribute macro expansion is disabled, skip expanding it here
-                if !self.db.expand_proc_attr_macros() {
-                    self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
-                        directive.module_id,
-                        self.db.lookup_intern_macro_call(call_id).kind,
-                        def.krate,
-                    ));
-                    return recollect_without(self);
-                }
-
                 // If there's no expander for the proc macro (e.g.
                 // because proc macros are disabled, or building the
                 // proc macro crate failed), report this and skip
diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs
index 523a4c107b..4ab53d20b5 100644
--- a/crates/hir-def/src/nameres/diagnostics.rs
+++ b/crates/hir-def/src/nameres/diagnostics.rs
@@ -17,16 +17,47 @@ use crate::{
 
 #[derive(Debug, PartialEq, Eq)]
 pub enum DefDiagnosticKind {
-    UnresolvedModule { ast: AstId<ast::Module>, candidates: Box<[String]> },
-    UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },
-    UnresolvedImport { id: ItemTreeId<item_tree::Use>, index: Idx<ast::UseTree> },
-    UnconfiguredCode { ast: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },
-    UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId },
-    UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
-    UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
-    InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
-    MalformedDerive { ast: AstId<ast::Adt>, id: usize },
-    MacroDefError { ast: AstId<ast::Macro>, message: String },
+    UnresolvedModule {
+        ast: AstId<ast::Module>,
+        candidates: Box<[String]>,
+    },
+    UnresolvedExternCrate {
+        ast: AstId<ast::ExternCrate>,
+    },
+    UnresolvedImport {
+        id: ItemTreeId<item_tree::Use>,
+        index: Idx<ast::UseTree>,
+    },
+    UnconfiguredCode {
+        ast: ErasedAstId,
+        cfg: CfgExpr,
+        opts: CfgOptions,
+    },
+    /// A proc-macro that is lacking an expander, this might be due to build scripts not yet having
+    /// run or proc-macro expansion being disabled.
+    UnresolvedProcMacro {
+        ast: MacroCallKind,
+        krate: CrateId,
+    },
+    UnresolvedMacroCall {
+        ast: MacroCallKind,
+        path: ModPath,
+    },
+    UnimplementedBuiltinMacro {
+        ast: AstId<ast::Macro>,
+    },
+    InvalidDeriveTarget {
+        ast: AstId<ast::Item>,
+        id: usize,
+    },
+    MalformedDerive {
+        ast: AstId<ast::Adt>,
+        id: usize,
+    },
+    MacroDefError {
+        ast: AstId<ast::Macro>,
+        message: String,
+    },
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -92,10 +123,6 @@ impl DefDiagnostic {
         Self { in_module: container, kind: DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } }
     }
 
-    // FIXME: Whats the difference between this and unresolved_macro_call
-    // FIXME: This is used for a lot of things, unresolved proc macros, disabled proc macros, etc
-    // yet the diagnostic handler in ide-diagnostics has to figure out what happened because this
-    // struct loses all that information!
     pub fn unresolved_proc_macro(
         container: LocalModuleId,
         ast: MacroCallKind,
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index 743fac50f4..fc9fa93268 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -1,4 +1,6 @@
 //! Things to wrap other things in file ids.
+use std::borrow::Borrow;
+
 use either::Either;
 use span::{
     AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr,
@@ -76,6 +78,13 @@ impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
     pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
         self.with_value(&self.value)
     }
+
+    pub fn borrow<U>(&self) -> InFileWrapper<FileKind, &U>
+    where
+        T: Borrow<U>,
+    {
+        self.with_value(self.value.borrow())
+    }
 }
 
 impl<FileKind: Copy, N: AstNode> InFileWrapper<FileKind, N> {
@@ -156,14 +165,61 @@ impl<FileKind: Copy, N: AstNode> InFileWrapper<FileKind, N> {
 }
 
 // region:specific impls
+impl<SN: Borrow<SyntaxNode>> InRealFile<SN> {
+    pub fn file_range(&self) -> FileRange {
+        FileRange { file_id: self.file_id, range: self.value.borrow().text_range() }
+    }
+}
+
+impl<SN: Borrow<SyntaxNode>> InFile<SN> {
+    pub fn parent_ancestors_with_macros(
+        self,
+        db: &dyn db::ExpandDatabase,
+    ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+        let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
+            Some(parent) => Some(node.with_value(parent)),
+            None => db
+                .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
+                .to_node_item(db)
+                .syntax()
+                .cloned()
+                .map(|node| node.parent())
+                .transpose(),
+        };
+        std::iter::successors(succ(&self.borrow().cloned()), succ)
+    }
+
+    pub fn ancestors_with_macros(
+        self,
+        db: &dyn db::ExpandDatabase,
+    ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+        let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
+            Some(parent) => Some(node.with_value(parent)),
+            None => db
+                .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
+                .to_node_item(db)
+                .syntax()
+                .cloned()
+                .map(|node| node.parent())
+                .transpose(),
+        };
+        std::iter::successors(Some(self.borrow().cloned()), succ)
+    }
+
+    pub fn kind(&self) -> parser::SyntaxKind {
+        self.value.borrow().kind()
+    }
+
+    pub fn text_range(&self) -> TextRange {
+        self.value.borrow().text_range()
+    }
 
-impl InFile<&SyntaxNode> {
     /// Falls back to the macro call range if the node cannot be mapped up fully.
     ///
     /// For attributes and derives, this will point back to the attribute only.
     /// For the entire item use [`InFile::original_file_range_full`].
     pub fn original_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
-        self.map(SyntaxNode::text_range).original_node_file_range_rooted(db)
+        self.borrow().map(SyntaxNode::text_range).original_node_file_range_rooted(db)
     }
 
     /// Falls back to the macro call range if the node cannot be mapped up fully.
@@ -171,15 +227,7 @@ impl InFile<&SyntaxNode> {
         self,
         db: &dyn db::ExpandDatabase,
     ) -> FileRange {
-        self.map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
-    }
-
-    /// Attempts to map the syntax node back up its macro calls.
-    pub fn original_file_range_opt(
-        self,
-        db: &dyn db::ExpandDatabase,
-    ) -> Option<(FileRange, SyntaxContextId)> {
-        self.map(SyntaxNode::text_range).original_node_file_range_opt(db)
+        self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
     }
 
     pub fn original_syntax_node_rooted(
@@ -190,16 +238,19 @@ impl InFile<&SyntaxNode> {
         // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
         let file_id = match self.file_id.repr() {
             HirFileIdRepr::FileId(file_id) => {
-                return Some(InRealFile { file_id, value: self.value.clone() })
+                return Some(InRealFile { file_id, value: self.value.borrow().clone() })
             }
             HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m,
             _ => return None,
         };
 
-        let FileRange { file_id, range } =
-            map_node_range_up_rooted(db, &db.expansion_span_map(file_id), self.value.text_range())?;
+        let FileRange { file_id, range } = map_node_range_up_rooted(
+            db,
+            &db.expansion_span_map(file_id),
+            self.value.borrow().text_range(),
+        )?;
 
-        let kind = self.value.kind();
+        let kind = self.kind();
         let value = db
             .parse(file_id)
             .syntax_node()
@@ -211,6 +262,16 @@ impl InFile<&SyntaxNode> {
     }
 }
 
+impl InFile<&SyntaxNode> {
+    /// Attempts to map the syntax node back up its macro calls.
+    pub fn original_file_range_opt(
+        self,
+        db: &dyn db::ExpandDatabase,
+    ) -> Option<(FileRange, SyntaxContextId)> {
+        self.borrow().map(SyntaxNode::text_range).original_node_file_range_opt(db)
+    }
+}
+
 impl InMacroFile {
     pub fn upmap_once(
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 8e71a54f80..81c57f6cae 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -68,38 +68,44 @@ impl SourceAnalyzer {
     pub(crate) fn new_for_body(
         db: &dyn HirDatabase,
         def: DefWithBodyId,
-        node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+        node: InFile<&SyntaxNode>,
         offset: Option<TextSize>,
     ) -> SourceAnalyzer {
-        let (body, source_map) = db.body_with_source_map(def);
-        let scopes = db.expr_scopes(def);
-        let scope = match offset {
-            None => scope_for(&scopes, &source_map, node),
-            Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
-        };
-        let resolver = resolver_for_scope(db.upcast(), def, scope);
-        SourceAnalyzer {
-            resolver,
-            def: Some((def, body, source_map)),
-            infer: Some(db.infer(def)),
-            file_id,
-        }
+        Self::new_for_body_(db, def, node, offset, Some(db.infer(def)))
     }
 
     pub(crate) fn new_for_body_no_infer(
+        db: &dyn HirDatabase,
+        def: DefWithBodyId,
+        node: InFile<&SyntaxNode>,
+        offset: Option<TextSize>,
+    ) -> SourceAnalyzer {
+        Self::new_for_body_(db, def, node, offset, None)
+    }
+
+    pub(crate) fn new_for_body_(
         db: &dyn HirDatabase,
         def: DefWithBodyId,
         node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
         offset: Option<TextSize>,
+        infer: Option<Arc<InferenceResult>>,
     ) -> SourceAnalyzer {
         let (body, source_map) = db.body_with_source_map(def);
         let scopes = db.expr_scopes(def);
         let scope = match offset {
-            None => scope_for(&scopes, &source_map, node),
-            Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+            None => scope_for(db, &scopes, &source_map, node),
+            Some(offset) => {
+                debug_assert!(
+                    node.text_range().contains_inclusive(offset),
+                    "{:?} not in {:?}",
+                    offset,
+                    node.text_range()
+                );
+                scope_for_offset(db, &scopes, &source_map, node.file_id, offset)
+            }
         };
         let resolver = resolver_for_scope(db.upcast(), def, scope);
-        SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer: None, file_id }
+        SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer, file_id }
     }
 
     pub(crate) fn new_for_resolver(
@@ -662,7 +668,6 @@ impl SourceAnalyzer {
             return resolved;
         }
 
-        // This must be a normal source file rather than macro file.
         let ctx = LowerCtx::new(db.upcast(), self.file_id);
         let hir_path = Path::from_src(&ctx, path.clone())?;
 
@@ -955,14 +960,15 @@ impl SourceAnalyzer {
 }
 
 fn scope_for(
+    db: &dyn HirDatabase,
     scopes: &ExprScopes,
     source_map: &BodySourceMap,
     node: InFile<&SyntaxNode>,
 ) -> Option<ScopeId> {
-    node.value
-        .ancestors()
-        .filter_map(ast::Expr::cast)
-        .filter_map(|it| source_map.node_expr(InFile::new(node.file_id, &it)))
+    node.ancestors_with_macros(db.upcast())
+        .take_while(|it| !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind()))
+        .filter_map(|it| it.map(ast::Expr::cast).transpose())
+        .filter_map(|it| source_map.node_expr(it.as_ref()))
        .find_map(|it| scopes.scope_for(it))
 }
 
@@ -988,8 +994,8 @@ fn scope_for_offset(
                 Some(it.file_id.macro_file()?.call_node(db.upcast()))
             })
             .find(|it| it.file_id == from_file)
-            .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
-            Some((source.value.text_range(), scope))
+            .filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?;
+            Some((source.text_range(), scope))
         })
         .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
         // find containing scope
diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs
index d1fc68d009..1b0e6f8bd5 100644
--- a/crates/hir/src/term_search/tactics.rs
+++ b/crates/hir/src/term_search/tactics.rs
@@ -598,9 +598,8 @@ pub(super) fn famous_types<'a, DB: HirDatabase>(
         Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" },
     ]
     .into_iter()
-    .map(|exprs| {
+    .inspect(|exprs| {
         lookup.insert(exprs.ty(db), std::iter::once(exprs.clone()));
-        exprs
     })
     .filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal))
 }
diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs
index c95e24693d..2aeca0bae0 100644
--- a/crates/ide-assists/src/handlers/bool_to_enum.rs
+++ b/crates/ide-assists/src/handlers/bool_to_enum.rs
@@ -76,7 +76,11 @@ pub(crate) fn bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
             let usages = definition.usages(&ctx.sema).all();
 
             add_enum_def(edit, ctx, &usages, target_node, &target_module);
-            replace_usages(edit, ctx, usages, definition, &target_module);
+            let mut delayed_mutations = Vec::new();
+            replace_usages(edit, ctx, usages, definition, &target_module, &mut delayed_mutations);
+            for (scope, path) in delayed_mutations {
+                insert_use(&scope, path, &ctx.config.insert_use);
+            }
         },
     )
 }
@@ -197,6 +201,7 @@ fn replace_usages(
     usages: UsageSearchResult,
     target_definition: Definition,
     target_module: &hir::Module,
+    delayed_mutations: &mut Vec<(ImportScope, ast::Path)>,
 ) {
     for (file_id, references) in usages {
         edit.edit_file(file_id);
@@ -217,6 +222,7 @@ fn replace_usages(
                         def.usages(&ctx.sema).all(),
                         target_definition,
                         target_module,
+                        delayed_mutations,
                     )
                 }
             } else if let Some(initializer) = find_assignment_usage(&name) {
@@ -255,6 +261,7 @@ fn replace_usages(
                         def.usages(&ctx.sema).all(),
                         target_definition,
                         target_module,
+                        delayed_mutations,
                     )
                 }
             }
@@ -306,7 +313,7 @@ fn replace_usages(
                     ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
                     ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
                 };
-                insert_use(&scope, path, &ctx.config.insert_use);
+                delayed_mutations.push((scope, path));
             }
         },
     )
diff --git a/crates/ide-db/src/rust_doc.rs b/crates/ide-db/src/rust_doc.rs
index ab2a250289..eacd9b9b4d 100644
--- a/crates/ide-db/src/rust_doc.rs
+++ b/crates/ide-db/src/rust_doc.rs
@@ -7,11 +7,7 @@ pub fn is_rust_fence(s: &str) -> bool {
     let mut seen_rust_tags = false;
     let mut seen_other_tags = false;
 
-    let tokens = s
-        .trim()
-        .split(|c| c == ',' || c == ' ' || c == '\t')
-        .map(str::trim)
-        .filter(|t| !t.is_empty());
+    let tokens = s.trim().split([',', ' ', '\t']).map(str::trim).filter(|t| !t.is_empty());
 
     for token in tokens {
         match token {
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index be1e6ed572..a470ce72fc 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -28,10 +28,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
             let function = id;
             (
                 format!("`fn {redundant_assoc_item_name}`"),
-                function
-                    .source(db)
-                    .map(|it| it.syntax().value.text_range())
-                    .unwrap_or(default_range),
+                function.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range),
                 format!("\n {};", function.display(db)),
             )
         }
@@ -39,10 +36,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
             let constant = id;
             (
                 format!("`const {redundant_assoc_item_name}`"),
-                constant
-                    .source(db)
-                    .map(|it| it.syntax().value.text_range())
-                    .unwrap_or(default_range),
+                constant.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range),
                 format!("\n {};", constant.display(db)),
             )
         }
@@ -50,10 +44,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
             let type_alias = id;
             (
                 format!("`type {redundant_assoc_item_name}`"),
-                type_alias
-                    .source(db)
-                    .map(|it| it.syntax().value.text_range())
-                    .unwrap_or(default_range),
+                type_alias.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range),
                 format!("\n type {};", type_alias.name(ctx.sema.db).to_smol_str()),
             )
         }
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 2feea09840..a68ee4f867 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -78,7 +78,6 @@ impl RunnableKind {
 }
 
 impl Runnable {
-    // test package::module::testname
     pub fn label(&self, target: Option<&str>) -> String {
         match &self.kind {
             RunnableKind::Test { test_id, .. } => format!("test {test_id}"),
@@ -86,7 +85,7 @@ impl Runnable {
             RunnableKind::Bench { test_id } => format!("bench {test_id}"),
             RunnableKind::DocTest { test_id, .. } => format!("doctest {test_id}"),
             RunnableKind::Bin => {
-                target.map_or_else(|| "run binary".to_owned(), |t| format!("run {t}"))
+                format!("run {}", target.unwrap_or("binary"))
             }
         }
     }
@@ -513,11 +512,11 @@ impl TestAttr {
     }
 }
 
-const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
-const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] =
-    &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];
-
 fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool {
+    const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
+    const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] =
+        &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];
+
     docs_from_attrs(attrs).map_or(false, |doc| {
         let mut in_code_block = false;
 
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
index 573b3d4bd5..17411fefbd 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
@@ -94,10 +94,20 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
     }
 }
 
+macro_rules! id {
+    ($($tt:tt)*) => {
+        $($tt)*
+    };
+}
 include!(concat!("foo/", "foo.rs"));
 
+struct S<T>(T);
 fn main() {
+    struct TestLocal;
+    // regression test, TestLocal here used to not resolve
+    let _: S<id![TestLocal]>;
+
     format_args!("Hello, {}!", (92,).0);
     dont_color_me_braces!();
     noop!(noop!(1));
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index 08acfca2cb..5f711600a2 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -102,10 +102,20 @@ macro without_args {
     }
 }
 
+macro_rules! id {
+    ($($tt:tt)*) => {
+        $($tt)*
+    };
+}
 include!(concat!("foo/", "foo.rs"));
 
+struct S<T>(T);
 fn main() {
+    struct TestLocal;
+    // regression test, TestLocal here used to not resolve
+    let _: S<id![TestLocal]>;
+
     format_args!("Hello, {}!", (92,).0);
     dont_color_me_braces!();
     noop!(noop!(1));
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 90b81d0a80..a188adbe35 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -651,7 +651,7 @@ impl flags::AnalysisStats {
                 if let Some(src) = source {
                     let original_file = src.file_id.original_file(db);
                     let path = vfs.file_path(original_file);
-                    let syntax_range = src.value.text_range();
+                    let syntax_range = src.text_range();
                     format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
                 } else {
                     format!("processing: {}", full_name())
@@ -945,7 +945,7 @@ impl flags::AnalysisStats {
                 if let Some(src) = source {
                     let original_file = src.file_id.original_file(db);
                     let path = vfs.file_path(original_file);
-                    let syntax_range = src.value.text_range();
+                    let syntax_range = src.text_range();
                     format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
                 } else {
                     format!("processing: {}", full_name())
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index f719a47248..42ba162e4f 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -3367,7 +3367,7 @@ mod tests {
         for idx in url_offsets {
            let link = &schema[idx..];
            // matching on whitespace to ignore normal links
-            if let Some(link_end) = link.find(|c| c == ' ' || c == '[') {
+            if let Some(link_end) = link.find([' ', '[']) {
                if link.chars().nth(link_end) == Some('[') {
                    if let Some(link_text_end) = link.find(']') {
                        let link_text = link[link_end..(link_text_end + 1)].to_string();
diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs
index c438325532..66100971fb 100644
--- a/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -388,9 +388,8 @@ impl Server {
     }
     fn recv(&self) -> Result<Option<Message>, Timeout> {
         let msg = recv_timeout(&self.client.receiver)?;
-        let msg = msg.map(|msg| {
+        let msg = msg.inspect(|msg| {
             self.messages.borrow_mut().push(msg.clone());
-            msg
         });
         Ok(msg)
     }
diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs
index 7dd6382cfa..8cd5cbf1c7 100644
--- a/crates/rust-analyzer/tests/slow-tests/tidy.rs
+++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs
@@ -155,7 +155,7 @@ Zlib OR Apache-2.0 OR MIT
     let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap();
 
     let mut licenses = meta
-        .split(|c| c == ',' || c == '{' || c == '}')
+        .split([',', '{', '}'])
         .filter(|it| it.contains(r#""license""#))
         .map(|it| it.trim())
         .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs
index 43f62d0d1e..53d4a28bc3 100644
--- a/crates/test-utils/src/lib.rs
+++ b/crates/test-utils/src/lib.rs
@@ -305,7 +305,7 @@ fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> {
     }
     let range = TextRange::at(offset, len.try_into().unwrap());
     let line_no_caret = &line[len..];
-    let end_marker = line_no_caret.find(|c| c == '$');
+    let end_marker = line_no_caret.find('$');
     let next = line_no_caret.find(marker).map_or(line.len(), |it| it + len);
     let cond = |end_marker| {
diff --git a/xtask/src/codegen/diagnostics_docs.rs b/xtask/src/codegen/diagnostics_docs.rs
index cf30531e7f..dcc9c76a50 100644
--- a/xtask/src/codegen/diagnostics_docs.rs
+++ b/xtask/src/codegen/diagnostics_docs.rs
@@ -63,7 +63,7 @@ fn is_valid_diagnostic_name(diagnostic: &str) -> Result<(), String> {
     if diagnostic.chars().any(|c| c.is_ascii_uppercase()) {
         return Err("Diagnostic names can't contain uppercase symbols".into());
     }
-    if diagnostic.chars().any(|c| !c.is_ascii()) {
+    if !diagnostic.is_ascii() {
         return Err("Diagnostic can't contain non-ASCII symbols".into());
     }
 
diff --git a/xtask/src/release.rs b/xtask/src/release.rs
index 5699053a23..b936876b52 100644
--- a/xtask/src/release.rs
+++ b/xtask/src/release.rs
@@ -119,12 +119,11 @@ impl flags::RustcPull {
         // Fetch given rustc commit.
         cmd!(sh, "git fetch http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git")
             .run()
-            .map_err(|e| {
+            .inspect_err(|_| {
                 // Try to un-do the previous `git commit`, to leave the repo in the state we found it it.
                 cmd!(sh, "git reset --hard HEAD^")
                     .run()
                     .expect("FAILED to clean up again after failed `git fetch`, sorry for that");
-                e
             })
            .context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?;