From 12e3b4c70b5ef23b2fdfc197296d483680e125f9 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Fri, 8 Feb 2019 14:49:43 +0300
Subject: [PATCH] reformat the world

---
 crates/gen_lsp_server/src/msg.rs | 39 +--
 crates/gen_lsp_server/src/stdio.rs | 4 +-
 crates/ra_arena/src/lib.rs | 20 +-
 crates/ra_arena/src/map.rs | 10 +-
 crates/ra_assists/src/add_derive.rs | 6 +-
 crates/ra_assists/src/add_impl.rs | 22 +-
 crates/ra_assists/src/assist_ctx.rs | 16 +-
 crates/ra_assists/src/change_visibility.rs | 42 +--
 crates/ra_assists/src/fill_match_arms.rs | 5 +-
 crates/ra_assists/src/introduce_variable.rs | 6 +-
 crates/ra_assists/src/lib.rs | 16 +-
 .../src/replace_if_let_with_match.rs | 6 +-
 crates/ra_assists/src/split_import.rs | 4 +-
 crates/ra_cli/src/main.rs | 19 +-
 crates/ra_db/src/input.rs | 30 +--
 crates/ra_db/src/lib.rs | 7 +-
 crates/ra_db/src/loc2id.rs | 9 +-
 crates/ra_hir/src/adt.rs | 23 +-
 crates/ra_hir/src/code_model_api.rs | 71 +----
 crates/ra_hir/src/code_model_impl/function.rs | 12 +-
 crates/ra_hir/src/code_model_impl/krate.rs | 9 +-
 crates/ra_hir/src/code_model_impl/module.rs | 10 +-
 crates/ra_hir/src/expr.rs | 150 ++---------
 crates/ra_hir/src/expr/scope.rs | 52 +---
 crates/ra_hir/src/generics.rs | 10 +-
 crates/ra_hir/src/ids.rs | 34 +--
 crates/ra_hir/src/impl_block.rs | 23 +-
 crates/ra_hir/src/macros.rs | 22 +-
 crates/ra_hir/src/mock.rs | 5 +-
 crates/ra_hir/src/module_tree.rs | 22 +-
 crates/ra_hir/src/nameres.rs | 95 ++-----
 crates/ra_hir/src/nameres/lower.rs | 31 +--
 crates/ra_hir/src/nameres/tests.rs | 34 +--
 crates/ra_hir/src/path.rs | 44 +---
 crates/ra_hir/src/query_definitions.rs | 4 +-
 crates/ra_hir/src/resolve.rs | 35 +--
 crates/ra_hir/src/source_binder.rs | 31 +--
 crates/ra_hir/src/ty.rs | 247 +++++-------------
 crates/ra_hir/src/ty/method_resolution.rs | 30 +--
 crates/ra_hir/src/ty/tests.rs | 9 +-
 crates/ra_hir/src/type_ref.rs | 13 +-
 crates/ra_ide_api/src/assists.rs | 5 +-
 crates/ra_ide_api/src/call_info.rs | 38 +--
 crates/ra_ide_api/src/change.rs | 39 +--
 .../ra_ide_api/src/completion/complete_dot.rs | 4 +-
 .../src/completion/complete_fn_param.rs | 9 +-
 .../src/completion/complete_path.rs | 12 +-
 .../src/completion/complete_postfix.rs | 6 +-
 .../src/completion/complete_scope.rs | 10 +-
 .../src/completion/completion_context.rs | 8 +-
 .../src/completion/completion_item.rs | 16 +-
 crates/ra_ide_api/src/diagnostics.rs | 12 +-
 crates/ra_ide_api/src/extend_selection.rs | 4 +-
 crates/ra_ide_api/src/goto_definition.rs | 40 +--
 crates/ra_ide_api/src/hover.rs | 14 +-
 crates/ra_ide_api/src/impls.rs | 24 +-
 crates/ra_ide_api/src/lib.rs | 24 +-
 crates/ra_ide_api/src/mock_analysis.rs | 9 +-
 crates/ra_ide_api/src/navigation_target.rs | 10 +-
 crates/ra_ide_api/src/references.rs | 4 +-
 crates/ra_ide_api/src/runnables.rs | 31 +--
 crates/ra_ide_api/src/status.rs | 21 +-
 crates/ra_ide_api/src/symbol_index.rs | 5 +-
 crates/ra_ide_api/src/syntax_highlighting.rs | 16 +-
 crates/ra_ide_api_light/src/diagnostics.rs | 32 +--
 .../ra_ide_api_light/src/extend_selection.rs | 57 +---
 crates/ra_ide_api_light/src/folding_ranges.rs | 47 +---
 crates/ra_ide_api_light/src/formatting.rs | 5 +-
 crates/ra_ide_api_light/src/join_lines.rs | 27 +-
 crates/ra_ide_api_light/src/lib.rs | 14 +-
 crates/ra_ide_api_light/src/line_index.rs | 165 ++----------
 .../ra_ide_api_light/src/line_index_utils.rs | 51 +---
 crates/ra_ide_api_light/src/structure.rs | 13 +-
 crates/ra_ide_api_light/src/typing.rs | 17 +-
 crates/ra_lsp_server/src/caps.rs | 20 +-
 crates/ra_lsp_server/src/cargo_target_spec.rs | 5 +-
 crates/ra_lsp_server/src/conv.rs | 59 +----
 crates/ra_lsp_server/src/main.rs | 26 +-
 crates/ra_lsp_server/src/main_loop.rs | 55 +---
 .../ra_lsp_server/src/main_loop/handlers.rs | 75 ++----
 .../src/main_loop/subscriptions.rs | 4 +-
 .../src/project_model/cargo_workspace.rs | 17 +-
 .../src/project_model/sysroot.rs | 9 +-
 crates/ra_lsp_server/src/server_world.rs | 28 +-
 .../ra_lsp_server/tests/heavy_tests/main.rs | 10 +-
 .../tests/heavy_tests/support.rs | 16 +-
 crates/ra_mbe/src/lib.rs | 28 +-
 crates/ra_mbe/src/mbe_expander.rs | 29 +-
 crates/ra_mbe/src/mbe_parser.rs | 11 +-
 crates/ra_mbe/src/syntax_bridge.rs | 25 +-
 crates/ra_syntax/src/algo/visit.rs | 17 +-
 crates/ra_syntax/src/ast.rs | 37 +--
 crates/ra_syntax/src/grammar/expressions.rs | 15 +-
 .../ra_syntax/src/grammar/expressions/atom.rs | 9 +-
 crates/ra_syntax/src/grammar/items.rs | 6 +-
 crates/ra_syntax/src/grammar/params.rs | 6 +-
 crates/ra_syntax/src/grammar/patterns.rs | 4 +-
 crates/ra_syntax/src/lexer/ptr.rs | 5 +-
 crates/ra_syntax/src/lib.rs | 9 +-
 crates/ra_syntax/src/parser_api.rs | 5 +-
 crates/ra_syntax/src/parser_impl.rs | 22 +-
 crates/ra_syntax/src/parser_impl/event.rs | 41 +--
 crates/ra_syntax/src/parser_impl/input.rs | 6 +-
 crates/ra_syntax/src/ptr.rs | 19 +-
 crates/ra_syntax/src/reparsing.rs | 10 +-
 crates/ra_syntax/src/string_lexing/parser.rs | 9 +-
 crates/ra_syntax/src/string_lexing/string.rs | 7 +-
 crates/ra_syntax/src/validation/block.rs | 6 +-
 crates/ra_syntax/src/validation/byte.rs | 30 +--
 .../ra_syntax/src/validation/byte_string.rs | 20 +-
 crates/ra_syntax/src/validation/char.rs | 20 +-
 crates/ra_syntax/src/validation/string.rs | 20 +-
 crates/ra_syntax/src/yellow/builder.rs | 5 +-
 crates/ra_syntax/src/yellow/syntax_error.rs | 12 +-
 crates/ra_syntax/src/yellow/syntax_text.rs | 10 +-
 crates/ra_syntax/tests/test.rs | 59 ++---
 crates/ra_text_edit/src/lib.rs | 5 +-
 crates/ra_text_edit/src/test_utils.rs | 8 +-
 crates/ra_text_edit/src/text_edit.rs | 4 +-
 crates/ra_vfs/src/io.rs | 57 +---
 crates/ra_vfs/src/lib.rs | 86 ++----
 crates/ra_vfs/tests/vfs.rs | 62 ++---
 crates/test_utils/src/lib.rs | 37 +--
 crates/test_utils/src/marks.rs | 5 +-
 crates/thread_worker/src/lib.rs | 9 +-
 crates/tools/src/bin/pre-commit.rs | 13 +-
 crates/tools/src/lib.rs | 38 +--
 crates/tools/src/main.rs | 10 +-
 crates/tools/tests/cli.rs | 10 +-
 129 files changed, 727 insertions(+), 2509 deletions(-)

diff --git a/crates/gen_lsp_server/src/msg.rs b/crates/gen_lsp_server/src/msg.rs
index 818111fe7e..02c7a18585 100644
--- a/crates/gen_lsp_server/src/msg.rs
+++ b/crates/gen_lsp_server/src/msg.rs
@@ -80,10 +80,7 @@ impl RawMessage {
             #[serde(flatten)]
             msg: RawMessage,
         }
-        let text = to_string(&JsonRpc {
-            jsonrpc: "2.0",
-            msg: self,
-        })?;
+        let text = to_string(&JsonRpc { jsonrpc: "2.0", msg: self })?;
         write_msg_text(w, &text)?;
         Ok(())
     }
@@ -95,11 +92,7 @@ impl RawRequest {
         R: Request,
         R::Params: serde::Serialize,
     {
-        RawRequest {
-            id,
-            method: R::METHOD.to_string(),
-            params: to_value(params).unwrap(),
-        }
+        RawRequest { id, method: R::METHOD.to_string(), params: to_value(params).unwrap() }
     }
     pub fn cast<R>(self) -> ::std::result::Result<(u64, R::Params), RawRequest>
     where
@@ -121,23 +114,11 @@ impl RawResponse {
         R: Request,
         R::Result: serde::Serialize,
     {
-        RawResponse {
-            id,
-            result: Some(to_value(&result).unwrap()),
-            error: None,
-        }
+        RawResponse { id, result: Some(to_value(&result).unwrap()), error: None }
     }
     pub fn err(id: u64, code: i32, message: String) -> RawResponse {
-        let error = RawResponseError {
-            code,
-            message,
-            data: None,
-        };
-        RawResponse {
-            id,
-            result: None,
error: Some(error), - } + let error = RawResponseError { code, message, data: None }; + RawResponse { id, result: None, error: Some(error) } } } @@ -147,10 +128,7 @@ impl RawNotification { N: Notification, N::Params: serde::Serialize, { - RawNotification { - method: N::METHOD.to_string(), - params: to_value(params).unwrap(), - } + RawNotification { method: N::METHOD.to_string(), params: to_value(params).unwrap() } } pub fn is(&self) -> bool where @@ -187,9 +165,8 @@ fn read_msg_text(inp: &mut impl BufRead) -> Result> { } let mut parts = buf.splitn(2, ": "); let header_name = parts.next().unwrap(); - let header_value = parts - .next() - .ok_or_else(|| format_err!("malformed header: {:?}", buf))?; + let header_value = + parts.next().ok_or_else(|| format_err!("malformed header: {:?}", buf))?; if header_name == "Content-Length" { size = Some(header_value.parse::()?); } diff --git a/crates/gen_lsp_server/src/stdio.rs b/crates/gen_lsp_server/src/stdio.rs index 5c8e338547..dab2d8da8f 100644 --- a/crates/gen_lsp_server/src/stdio.rs +++ b/crates/gen_lsp_server/src/stdio.rs @@ -13,9 +13,7 @@ pub fn stdio_transport() -> (Receiver, Sender, Threads) let writer = thread::spawn(move || { let stdout = stdout(); let mut stdout = stdout.lock(); - writer_receiver - .into_iter() - .try_for_each(|it| it.write(&mut stdout))?; + writer_receiver.into_iter().try_for_each(|it| it.write(&mut stdout))?; Ok(()) }); let (reader_sender, reader_receiver) = bounded::(16); diff --git a/crates/ra_arena/src/lib.rs b/crates/ra_arena/src/lib.rs index 97f5548384..1c97c2662d 100644 --- a/crates/ra_arena/src/lib.rs +++ b/crates/ra_arena/src/lib.rs @@ -44,10 +44,7 @@ pub struct Arena { impl fmt::Debug for Arena { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_struct("Arena") - .field("len", &self.len()) - .field("data", &self.data) - .finish() + fmt.debug_struct("Arena").field("len", &self.len()).field("data", &self.data).finish() } } @@ -80,19 +77,13 @@ impl Arena { ID::from_raw(id) } pub fn iter<'a>(&'a self) -> impl Iterator { - self.data - .iter() - .enumerate() - .map(|(idx, value)| (ID::from_raw(RawId(idx as u32)), value)) + self.data.iter().enumerate().map(|(idx, value)| (ID::from_raw(RawId(idx as u32)), value)) } } impl Default for Arena { fn default() -> Arena { - Arena { - data: Vec::new(), - _ty: PhantomData, - } + Arena { data: Vec::new(), _ty: PhantomData } } } @@ -116,9 +107,6 @@ impl FromIterator for Arena { where I: IntoIterator, { - Arena { - data: Vec::from_iter(iter), - _ty: PhantomData, - } + Arena { data: Vec::from_iter(iter), _ty: PhantomData } } } diff --git a/crates/ra_arena/src/map.rs b/crates/ra_arena/src/map.rs index be80edaf38..b73d4e3651 100644 --- a/crates/ra_arena/src/map.rs +++ b/crates/ra_arena/src/map.rs @@ -42,10 +42,7 @@ impl ArenaMap { } pub fn iter(&self) -> impl Iterator { - self.v - .iter() - .enumerate() - .filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?))) + self.v.iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?))) } fn to_idx(id: ID) -> usize { @@ -66,9 +63,6 @@ impl std::ops::Index for ArenaMap { impl Default for ArenaMap { fn default() -> Self { - ArenaMap { - v: Vec::new(), - _ty: PhantomData, - } + ArenaMap { v: Vec::new(), _ty: PhantomData } } } diff --git a/crates/ra_assists/src/add_derive.rs b/crates/ra_assists/src/add_derive.rs index 01a4079f68..caf21e0791 100644 --- a/crates/ra_assists/src/add_derive.rs +++ b/crates/ra_assists/src/add_derive.rs @@ -30,10 +30,8 @@ pub(crate) fn add_derive(ctx: AssistCtx) -> Option 
{ // Insert `derive` after doc comments. fn derive_insertion_offset(nominal: &ast::NominalDef) -> Option { - let non_ws_child = nominal - .syntax() - .children() - .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?; + let non_ws_child = + nominal.syntax().children().find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?; Some(non_ws_child.range().start()) } diff --git a/crates/ra_assists/src/add_impl.rs b/crates/ra_assists/src/add_impl.rs index 699508f915..f2360bc891 100644 --- a/crates/ra_assists/src/add_impl.rs +++ b/crates/ra_assists/src/add_impl.rs @@ -21,17 +21,11 @@ pub(crate) fn add_impl(ctx: AssistCtx) -> Option { buf.push_str(" "); buf.push_str(name.text().as_str()); if let Some(type_params) = type_params { - let lifetime_params = type_params - .lifetime_params() - .filter_map(|it| it.lifetime()) - .map(|it| it.text()); - let type_params = type_params - .type_params() - .filter_map(|it| it.name()) - .map(|it| it.text()); - join(lifetime_params.chain(type_params)) - .surround_with("<", ">") - .to_buf(&mut buf); + let lifetime_params = + type_params.lifetime_params().filter_map(|it| it.lifetime()).map(|it| it.text()); + let type_params = + type_params.type_params().filter_map(|it| it.name()).map(|it| it.text()); + join(lifetime_params.chain(type_params)).surround_with("<", ">").to_buf(&mut buf); } buf.push_str(" {\n"); edit.set_cursor(start_offset + TextUnit::of_str(&buf)); @@ -47,11 +41,7 @@ mod tests { #[test] fn test_add_impl() { - check_assist( - add_impl, - "struct Foo {<|>}\n", - "struct Foo {}\n\nimpl Foo {\n<|>\n}\n", - ); + check_assist(add_impl, "struct Foo {<|>}\n", "struct Foo {}\n\nimpl Foo {\n<|>\n}\n"); check_assist( add_impl, "struct Foo {<|>}", diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs index 6d09bde52a..0bf640241b 100644 --- a/crates/ra_assists/src/assist_ctx.rs +++ b/crates/ra_assists/src/assist_ctx.rs @@ -69,12 +69,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> { F: FnOnce(AssistCtx) -> T, { let source_file = &db.parse(frange.file_id); - let ctx = AssistCtx { - db, - frange, - source_file, - should_compute_edit, - }; + let ctx = AssistCtx { db, frange, source_file, should_compute_edit }; f(ctx) } @@ -83,9 +78,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> { label: impl Into, f: impl FnOnce(&mut AssistBuilder), ) -> Option { - let label = AssistLabel { - label: label.into(), - }; + let label = AssistLabel { label: label.into() }; if !self.should_compute_edit { return Some(Assist::Unresolved(label)); } @@ -146,9 +139,6 @@ impl AssistBuilder { } fn build(self) -> AssistAction { - AssistAction { - edit: self.edit.finish(), - cursor_position: self.cursor_position, - } + AssistAction { edit: self.edit.finish(), cursor_position: self.cursor_position } } } diff --git a/crates/ra_assists/src/change_visibility.rs b/crates/ra_assists/src/change_visibility.rs index 4cd32985e0..fa5f231c88 100644 --- a/crates/ra_assists/src/change_visibility.rs +++ b/crates/ra_assists/src/change_visibility.rs @@ -81,31 +81,11 @@ mod tests { #[test] fn change_visibility_adds_pub_crate_to_items() { - check_assist( - change_visibility, - "<|>fn foo() {}", - "<|>pub(crate) fn foo() {}", - ); - check_assist( - change_visibility, - "f<|>n foo() {}", - "<|>pub(crate) fn foo() {}", - ); - check_assist( - change_visibility, - "<|>struct Foo {}", - "<|>pub(crate) struct Foo {}", - ); - check_assist( - change_visibility, - "<|>mod foo {}", - "<|>pub(crate) mod foo {}", - ); - check_assist( - change_visibility, - "<|>trait Foo {}", - 
"<|>pub(crate) trait Foo {}", - ); + check_assist(change_visibility, "<|>fn foo() {}", "<|>pub(crate) fn foo() {}"); + check_assist(change_visibility, "f<|>n foo() {}", "<|>pub(crate) fn foo() {}"); + check_assist(change_visibility, "<|>struct Foo {}", "<|>pub(crate) struct Foo {}"); + check_assist(change_visibility, "<|>mod foo {}", "<|>pub(crate) mod foo {}"); + check_assist(change_visibility, "<|>trait Foo {}", "<|>pub(crate) trait Foo {}"); check_assist(change_visibility, "m<|>od {}", "<|>pub(crate) mod {}"); check_assist( change_visibility, @@ -125,20 +105,12 @@ mod tests { #[test] fn change_visibility_pub_to_pub_crate() { - check_assist( - change_visibility, - "<|>pub fn foo() {}", - "<|>pub(crate) fn foo() {}", - ) + check_assist(change_visibility, "<|>pub fn foo() {}", "<|>pub(crate) fn foo() {}") } #[test] fn change_visibility_pub_crate_to_pub() { - check_assist( - change_visibility, - "<|>pub(crate) fn foo() {}", - "<|>pub fn foo() {}", - ) + check_assist(change_visibility, "<|>pub(crate) fn foo() {}", "<|>pub fn foo() {}") } #[test] diff --git a/crates/ra_assists/src/fill_match_arms.rs b/crates/ra_assists/src/fill_match_arms.rs index 9aa37d94cd..741f75e2a7 100644 --- a/crates/ra_assists/src/fill_match_arms.rs +++ b/crates/ra_assists/src/fill_match_arms.rs @@ -27,10 +27,7 @@ pub(crate) fn fill_match_arms(ctx: AssistCtx) -> Option e, + Ty::Adt { def_id: AdtDef::Enum(e), .. } => e, _ => return None, }; let enum_name = enum_def.name(ctx.db)?; diff --git a/crates/ra_assists/src/introduce_variable.rs b/crates/ra_assists/src/introduce_variable.rs index f587b4fe6e..4f7c9f3c23 100644 --- a/crates/ra_assists/src/introduce_variable.rs +++ b/crates/ra_assists/src/introduce_variable.rs @@ -81,11 +81,7 @@ fn anchor_stmt(expr: &ast::Expr) -> Option<(&SyntaxNode, bool)> { return Some((node, false)); } - if let Some(expr) = node - .parent() - .and_then(ast::Block::cast) - .and_then(|it| it.expr()) - { + if let Some(expr) = node.parent().and_then(ast::Block::cast).and_then(|it| it.expr()) { if expr.syntax() == node { return Some((node, false)); } diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs index 555af51bce..881db6347f 100644 --- a/crates/ra_assists/src/lib.rs +++ b/crates/ra_assists/src/lib.rs @@ -89,9 +89,7 @@ fn all_assists() -> &'static [fn(AssistCtx) -> Option Option<&SyntaxNode> { - node.siblings(direction) - .skip(1) - .find(|node| !node.kind().is_trivia()) + node.siblings(direction).skip(1).find(|node| !node.kind().is_trivia()) } #[cfg(test)] @@ -110,10 +108,8 @@ mod helpers { ) { let (before_cursor_pos, before) = extract_offset(before); let (db, _source_root, file_id) = MockDatabase::with_single_file(&before); - let frange = FileRange { - file_id, - range: TextRange::offset_len(before_cursor_pos, 0.into()), - }; + let frange = + FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; let assist = AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); let action = match assist { @@ -161,10 +157,8 @@ mod helpers { ) { let (before_cursor_pos, before) = extract_offset(before); let (db, _source_root, file_id) = MockDatabase::with_single_file(&before); - let frange = FileRange { - file_id, - range: TextRange::offset_len(before_cursor_pos, 0.into()), - }; + let frange = + FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; let assist = AssistCtx::with_ctx(&db, frange, true, assist); assert!(assist.is_none()); } diff --git a/crates/ra_assists/src/replace_if_let_with_match.rs 
b/crates/ra_assists/src/replace_if_let_with_match.rs index f6af47ec91..683f0d119d 100644 --- a/crates/ra_assists/src/replace_if_let_with_match.rs +++ b/crates/ra_assists/src/replace_if_let_with_match.rs @@ -30,11 +30,7 @@ fn build_match_expr( ) -> String { let mut buf = String::new(); buf.push_str(&format!("match {} {{\n", expr.syntax().text())); - buf.push_str(&format!( - " {} => {}\n", - pat1.syntax().text(), - format_arm(arm1) - )); + buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(arm1))); buf.push_str(&format!(" _ => {}\n", format_arm(arm2))); buf.push_str("}"); buf diff --git a/crates/ra_assists/src/split_import.rs b/crates/ra_assists/src/split_import.rs index 7e34be0878..fb69cef9c5 100644 --- a/crates/ra_assists/src/split_import.rs +++ b/crates/ra_assists/src/split_import.rs @@ -8,9 +8,7 @@ use ra_syntax::{ use crate::{AssistCtx, Assist}; pub(crate) fn split_import(ctx: AssistCtx) -> Option { - let colon_colon = ctx - .leaf_at_offset() - .find(|leaf| leaf.kind() == COLONCOLON)?; + let colon_colon = ctx.leaf_at_offset().find(|leaf| leaf.kind() == COLONCOLON)?; let path = colon_colon.parent().and_then(ast::Path::cast)?; let top_path = generate(Some(path), |it| it.parent_path()).last()?; diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs index 6b5be27be8..a4debeb48e 100644 --- a/crates/ra_cli/src/main.rs +++ b/crates/ra_cli/src/main.rs @@ -13,18 +13,8 @@ fn main() -> Result<()> { .setting(clap::AppSettings::SubcommandRequiredElseHelp) .subcommand( SubCommand::with_name("render-test") - .arg( - Arg::with_name("line") - .long("--line") - .required(true) - .takes_value(true), - ) - .arg( - Arg::with_name("file") - .long("--file") - .required(true) - .takes_value(true), - ), + .arg(Arg::with_name("line").long("--line").required(true).takes_value(true)) + .arg(Arg::with_name("file").long("--file").required(true).takes_value(true)), ) .subcommand(SubCommand::with_name("parse").arg(Arg::with_name("no-dump").long("--no-dump"))) .subcommand(SubCommand::with_name("symbols")) @@ -108,8 +98,5 @@ fn selections(file: &SourceFile, start: u32, end: u32) -> String { .iter() .map(|r| (1 + u32::from(r.start()), 1 + u32::from(r.end()))) .map(|(s, e)| format!("({} {})", s, e)); - join(ranges) - .separator(" ") - .surround_with("(", ")") - .to_string() + join(ranges).separator(" ").surround_with("(", ")").to_string() } diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs index 275894252d..614325a0f6 100644 --- a/crates/ra_db/src/input.rs +++ b/crates/ra_db/src/input.rs @@ -64,10 +64,7 @@ struct CrateData { impl CrateData { fn new(file_id: FileId) -> CrateData { - CrateData { - file_id, - dependencies: Vec::new(), - } + CrateData { file_id, dependencies: Vec::new() } } fn add_dep(&mut self, name: SmolStr, crate_id: CrateId) { @@ -112,10 +109,7 @@ impl CrateGraph { self.arena[&crate_id].file_id } pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option { - let (&crate_id, _) = self - .arena - .iter() - .find(|(_crate_id, data)| data.file_id == file_id)?; + let (&crate_id, _) = self.arena.iter().find(|(_crate_id, data)| data.file_id == file_id)?; Some(crate_id) } pub fn dependencies<'a>( @@ -153,15 +147,9 @@ mod tests { let crate1 = graph.add_crate_root(FileId(1u32)); let crate2 = graph.add_crate_root(FileId(2u32)); let crate3 = graph.add_crate_root(FileId(3u32)); - assert!(graph - .add_dep(crate1, SmolStr::new("crate2"), crate2) - .is_ok()); - assert!(graph - .add_dep(crate2, SmolStr::new("crate3"), crate3) - .is_ok()); - assert!(graph - 
.add_dep(crate3, SmolStr::new("crate1"), crate1) - .is_err()); + assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok()); + assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok()); + assert!(graph.add_dep(crate3, SmolStr::new("crate1"), crate1).is_err()); } #[test] @@ -170,11 +158,7 @@ mod tests { let crate1 = graph.add_crate_root(FileId(1u32)); let crate2 = graph.add_crate_root(FileId(2u32)); let crate3 = graph.add_crate_root(FileId(3u32)); - assert!(graph - .add_dep(crate1, SmolStr::new("crate2"), crate2) - .is_ok()); - assert!(graph - .add_dep(crate2, SmolStr::new("crate3"), crate3) - .is_ok()); + assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok()); + assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok()); } } diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs index 66634e05b3..31442713df 100644 --- a/crates/ra_db/src/lib.rs +++ b/crates/ra_db/src/lib.rs @@ -94,11 +94,8 @@ pub trait SourceDatabase: CheckCanceled + std::fmt::Debug { fn source_root_crates(db: &impl SourceDatabase, id: SourceRootId) -> Arc> { let root = db.source_root(id); let graph = db.crate_graph(); - let res = root - .files - .values() - .filter_map(|&it| graph.crate_id_for_crate_root(it)) - .collect::>(); + let res = + root.files.values().filter_map(|&it| graph.crate_id_for_crate_root(it)).collect::>(); Arc::new(res) } diff --git a/crates/ra_db/src/loc2id.rs b/crates/ra_db/src/loc2id.rs index 359cd893d8..d27fa76822 100644 --- a/crates/ra_db/src/loc2id.rs +++ b/crates/ra_db/src/loc2id.rs @@ -30,10 +30,7 @@ where LOC: Clone + Eq + Hash, { fn default() -> Self { - Loc2IdMap { - id2loc: Arena::default(), - loc2id: FxHashMap::default(), - } + Loc2IdMap { id2loc: Arena::default(), loc2id: FxHashMap::default() } } } @@ -85,9 +82,7 @@ where LOC: Clone + Eq + Hash, { fn default() -> Self { - LocationIntener { - map: Default::default(), - } + LocationIntener { map: Default::default() } } } diff --git a/crates/ra_hir/src/adt.rs b/crates/ra_hir/src/adt.rs index c549e2126e..6d917bb1b6 100644 --- a/crates/ra_hir/src/adt.rs +++ b/crates/ra_hir/src/adt.rs @@ -62,10 +62,7 @@ impl StructData { } fn variants(enum_def: &ast::EnumDef) -> impl Iterator { - enum_def - .variant_list() - .into_iter() - .flat_map(|it| it.variants()) + enum_def.variant_list().into_iter().flat_map(|it| it.variants()) } impl EnumVariant { @@ -83,9 +80,7 @@ impl EnumVariant { (file_id, var) } pub(crate) fn variant_data(&self, db: &impl PersistentHirDatabase) -> Arc { - db.enum_data(self.parent).variants[self.id] - .variant_data - .clone() + db.enum_data(self.parent).variants[self.id].variant_data.clone() } } @@ -222,14 +217,12 @@ impl StructField { }; let field_sources = match struct_flavor { - ast::StructFlavor::Tuple(fl) => fl - .fields() - .map(|it| FieldSource::Pos(it.to_owned())) - .collect(), - ast::StructFlavor::Named(fl) => fl - .fields() - .map(|it| FieldSource::Named(it.to_owned())) - .collect(), + ast::StructFlavor::Tuple(fl) => { + fl.fields().map(|it| FieldSource::Pos(it.to_owned())).collect() + } + ast::StructFlavor::Named(fl) => { + fl.fields().map(|it| FieldSource::Named(it.to_owned())).collect() + } ast::StructFlavor::Unit => Vec::new(), }; let field = field_sources diff --git a/crates/ra_hir/src/code_model_api.rs b/crates/ra_hir/src/code_model_api.rs index a58bf8f87b..cafc5279d5 100644 --- a/crates/ra_hir/src/code_model_api.rs +++ b/crates/ra_hir/src/code_model_api.rs @@ -71,17 +71,7 @@ pub enum ModuleDef { Trait(Trait), Type(Type), } -impl_froms!( - ModuleDef: Module, - 
Function, - Struct, - Enum, - EnumVariant, - Const, - Static, - Trait, - Type -); +impl_froms!(ModuleDef: Module, Function, Struct, Enum, EnumVariant, Const, Static, Trait, Type); pub enum ModuleSource { SourceFile(TreeArc), @@ -90,13 +80,8 @@ pub enum ModuleSource { #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub enum Problem { - UnresolvedModule { - candidate: RelativePathBuf, - }, - NotDirOwner { - move_to: RelativePathBuf, - candidate: RelativePathBuf, - }, + UnresolvedModule { candidate: RelativePathBuf }, + NotDirOwner { move_to: RelativePathBuf, candidate: RelativePathBuf }, } impl Module { @@ -187,8 +172,7 @@ impl Module { impl Docs for Module { fn docs(&self, db: &impl HirDatabase) -> Option { - self.declaration_source(db) - .and_then(|it| docs_from_ast(&*it.1)) + self.declaration_source(db).and_then(|it| docs_from_ast(&*it.1)) } } @@ -206,9 +190,7 @@ pub enum FieldSource { impl StructField { pub fn name(&self, db: &impl HirDatabase) -> Name { - self.parent.variant_data(db).fields().unwrap()[self.id] - .name - .clone() + self.parent.variant_data(db).fields().unwrap()[self.id].name.clone() } pub fn source(&self, db: &impl PersistentHirDatabase) -> (HirFileId, FieldSource) { @@ -257,10 +239,7 @@ impl Struct { .fields() .into_iter() .flat_map(|it| it.iter()) - .map(|(id, _)| StructField { - parent: (*self).into(), - id, - }) + .map(|(id, _)| StructField { parent: (*self).into(), id }) .collect() } @@ -271,10 +250,7 @@ impl Struct { .into_iter() .flat_map(|it| it.iter()) .find(|(_id, data)| data.name == *name) - .map(|(id, _)| StructField { - parent: (*self).into(), - id, - }) + .map(|(id, _)| StructField { parent: (*self).into(), id }) } pub fn generic_params(&self, db: &impl PersistentHirDatabase) -> Arc { @@ -292,11 +268,7 @@ impl Struct { let r = self.module(db).resolver(db); // ...and add generic params, if present let p = self.generic_params(db); - let r = if !p.params.is_empty() { - r.push_generic_params_scope(p) - } else { - r - }; + let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r }; r } } @@ -356,11 +328,7 @@ impl Enum { let r = self.module(db).resolver(db); // ...and add generic params, if present let p = self.generic_params(db); - let r = if !p.params.is_empty() { - r.push_generic_params_scope(p) - } else { - r - }; + let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r }; r } } @@ -400,10 +368,7 @@ impl EnumVariant { .fields() .into_iter() .flat_map(|it| it.iter()) - .map(|(id, _)| StructField { - parent: (*self).into(), - id, - }) + .map(|(id, _)| StructField { parent: (*self).into(), id }) .collect() } @@ -413,10 +378,7 @@ impl EnumVariant { .into_iter() .flat_map(|it| it.iter()) .find(|(_id, data)| data.name == *name) - .map(|(id, _)| StructField { - parent: (*self).into(), - id, - }) + .map(|(id, _)| StructField { parent: (*self).into(), id }) } } @@ -488,10 +450,7 @@ impl Function { pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSyntaxMapping { let scopes = db.expr_scopes(*self); let syntax_mapping = db.body_syntax_mapping(*self); - ScopesWithSyntaxMapping { - scopes, - syntax_mapping, - } + ScopesWithSyntaxMapping { scopes, syntax_mapping } } pub fn signature(&self, db: &impl HirDatabase) -> Arc { @@ -516,11 +475,7 @@ impl Function { .unwrap_or_else(|| self.module(db).resolver(db)); // ...and add generic params, if present let p = self.generic_params(db); - let r = if !p.params.is_empty() { - r.push_generic_params_scope(p) - } else { - r - }; + let r = if !p.params.is_empty() { 
r.push_generic_params_scope(p) } else { r }; r } } diff --git a/crates/ra_hir/src/code_model_impl/function.rs b/crates/ra_hir/src/code_model_impl/function.rs index 8326c02c78..b9438fdb79 100644 --- a/crates/ra_hir/src/code_model_impl/function.rs +++ b/crates/ra_hir/src/code_model_impl/function.rs @@ -25,10 +25,7 @@ impl FnSignature { func: Function, ) -> Arc { let (_, node) = func.source(db); - let name = node - .name() - .map(|n| n.as_name()) - .unwrap_or_else(Name::missing); + let name = node.name().map(|n| n.as_name()).unwrap_or_else(Name::missing); let mut params = Vec::new(); let mut has_self_param = false; if let Some(param_list) = node.param_list() { @@ -61,12 +58,7 @@ impl FnSignature { TypeRef::unit() }; - let sig = FnSignature { - name, - params, - ret_type, - has_self_param, - }; + let sig = FnSignature { name, params, ret_type, has_self_param }; Arc::new(sig) } } diff --git a/crates/ra_hir/src/code_model_impl/krate.rs b/crates/ra_hir/src/code_model_impl/krate.rs index 1517434b82..161ae6e189 100644 --- a/crates/ra_hir/src/code_model_impl/krate.rs +++ b/crates/ra_hir/src/code_model_impl/krate.rs @@ -11,9 +11,7 @@ impl Crate { crate_graph .dependencies(self.crate_id) .map(|dep| { - let krate = Crate { - crate_id: dep.crate_id(), - }; + let krate = Crate { crate_id: dep.crate_id() }; let name = dep.as_name(); CrateDependency { krate, name } }) @@ -23,10 +21,7 @@ impl Crate { let module_tree = db.module_tree(*self); let module_id = module_tree.modules().next()?; - let module = Module { - krate: *self, - module_id, - }; + let module = Module { krate: *self, module_id }; Some(module) } } diff --git a/crates/ra_hir/src/code_model_impl/module.rs b/crates/ra_hir/src/code_model_impl/module.rs index f487b8532d..1425fa693d 100644 --- a/crates/ra_hir/src/code_model_impl/module.rs +++ b/crates/ra_hir/src/code_model_impl/module.rs @@ -12,10 +12,7 @@ use crate::{ impl Module { fn with_module_id(&self, module_id: ModuleId) -> Module { - Module { - module_id, - krate: self.krate, - } + Module { module_id, krate: self.krate } } pub(crate) fn name_impl(&self, db: &impl HirDatabase) -> Option { @@ -42,10 +39,7 @@ impl Module { ) -> Option<(FileId, TreeArc)> { let module_tree = db.module_tree(self.krate); let link = self.module_id.parent_link(&module_tree)?; - let file_id = link - .owner(&module_tree) - .file_id(&module_tree) - .as_original_file(); + let file_id = link.owner(&module_tree).file_id(&module_tree).as_original_file(); let src = link.source(&module_tree, db); Some((file_id, src)) } diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs index 6826e966bf..4e61d87ff0 100644 --- a/crates/ra_hir/src/expr.rs +++ b/crates/ra_hir/src/expr.rs @@ -121,9 +121,7 @@ impl BodySyntaxMapping { } pub fn node_expr(&self, node: &ast::Expr) -> Option { - self.expr_syntax_mapping - .get(&SyntaxNodePtr::new(node.syntax())) - .cloned() + self.expr_syntax_mapping.get(&SyntaxNodePtr::new(node.syntax())).cloned() } pub fn pat_syntax(&self, pat: PatId) -> Option { @@ -135,9 +133,7 @@ impl BodySyntaxMapping { } pub fn node_pat(&self, node: &ast::Pat) -> Option { - self.pat_syntax_mapping - .get(&SyntaxNodePtr::new(node.syntax())) - .cloned() + self.pat_syntax_mapping.get(&SyntaxNodePtr::new(node.syntax())).cloned() } pub fn body(&self) -> &Arc { @@ -262,11 +258,7 @@ pub struct StructLitField { #[derive(Debug, Clone, Eq, PartialEq)] pub enum Statement { - Let { - pat: PatId, - type_ref: Option, - initializer: Option, - }, + Let { pat: PatId, type_ref: Option, initializer: Option }, Expr(ExprId), } @@ 
-275,11 +267,7 @@ impl Expr { match self { Expr::Missing => {} Expr::Path(_) => {} - Expr::If { - condition, - then_branch, - else_branch, - } => { + Expr::If { condition, then_branch, else_branch } => { f(*condition); f(*then_branch); if let Some(else_branch) = else_branch { @@ -457,11 +445,7 @@ impl Pat { args.iter().map(|pat| *pat).for_each(f); } Pat::Ref { pat, .. } => f(*pat), - Pat::Slice { - prefix, - rest, - suffix, - } => { + Pat::Slice { prefix, rest, suffix } => { let total_iter = prefix.iter().chain(rest.iter()).chain(suffix.iter()); total_iter.map(|pat| *pat).for_each(f); } @@ -520,10 +504,7 @@ impl ExprCollector { } fn empty_block(&mut self) -> ExprId { - let block = Expr::Block { - statements: Vec::new(), - tail: None, - }; + let block = Expr::Block { statements: Vec::new(), tail: None }; self.exprs.alloc(block) } @@ -549,24 +530,10 @@ impl ExprCollector { .unwrap_or_else(|| self.empty_block()); let placeholder_pat = self.pats.alloc(Pat::Missing); let arms = vec![ - MatchArm { - pats: vec![pat], - expr: then_branch, - guard: None, - }, - MatchArm { - pats: vec![placeholder_pat], - expr: else_branch, - guard: None, - }, + MatchArm { pats: vec![pat], expr: then_branch, guard: None }, + MatchArm { pats: vec![placeholder_pat], expr: else_branch, guard: None }, ]; - self.alloc_expr( - Expr::Match { - expr: match_expr, - arms, - }, - syntax_ptr, - ) + self.alloc_expr(Expr::Match { expr: match_expr, arms }, syntax_ptr) } else { let condition = self.collect_expr_opt(e.condition().and_then(|c| c.expr())); let then_branch = self.collect_block_opt(e.then_branch()); @@ -577,14 +544,7 @@ impl ExprCollector { self.collect_expr(expr) } }); - self.alloc_expr( - Expr::If { - condition, - then_branch, - else_branch, - }, - syntax_ptr, - ) + self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr) } } ast::ExprKind::BlockExpr(e) => self.collect_block_opt(e.block()), @@ -610,14 +570,7 @@ impl ExprCollector { let iterable = self.collect_expr_opt(e.iterable()); let pat = self.collect_pat_opt(e.pat()); let body = self.collect_block_opt(e.loop_body()); - self.alloc_expr( - Expr::For { - iterable, - pat, - body, - }, - syntax_ptr, - ) + self.alloc_expr(Expr::For { iterable, pat, body }, syntax_ptr) } ast::ExprKind::CallExpr(e) => { let callee = self.collect_expr_opt(e.expr()); @@ -635,18 +588,8 @@ impl ExprCollector { } else { Vec::new() }; - let method_name = e - .name_ref() - .map(|nr| nr.as_name()) - .unwrap_or_else(Name::missing); - self.alloc_expr( - Expr::MethodCall { - receiver, - method_name, - args, - }, - syntax_ptr, - ) + let method_name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); + self.alloc_expr(Expr::MethodCall { receiver, method_name, args }, syntax_ptr) } ast::ExprKind::MatchExpr(e) => { let expr = self.collect_expr_opt(e.expr()); @@ -668,11 +611,8 @@ impl ExprCollector { self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr) } ast::ExprKind::PathExpr(e) => { - let path = e - .path() - .and_then(Path::from_ast) - .map(Expr::Path) - .unwrap_or(Expr::Missing); + let path = + e.path().and_then(Path::from_ast).map(Expr::Path).unwrap_or(Expr::Missing); self.alloc_expr(path, syntax_ptr) } ast::ExprKind::ContinueExpr(_e) => { @@ -721,21 +661,11 @@ impl ExprCollector { Vec::new() }; let spread = e.spread().map(|s| self.collect_expr(s)); - self.alloc_expr( - Expr::StructLit { - path, - fields, - spread, - }, - syntax_ptr, - ) + self.alloc_expr(Expr::StructLit { path, fields, spread }, syntax_ptr) } ast::ExprKind::FieldExpr(e) => { let expr = 
self.collect_expr_opt(e.expr()); - let name = e - .name_ref() - .map(|nr| nr.as_name()) - .unwrap_or_else(Name::missing); + let name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); self.alloc_expr(Expr::Field { expr, name }, syntax_ptr) } ast::ExprKind::TryExpr(e) => { @@ -772,14 +702,7 @@ impl ExprCollector { } } let body = self.collect_expr_opt(e.body()); - self.alloc_expr( - Expr::Lambda { - args, - arg_types, - body, - }, - syntax_ptr, - ) + self.alloc_expr(Expr::Lambda { args, arg_types, body }, syntax_ptr) } ast::ExprKind::BinExpr(e) => { let lhs = self.collect_expr_opt(e.lhs()); @@ -804,9 +727,8 @@ impl ExprCollector { let lit = match child.flavor() { LiteralFlavor::IntNumber { suffix } => { - let known_name = suffix - .map(Name::new) - .and_then(|name| UncertainIntTy::from_name(&name)); + let known_name = + suffix.map(Name::new).and_then(|name| UncertainIntTy::from_name(&name)); Literal::Int( Default::default(), @@ -857,11 +779,7 @@ impl ExprCollector { let pat = self.collect_pat_opt(stmt.pat()); let type_ref = stmt.type_ref().map(TypeRef::from_ast); let initializer = stmt.initializer().map(|e| self.collect_expr(e)); - Statement::Let { - pat, - type_ref, - initializer, - } + Statement::Let { pat, type_ref, initializer } } ast::StmtKind::ExprStmt(stmt) => { Statement::Expr(self.collect_expr_opt(stmt.expr())) @@ -869,10 +787,7 @@ impl ExprCollector { }) .collect(); let tail = block.expr().map(|e| self.collect_expr(e)); - self.alloc_expr( - Expr::Block { statements, tail }, - SyntaxNodePtr::new(block.syntax()), - ) + self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax())) } fn collect_block_opt(&mut self, block: Option<&ast::Block>) -> ExprId { @@ -886,17 +801,10 @@ impl ExprCollector { fn collect_pat(&mut self, pat: &ast::Pat) -> PatId { let pattern = match pat.kind() { ast::PatKind::BindPat(bp) => { - let name = bp - .name() - .map(|nr| nr.as_name()) - .unwrap_or_else(Name::missing); + let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); let annotation = BindingAnnotation::new(bp.is_mutable(), bp.is_ref()); let subpat = bp.pat().map(|subpat| self.collect_pat(subpat)); - Pat::Bind { - name, - mode: annotation, - subpat, - } + Pat::Bind { name, mode: annotation, subpat } } ast::PatKind::TupleStructPat(p) => { let path = p.path().and_then(Path::from_ast); @@ -919,9 +827,8 @@ impl ExprCollector { ast::PatKind::PlaceholderPat(_) => Pat::Wild, ast::PatKind::StructPat(p) => { let path = p.path().and_then(Path::from_ast); - let field_pat_list = p - .field_pat_list() - .expect("every struct should have a field list"); + let field_pat_list = + p.field_pat_list().expect("every struct should have a field list"); let mut fields: Vec<_> = field_pat_list .bind_pats() .map(|bind_pat| { @@ -961,10 +868,7 @@ impl ExprCollector { if let Some(param_list) = node.param_list() { if let Some(self_param) = param_list.self_param() { let self_param = SyntaxNodePtr::new( - self_param - .self_kw() - .expect("self param without self keyword") - .syntax(), + self_param.self_kw().expect("self param without self keyword").syntax(), ); let param_pat = self.alloc_pat( Pat::Bind { diff --git a/crates/ra_hir/src/expr/scope.rs b/crates/ra_hir/src/expr/scope.rs index 368994bf7c..44d5c24293 100644 --- a/crates/ra_hir/src/expr/scope.rs +++ b/crates/ra_hir/src/expr/scope.rs @@ -74,17 +74,11 @@ impl ExprScopes { } fn root_scope(&mut self) -> ScopeId { - self.scopes.alloc(ScopeData { - parent: None, - entries: vec![], - }) + self.scopes.alloc(ScopeData { 
parent: None, entries: vec![] }) } fn new_scope(&mut self, parent: ScopeId) -> ScopeId { - self.scopes.alloc(ScopeData { - parent: Some(parent), - entries: vec![], - }) + self.scopes.alloc(ScopeData { parent: Some(parent), entries: vec![] }) } fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) { @@ -92,10 +86,7 @@ impl ExprScopes { Pat::Bind { name, .. } => { // bind can have a subpattern, but it's actually not allowed // to bind to things in there - let entry = ScopeEntry { - name: name.clone(), - pat, - }; + let entry = ScopeEntry { name: name.clone(), pat }; self.scopes[scope].entries.push(entry) } p => p.walk_child_pats(|pat| self.add_bindings(body, scope, pat)), @@ -104,9 +95,7 @@ impl ExprScopes { fn add_params_bindings(&mut self, scope: ScopeId, params: &[PatId]) { let body = Arc::clone(&self.body); - params - .iter() - .for_each(|pat| self.add_bindings(&body, scope, *pat)); + params.iter().for_each(|pat| self.add_bindings(&body, scope, *pat)); } fn set_scope(&mut self, node: ExprId, scope: ScopeId) { @@ -142,9 +131,7 @@ impl ScopeEntryWithSyntax { impl ScopesWithSyntaxMapping { fn scope_chain<'a>(&'a self, node: &SyntaxNode) -> impl Iterator + 'a { - generate(self.scope_for(node), move |&scope| { - self.scopes.scopes[scope].parent - }) + generate(self.scope_for(node), move |&scope| self.scopes.scopes[scope].parent) } pub fn scope_for_offset(&self, offset: TextUnit) -> Option { @@ -154,10 +141,7 @@ impl ScopesWithSyntaxMapping { .filter_map(|(id, scope)| Some((self.syntax_mapping.expr_syntax(*id)?, scope))) // find containing scope .min_by_key(|(ptr, _scope)| { - ( - !(ptr.range().start() <= offset && offset <= ptr.range().end()), - ptr.range().len(), - ) + (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len()) }) .map(|(ptr, scope)| self.adjust(ptr, *scope, offset)) } @@ -251,9 +235,7 @@ fn compute_block_scopes( ) { for stmt in statements { match stmt { - Statement::Let { - pat, initializer, .. - } => { + Statement::Let { pat, initializer, .. } => { if let Some(expr) = initializer { scopes.set_scope(*expr, scope); compute_expr_scopes(*expr, body, scopes, scope); @@ -278,21 +260,13 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope Expr::Block { statements, tail } => { compute_block_scopes(&statements, *tail, body, scopes, scope); } - Expr::For { - iterable, - pat, - body: body_expr, - } => { + Expr::For { iterable, pat, body: body_expr } => { compute_expr_scopes(*iterable, body, scopes, scope); let scope = scopes.new_scope(scope); scopes.add_bindings(body, scope, *pat); compute_expr_scopes(*body_expr, body, scopes, scope); } - Expr::Lambda { - args, - body: body_expr, - .. - } => { + Expr::Lambda { args, body: body_expr, .. 
} => { let scope = scopes.new_scope(scope); scopes.add_params_bindings(scope, &args); compute_expr_scopes(*body_expr, body, scopes, scope); @@ -341,9 +315,7 @@ mod tests { let file = SourceFile::parse(&code); let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap(); - let irrelevant_function = Function { - id: crate::ids::FunctionId::from_raw(0.into()), - }; + let irrelevant_function = Function { id: crate::ids::FunctionId::from_raw(0.into()) }; let body_hir = expr::collect_fn_body_syntax(irrelevant_function, fn_def); let scopes = ExprScopes::new(Arc::clone(body_hir.body())); let scopes = ScopesWithSyntaxMapping { @@ -444,9 +416,7 @@ mod tests { let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap(); let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); - let irrelevant_function = Function { - id: crate::ids::FunctionId::from_raw(0.into()), - }; + let irrelevant_function = Function { id: crate::ids::FunctionId::from_raw(0.into()) }; let body_hir = expr::collect_fn_body_syntax(irrelevant_function, fn_def); let scopes = ExprScopes::new(Arc::clone(body_hir.body())); let scopes = ScopesWithSyntaxMapping { diff --git a/crates/ra_hir/src/generics.rs b/crates/ra_hir/src/generics.rs index a82205f0b3..c72360f442 100644 --- a/crates/ra_hir/src/generics.rs +++ b/crates/ra_hir/src/generics.rs @@ -58,14 +58,8 @@ impl GenericParams { fn fill_params(&mut self, params: &ast::TypeParamList) { for (idx, type_param) in params.type_params().enumerate() { - let name = type_param - .name() - .map(AsName::as_name) - .unwrap_or_else(Name::missing); - let param = GenericParam { - idx: idx as u32, - name, - }; + let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); + let param = GenericParam { idx: idx as u32, name }; self.params.push(param); } } diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs index 95678bf703..ea13c1196d 100644 --- a/crates/ra_hir/src/ids.rs +++ b/crates/ra_hir/src/ids.rs @@ -169,11 +169,7 @@ impl Hash for ItemLoc { impl Clone for ItemLoc { fn clone(&self) -> ItemLoc { - ItemLoc { - module: self.module, - raw: self.raw, - _ty: PhantomData, - } + ItemLoc { module: self.module, raw: self.raw, _ty: PhantomData } } } @@ -186,11 +182,7 @@ pub(crate) struct LocationCtx { impl<'a, DB: PersistentHirDatabase> LocationCtx<&'a DB> { pub(crate) fn new(db: &'a DB, module: Module, file_id: HirFileId) -> LocationCtx<&'a DB> { - LocationCtx { - db, - module, - file_id, - } + LocationCtx { db, module, file_id } } pub(crate) fn to_def(self, ast: &N) -> DEF where @@ -205,15 +197,9 @@ pub(crate) trait AstItemDef: ArenaId + Clone { fn interner(interner: &HirInterner) -> &LocationIntener, Self>; fn from_ast(ctx: LocationCtx<&impl PersistentHirDatabase>, ast: &N) -> Self { let items = ctx.db.file_items(ctx.file_id); - let raw = SourceItemId { - file_id: ctx.file_id, - item_id: items.id_of(ctx.file_id, ast.syntax()), - }; - let loc = ItemLoc { - module: ctx.module, - raw, - _ty: PhantomData, - }; + let raw = + SourceItemId { file_id: ctx.file_id, item_id: items.id_of(ctx.file_id, ast.syntax()) }; + let loc = ItemLoc { module: ctx.module, raw, _ty: PhantomData }; Self::interner(ctx.db.as_ref()).loc2id(&loc) } @@ -221,9 +207,8 @@ pub(crate) trait AstItemDef: ArenaId + Clone { let int = Self::interner(db.as_ref()); let loc = int.id2loc(self); let syntax = db.file_item(loc.raw); - let ast = N::cast(&syntax) - .unwrap_or_else(|| 
panic!("invalid ItemLoc: {:?}", loc.raw)) - .to_owned(); + let ast = + N::cast(&syntax).unwrap_or_else(|| panic!("invalid ItemLoc: {:?}", loc.raw)).to_owned(); (loc.raw.file_id, ast) } fn module(self, db: &impl HirDatabase) -> Module { @@ -317,10 +302,7 @@ pub struct SourceFileItems { impl SourceFileItems { pub(crate) fn new(file_id: HirFileId, source_file: &SourceFile) -> SourceFileItems { - let mut res = SourceFileItems { - file_id, - arena: Arena::default(), - }; + let mut res = SourceFileItems { file_id, arena: Arena::default() }; res.init(source_file); res } diff --git a/crates/ra_hir/src/impl_block.rs b/crates/ra_hir/src/impl_block.rs index 094dbedb35..b2fbee8d70 100644 --- a/crates/ra_hir/src/impl_block.rs +++ b/crates/ra_hir/src/impl_block.rs @@ -50,17 +50,11 @@ impl ImplBlock { item: ImplItem, ) -> Option { let impl_id = *module_impl_blocks.impls_by_def.get(&item)?; - Some(ImplBlock { - module_impl_blocks, - impl_id, - }) + Some(ImplBlock { module_impl_blocks, impl_id }) } pub(crate) fn from_id(module_impl_blocks: Arc, impl_id: ImplId) -> ImplBlock { - ImplBlock { - module_impl_blocks, - impl_id, - } + ImplBlock { module_impl_blocks, impl_id } } pub fn id(&self) -> ImplId { @@ -144,11 +138,7 @@ impl ImplData { } else { Vec::new() }; - ImplData { - target_trait, - target_type, - items, - } + ImplData { target_trait, target_type, items } } pub fn target_trait(&self) -> Option<&TypeRef> { @@ -212,10 +202,9 @@ impl ModuleImplBlocks { let file_id: HirFileId = file_id.into(); let node = match &module_source { ModuleSource::SourceFile(node) => node.syntax(), - ModuleSource::Module(node) => node - .item_list() - .expect("inline module should have item list") - .syntax(), + ModuleSource::Module(node) => { + node.item_list().expect("inline module should have item list").syntax() + } }; for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) { diff --git a/crates/ra_hir/src/macros.rs b/crates/ra_hir/src/macros.rs index cb8a9312e0..95925159f7 100644 --- a/crates/ra_hir/src/macros.rs +++ b/crates/ra_hir/src/macros.rs @@ -48,9 +48,7 @@ impl MacroDef { let input = { let arg = macro_call.token_tree()?.syntax(); - MacroInput { - text: arg.text().to_string(), - } + MacroInput { text: arg.text().to_string() } }; Some((def, input)) } @@ -68,20 +66,14 @@ impl MacroDef { let ptr = SyntaxNodePtr::new(array_expr.syntax()); let src_range = TextRange::offset_len(0.into(), TextUnit::of_str(&input.text)); let ranges_map = vec![(src_range, array_expr.syntax().range())]; - let res = MacroExpansion { - text, - ranges_map, - ptr, - }; + let res = MacroExpansion { text, ranges_map, ptr }; Some(res) } fn expand_query_group(self, input: MacroInput) -> Option { let anchor = "trait "; let pos = input.text.find(anchor)? + anchor.len(); - let trait_name = input.text[pos..] 
- .chars() - .take_while(|c| c.is_alphabetic()) - .collect::(); + let trait_name = + input.text[pos..].chars().take_while(|c| c.is_alphabetic()).collect::(); if trait_name.is_empty() { return None; } @@ -92,11 +84,7 @@ impl MacroDef { let name = trait_def.name()?; let ptr = SyntaxNodePtr::new(trait_def.syntax()); let ranges_map = vec![(src_range, name.syntax().range())]; - let res = MacroExpansion { - text, - ranges_map, - ptr, - }; + let res = MacroExpansion { text, ranges_map, ptr }; Some(res) } } diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs index 87095fb219..950f89948f 100644 --- a/crates/ra_hir/src/mock.rs +++ b/crates/ra_hir/src/mock.rs @@ -64,10 +64,7 @@ impl MockDatabase { let mut source_root = SourceRoot::default(); for entry in parse_fixture(fixture) { if entry.text.contains(CURSOR_MARKER) { - assert!( - position.is_none(), - "only one marker (<|>) per fixture is allowed" - ); + assert!(position.is_none(), "only one marker (<|>) per fixture is allowed"); position = Some(self.add_file_with_position( source_root_id, &mut source_root, diff --git a/crates/ra_hir/src/module_tree.rs b/crates/ra_hir/src/module_tree.rs index a1aa3d8ce8..1f19ee1913 100644 --- a/crates/ra_hir/src/module_tree.rs +++ b/crates/ra_hir/src/module_tree.rs @@ -153,10 +153,8 @@ impl ModuleTree { file_id: HirFileId, decl_id: Option, ) -> Option { - let (res, _) = self - .mods - .iter() - .find(|(_, m)| (m.file_id, m.decl_id) == (file_id, decl_id))?; + let (res, _) = + self.mods.iter().find(|(_, m)| (m.file_id, m.decl_id) == (file_id, decl_id))?; Some(res) } @@ -178,18 +176,10 @@ impl ModuleTree { decl_id: Option, ) -> ModuleId { let is_root = parent.is_none(); - let id = self.alloc_mod(ModuleData { - file_id, - decl_id, - parent, - children: Vec::new(), - }); + let id = self.alloc_mod(ModuleData { file_id, decl_id, parent, children: Vec::new() }); for sub in db.submodules(file_id, decl_id).iter() { let link = self.alloc_link(LinkData { - source: SourceItemId { - file_id, - item_id: sub.decl_id, - }, + source: SourceItemId { file_id, item_id: sub.decl_id }, name: sub.name.clone(), owner: id, points_to: Vec::new(), @@ -244,9 +234,7 @@ impl ModuleId { Some(tree.links[link].owner) } pub(crate) fn crate_root(self, tree: &ModuleTree) -> ModuleId { - generate(Some(self), move |it| it.parent(tree)) - .last() - .unwrap() + generate(Some(self), move |it| it.parent(tree)).last().unwrap() } pub(crate) fn child(self, tree: &ModuleTree, name: &Name) -> Option { let link = tree.mods[self] diff --git a/crates/ra_hir/src/nameres.rs b/crates/ra_hir/src/nameres.rs index 681aa9a676..b7382d9c31 100644 --- a/crates/ra_hir/src/nameres.rs +++ b/crates/ra_hir/src/nameres.rs @@ -83,40 +83,25 @@ pub struct PerNs { impl Default for PerNs { fn default() -> Self { - PerNs { - types: None, - values: None, - } + PerNs { types: None, values: None } } } impl PerNs { pub fn none() -> PerNs { - PerNs { - types: None, - values: None, - } + PerNs { types: None, values: None } } pub fn values(t: T) -> PerNs { - PerNs { - types: None, - values: Some(t), - } + PerNs { types: None, values: Some(t) } } pub fn types(t: T) -> PerNs { - PerNs { - types: Some(t), - values: None, - } + PerNs { types: Some(t), values: None } } pub fn both(types: T, values: T) -> PerNs { - PerNs { - types: Some(types), - values: Some(values), - } + PerNs { types: Some(types), values: Some(values) } } pub fn is_none(&self) -> bool { @@ -147,31 +132,19 @@ impl PerNs { } pub fn as_ref(&self) -> PerNs<&T> { - PerNs { - types: self.types.as_ref(), - values: 
self.values.as_ref(), - } + PerNs { types: self.types.as_ref(), values: self.values.as_ref() } } pub fn combine(self, other: PerNs) -> PerNs { - PerNs { - types: self.types.or(other.types), - values: self.values.or(other.values), - } + PerNs { types: self.types.or(other.types), values: self.values.or(other.values) } } pub fn and_then(self, f: impl Fn(T) -> Option) -> PerNs { - PerNs { - types: self.types.and_then(&f), - values: self.values.and_then(&f), - } + PerNs { types: self.types.and_then(&f), values: self.values.and_then(&f) } } pub fn map(self, f: impl Fn(T) -> U) -> PerNs { - PerNs { - types: self.types.map(&f), - values: self.values.map(&f), - } + PerNs { types: self.types.map(&f), values: self.values.map(&f) } } } @@ -233,9 +206,7 @@ where for dep in self.krate.dependencies(self.db) { log::debug!("crate dep {:?} -> {:?}", dep.name, dep.krate); if let Some(module) = dep.krate.root_module(self.db) { - self.result - .extern_prelude - .insert(dep.name.clone(), module.into()); + self.result.extern_prelude.insert(dep.name.clone(), module.into()); } } } @@ -245,17 +216,11 @@ where for (import_id, import_data) in input.imports.iter() { if let Some(last_segment) = import_data.path.segments.iter().last() { if !import_data.is_glob { - let name = import_data - .alias - .clone() - .unwrap_or_else(|| last_segment.name.clone()); - module_items.items.insert( - name, - Resolution { - def: PerNs::none(), - import: Some(import_id), - }, - ); + let name = + import_data.alias.clone().unwrap_or_else(|| last_segment.name.clone()); + module_items + .items + .insert(name, Resolution { def: PerNs::none(), import: Some(import_id) }); } } } @@ -267,10 +232,7 @@ where // Populate modules for (name, module_id) in module_id.children(&self.module_tree) { - let module = Module { - module_id, - krate: self.krate, - }; + let module = Module { module_id, krate: self.krate }; self.add_module_item(&mut module_items, name, PerNs::types(module.into())); } @@ -305,20 +267,13 @@ where if import.is_glob { return ReachedFixedPoint::Yes; }; - let original_module = Module { - krate: self.krate, - module_id, - }; + let original_module = Module { krate: self.krate, module_id }; let (def, reached_fixedpoint) = - self.result - .resolve_path_fp(self.db, original_module, &import.path); + self.result.resolve_path_fp(self.db, original_module, &import.path); if reached_fixedpoint == ReachedFixedPoint::Yes { let last_segment = import.path.segments.last().unwrap(); - let name = import - .alias - .clone() - .unwrap_or_else(|| last_segment.name.clone()); + let name = import.alias.clone().unwrap_or_else(|| last_segment.name.clone()); log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658 @@ -330,10 +285,7 @@ where } } self.update(module_id, |items| { - let res = Resolution { - def, - import: Some(import_id), - }; + let res = Resolution { def, import: Some(import_id) }; items.items.insert(name, res); }); } @@ -358,12 +310,7 @@ impl ItemMap { let module_tree = db.module_tree(krate); let input = module_tree .modules() - .map(|module_id| { - ( - module_id, - db.lower_module_module(Module { krate, module_id }), - ) - }) + .map(|module_id| (module_id, db.lower_module_module(Module { krate, module_id }))) .collect::>(); let resolver = Resolver::new(db, &input, krate); diff --git a/crates/ra_hir/src/nameres/lower.rs b/crates/ra_hir/src/nameres/lower.rs index 7e6e48ae00..7e9a3de2bf 100644 --- 
a/crates/ra_hir/src/nameres/lower.rs +++ b/crates/ra_hir/src/nameres/lower.rs @@ -82,13 +82,9 @@ impl LoweredModule { let mut source_map = ImportSourceMap::default(); let mut res = LoweredModule::default(); match source { - ModuleSource::SourceFile(it) => res.fill( - &mut source_map, - db, - module, - file_id, - &mut it.items_with_macros(), - ), + ModuleSource::SourceFile(it) => { + res.fill(&mut source_map, db, module, file_id, &mut it.items_with_macros()) + } ModuleSource::Module(it) => { if let Some(item_list) = it.item_list() { res.fill( @@ -121,10 +117,8 @@ impl LoweredModule { } ast::ItemOrMacro::Macro(macro_call) => { let item_id = file_items.id_of_unchecked(macro_call.syntax()); - let loc = MacroCallLoc { - module, - source_item_id: SourceItemId { file_id, item_id }, - }; + let loc = + MacroCallLoc { module, source_item_id: SourceItemId { file_id, item_id } }; let id = loc.id(db); let file_id = HirFileId::from(id); //FIXME: expand recursively @@ -163,22 +157,19 @@ impl LoweredModule { ast::ModuleItemKind::FnDef(it) => { if let Some(name) = it.name() { let func = Function { id: ctx.to_def(it) }; - self.declarations - .insert(name.as_name(), PerNs::values(func.into())); + self.declarations.insert(name.as_name(), PerNs::values(func.into())); } } ast::ModuleItemKind::TraitDef(it) => { if let Some(name) = it.name() { let t = Trait { id: ctx.to_def(it) }; - self.declarations - .insert(name.as_name(), PerNs::types(t.into())); + self.declarations.insert(name.as_name(), PerNs::types(t.into())); } } ast::ModuleItemKind::TypeDef(it) => { if let Some(name) = it.name() { let t = Type { id: ctx.to_def(it) }; - self.declarations - .insert(name.as_name(), PerNs::types(t.into())); + self.declarations.insert(name.as_name(), PerNs::types(t.into())); } } ast::ModuleItemKind::ImplBlock(_) => { @@ -207,15 +198,13 @@ impl LoweredModule { ast::ModuleItemKind::ConstDef(it) => { if let Some(name) = it.name() { let c = Const { id: ctx.to_def(it) }; - self.declarations - .insert(name.as_name(), PerNs::values(c.into())); + self.declarations.insert(name.as_name(), PerNs::values(c.into())); } } ast::ModuleItemKind::StaticDef(it) => { if let Some(name) = it.name() { let s = Static { id: ctx.to_def(it) }; - self.declarations - .insert(name.as_name(), PerNs::values(s.into())); + self.declarations.insert(name.as_name(), PerNs::values(s.into())); } } ast::ModuleItemKind::Module(_) => { diff --git a/crates/ra_hir/src/nameres/tests.rs b/crates/ra_hir/src/nameres/tests.rs index 0654dbaa13..3dfad6bf2a 100644 --- a/crates/ra_hir/src/nameres/tests.rs +++ b/crates/ra_hir/src/nameres/tests.rs @@ -42,19 +42,11 @@ fn check_module_item_map(map: &ItemMap, module_id: ModuleId, expected: &str) { .collect::>(); lines.sort(); let actual = lines.join("\n"); - let expected = expected - .trim() - .lines() - .map(|it| it.trim()) - .collect::>() - .join("\n"); + let expected = expected.trim().lines().map(|it| it.trim()).collect::>().join("\n"); assert_eq_text!(&expected, &actual); fn dump_resolution(resolution: &Resolution) -> &'static str { - match ( - resolution.def.types.is_some(), - resolution.def.values.is_some(), - ) { + match (resolution.def.types.is_some(), resolution.def.values.is_some()) { (true, true) => "t v", (true, false) => "t", (false, true) => "v", @@ -314,9 +306,7 @@ fn item_map_across_crates() { let mut crate_graph = CrateGraph::default(); let main_crate = crate_graph.add_crate_root(main_id); let lib_crate = crate_graph.add_crate_root(lib_id); - crate_graph - .add_dep(main_crate, "test_crate".into(), lib_crate) - 
.unwrap(); + crate_graph.add_dep(main_crate, "test_crate".into(), lib_crate).unwrap(); db.set_crate_graph(Arc::new(crate_graph)); @@ -357,9 +347,7 @@ fn extern_crate_rename() { let mut crate_graph = CrateGraph::default(); let main_crate = crate_graph.add_crate_root(main_id); let lib_crate = crate_graph.add_crate_root(lib_id); - crate_graph - .add_dep(main_crate, "alloc".into(), lib_crate) - .unwrap(); + crate_graph.add_dep(main_crate, "alloc".into(), lib_crate).unwrap(); db.set_crate_graph(Arc::new(crate_graph)); @@ -406,9 +394,7 @@ fn import_across_source_roots() { let mut crate_graph = CrateGraph::default(); let main_crate = crate_graph.add_crate_root(main_id); let lib_crate = crate_graph.add_crate_root(lib_id); - crate_graph - .add_dep(main_crate, "test_crate".into(), lib_crate) - .unwrap(); + crate_graph.add_dep(main_crate, "test_crate".into(), lib_crate).unwrap(); db.set_crate_graph(Arc::new(crate_graph)); @@ -447,9 +433,7 @@ fn reexport_across_crates() { let mut crate_graph = CrateGraph::default(); let main_crate = crate_graph.add_crate_root(main_id); let lib_crate = crate_graph.add_crate_root(lib_id); - crate_graph - .add_dep(main_crate, "test_crate".into(), lib_crate) - .unwrap(); + crate_graph.add_dep(main_crate, "test_crate".into(), lib_crate).unwrap(); db.set_crate_graph(Arc::new(crate_graph)); @@ -482,11 +466,7 @@ fn check_item_map_is_not_recomputed(initial: &str, file_change: &str) { let events = db.log_executed(|| { db.item_map(krate); }); - assert!( - !format!("{:?}", events).contains("item_map"), - "{:#?}", - events - ) + assert!(!format!("{:?}", events).contains("item_map"), "{:#?}", events) } } diff --git a/crates/ra_hir/src/path.rs b/crates/ra_hir/src/path.rs index cb0a045004..6a24c8aa76 100644 --- a/crates/ra_hir/src/path.rs +++ b/crates/ra_hir/src/path.rs @@ -66,14 +66,9 @@ impl Path { match segment.kind()? { ast::PathSegmentKind::Name(name) => { - let args = segment - .type_arg_list() - .and_then(GenericArgs::from_ast) - .map(Arc::new); - let segment = PathSegment { - name: name.as_name(), - args_and_bindings: args, - }; + let args = + segment.type_arg_list().and_then(GenericArgs::from_ast).map(Arc::new); + let segment = PathSegment { name: name.as_name(), args_and_bindings: args }; segments.push(segment); } ast::PathSegmentKind::CrateKw => { @@ -153,10 +148,7 @@ impl From for Path { fn from(name: Name) -> Path { Path { kind: PathKind::Plain, - segments: vec![PathSegment { - name, - args_and_bindings: None, - }], + segments: vec![PathSegment { name, args_and_bindings: None }], } } } @@ -209,18 +201,13 @@ fn expand_use_tree<'a>( } fn convert_path(prefix: Option, path: &ast::Path) -> Option { - let prefix = if let Some(qual) = path.qualifier() { - Some(convert_path(prefix, qual)?) - } else { - prefix - }; + let prefix = + if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix }; let segment = path.segment()?; let res = match segment.kind()? 
{ ast::PathSegmentKind::Name(name) => { - let mut res = prefix.unwrap_or_else(|| Path { - kind: PathKind::Plain, - segments: Vec::with_capacity(1), - }); + let mut res = prefix + .unwrap_or_else(|| Path { kind: PathKind::Plain, segments: Vec::with_capacity(1) }); res.segments.push(PathSegment { name: name.as_name(), args_and_bindings: None, // no type args in use @@ -231,28 +218,19 @@ fn convert_path(prefix: Option, path: &ast::Path) -> Option { if prefix.is_some() { return None; } - Path { - kind: PathKind::Crate, - segments: Vec::new(), - } + Path { kind: PathKind::Crate, segments: Vec::new() } } ast::PathSegmentKind::SelfKw => { if prefix.is_some() { return None; } - Path { - kind: PathKind::Self_, - segments: Vec::new(), - } + Path { kind: PathKind::Self_, segments: Vec::new() } } ast::PathSegmentKind::SuperKw => { if prefix.is_some() { return None; } - Path { - kind: PathKind::Super, - segments: Vec::new(), - } + Path { kind: PathKind::Super, segments: Vec::new() } } }; Some(res) diff --git a/crates/ra_hir/src/query_definitions.rs b/crates/ra_hir/src/query_definitions.rs index b4d8da1e65..03113e7cc2 100644 --- a/crates/ra_hir/src/query_definitions.rs +++ b/crates/ra_hir/src/query_definitions.rs @@ -23,7 +23,5 @@ pub(super) fn file_item( source_item_id: SourceItemId, ) -> TreeArc { let source_file = db.hir_parse(source_item_id.file_id); - db.file_items(source_item_id.file_id)[source_item_id.item_id] - .to_node(&source_file) - .to_owned() + db.file_items(source_item_id.file_id)[source_item_id.item_id].to_node(&source_file).to_owned() } diff --git a/crates/ra_hir/src/resolve.rs b/crates/ra_hir/src/resolve.rs index 0f60d47423..3d7ec5683d 100644 --- a/crates/ra_hir/src/resolve.rs +++ b/crates/ra_hir/src/resolve.rs @@ -138,10 +138,7 @@ impl Resolver { expr_scopes: Arc, scope_id: ScopeId, ) -> Resolver { - self.push_scope(Scope::ExprScope(ExprScope { - expr_scopes, - scope_id, - })) + self.push_scope(Scope::ExprScope(ExprScope { expr_scopes, scope_id })) } } @@ -170,11 +167,8 @@ impl Scope { } } Scope::ExprScope(e) => { - let entry = e - .expr_scopes - .entries(e.scope_id) - .iter() - .find(|entry| entry.name() == name); + let entry = + e.expr_scopes.entries(e.scope_id).iter().find(|entry| entry.name() == name); match entry { Some(e) => PerNs::values(Resolution::LocalBinding(e.pat())), None => PerNs::none(), @@ -193,35 +187,24 @@ impl Scope { // def: m.module.into(), // }), // ); - m.item_map[m.module.module_id] - .entries() - .for_each(|(name, res)| { - f(name.clone(), res.def.map(Resolution::Def)); - }); + m.item_map[m.module.module_id].entries().for_each(|(name, res)| { + f(name.clone(), res.def.map(Resolution::Def)); + }); m.item_map.extern_prelude.iter().for_each(|(name, def)| { f(name.clone(), PerNs::types(Resolution::Def(*def))); }); } Scope::GenericParams(gp) => { for param in &gp.params { - f( - param.name.clone(), - PerNs::types(Resolution::GenericParam(param.idx)), - ) + f(param.name.clone(), PerNs::types(Resolution::GenericParam(param.idx))) } } Scope::ImplBlockScope(i) => { - f( - Name::self_type(), - PerNs::types(Resolution::SelfType(i.clone())), - ); + f(Name::self_type(), PerNs::types(Resolution::SelfType(i.clone()))); } Scope::ExprScope(e) => { e.expr_scopes.entries(e.scope_id).iter().for_each(|e| { - f( - e.name().clone(), - PerNs::values(Resolution::LocalBinding(e.pat())), - ); + f(e.name().clone(), PerNs::values(Resolution::LocalBinding(e.pat()))); }); } } diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs index 59f7822774..625a2ce45a 
100644 --- a/crates/ra_hir/src/source_binder.rs +++ b/crates/ra_hir/src/source_binder.rs @@ -65,11 +65,7 @@ pub fn module_from_child_node( file_id: FileId, child: &SyntaxNode, ) -> Option { - if let Some(m) = child - .ancestors() - .filter_map(ast::Module::cast) - .find(|it| !it.has_semi()) - { + if let Some(m) = child.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi()) { module_from_inline(db, file_id.into(), m) } else { module_from_file_id(db, file_id.into()) @@ -82,14 +78,13 @@ fn module_from_source( decl_id: Option, ) -> Option { let source_root_id = db.file_source_root(file_id.as_original_file()); - db.source_root_crates(source_root_id) - .iter() - .map(|&crate_id| Crate { crate_id }) - .find_map(|krate| { + db.source_root_crates(source_root_id).iter().map(|&crate_id| Crate { crate_id }).find_map( + |krate| { let module_tree = db.module_tree(krate); let module_id = module_tree.find_module_by_source(file_id, decl_id)?; Some(Module { krate, module_id }) - }) + }, + ) } pub fn function_from_position(db: &impl HirDatabase, position: FilePosition) -> Option { @@ -116,9 +111,7 @@ pub fn function_from_module( let (file_id, _) = module.definition_source(db); let file_id = file_id.into(); let ctx = LocationCtx::new(db, module, file_id); - Function { - id: ctx.to_def(fn_def), - } + Function { id: ctx.to_def(fn_def) } } pub fn function_from_child_node( @@ -138,18 +131,14 @@ pub fn struct_from_module( let (file_id, _) = module.definition_source(db); let file_id = file_id.into(); let ctx = LocationCtx::new(db, module, file_id); - Struct { - id: ctx.to_def(struct_def), - } + Struct { id: ctx.to_def(struct_def) } } pub fn enum_from_module(db: &impl HirDatabase, module: Module, enum_def: &ast::EnumDef) -> Enum { let (file_id, _) = module.definition_source(db); let file_id = file_id.into(); let ctx = LocationCtx::new(db, module, file_id); - Enum { - id: ctx.to_def(enum_def), - } + Enum { id: ctx.to_def(enum_def) } } pub fn trait_from_module( @@ -160,9 +149,7 @@ pub fn trait_from_module( let (file_id, _) = module.definition_source(db); let file_id = file_id.into(); let ctx = LocationCtx::new(db, module, file_id); - Trait { - id: ctx.to_def(trait_def), - } + Trait { id: ctx.to_def(trait_def) } } pub fn macro_symbols(db: &impl HirDatabase, file_id: FileId) -> Vec<(SmolStr, TextRange)> { diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index 86a7f8b837..453520bbe3 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -305,10 +305,8 @@ impl Ty { match type_ref { TypeRef::Never => Ty::Never, TypeRef::Tuple(inner) => { - let inner_tys = inner - .iter() - .map(|tr| Ty::from_hir(db, resolver, tr)) - .collect::>(); + let inner_tys = + inner.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect::>(); Ty::Tuple(inner_tys.into()) } TypeRef::Path(path) => Ty::from_hir_path(db, resolver, path), @@ -330,17 +328,11 @@ impl Ty { } TypeRef::Placeholder => Ty::Unknown, TypeRef::Fn(params) => { - let mut inner_tys = params - .iter() - .map(|tr| Ty::from_hir(db, resolver, tr)) - .collect::>(); - let return_ty = inner_tys - .pop() - .expect("TypeRef::Fn should always have at least return type"); - let sig = FnSig { - input: inner_tys, - output: return_ty, - }; + let mut inner_tys = + params.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect::>(); + let return_ty = + inner_tys.pop().expect("TypeRef::Fn should always have at least return type"); + let sig = FnSig { input: inner_tys, output: return_ty }; Ty::FnPtr(Arc::new(sig)) } TypeRef::Error => Ty::Unknown, @@ -407,10 
+399,7 @@ impl Ty { resolved: TypableDef, ) -> Substs { let mut substs = Vec::new(); - let last = path - .segments - .last() - .expect("path should have at least one segment"); + let last = path.segments.last().expect("path should have at least one segment"); let (def_generics, segment) = match resolved { TypableDef::Function(func) => (func.generic_params(db), last), TypableDef::Struct(s) => (s.generic_params(db), last), @@ -447,11 +436,8 @@ impl Ty { } // add placeholders for args that were not provided // TODO: handle defaults - let supplied_params = segment - .args_and_bindings - .as_ref() - .map(|ga| ga.args.len()) - .unwrap_or(0); + let supplied_params = + segment.args_and_bindings.as_ref().map(|ga| ga.args.len()).unwrap_or(0); for _ in supplied_params..def_generics.params.len() { substs.push(Ty::Unknown); } @@ -531,17 +517,8 @@ impl Ty { /// `Option` afterwards.) pub fn apply_substs(self, substs: Substs) -> Ty { match self { - Ty::Adt { def_id, name, .. } => Ty::Adt { - def_id, - name, - substs, - }, - Ty::FnDef { def, name, sig, .. } => Ty::FnDef { - def, - name, - sig, - substs, - }, + Ty::Adt { def_id, name, .. } => Ty::Adt { def_id, name, substs }, + Ty::FnDef { def, name, sig, .. } => Ty::FnDef { def, name, sig, substs }, _ => self, } } @@ -591,42 +568,25 @@ impl fmt::Display for Ty { if ts.len() == 1 { write!(f, "({},)", ts[0]) } else { - join(ts.iter()) - .surround_with("(", ")") - .separator(", ") - .to_fmt(f) + join(ts.iter()).surround_with("(", ")").separator(", ").to_fmt(f) } } Ty::FnPtr(sig) => { - join(sig.input.iter()) - .surround_with("fn(", ")") - .separator(", ") - .to_fmt(f)?; + join(sig.input.iter()).surround_with("fn(", ")").separator(", ").to_fmt(f)?; write!(f, " -> {}", sig.output) } - Ty::FnDef { - name, substs, sig, .. - } => { + Ty::FnDef { name, substs, sig, .. } => { write!(f, "fn {}", name)?; if substs.0.len() > 0 { - join(substs.0.iter()) - .surround_with("<", ">") - .separator(", ") - .to_fmt(f)?; + join(substs.0.iter()).surround_with("<", ">").separator(", ").to_fmt(f)?; } - join(sig.input.iter()) - .surround_with("(", ")") - .separator(", ") - .to_fmt(f)?; + join(sig.input.iter()).surround_with("(", ")").separator(", ").to_fmt(f)?; write!(f, " -> {}", sig.output) } Ty::Adt { name, substs, .. 
} => { write!(f, "{}", name)?; if substs.0.len() > 0 { - join(substs.0.iter()) - .surround_with("<", ">") - .separator(", ") - .to_fmt(f)?; + join(substs.0.iter()).surround_with("<", ">").separator(", ").to_fmt(f)?; } Ok(()) } @@ -646,31 +606,16 @@ fn type_for_fn(db: &impl HirDatabase, def: Function) -> Ty { let resolver = def.resolver(db); let generics = def.generic_params(db); let name = def.name(db); - let input = signature - .params() - .iter() - .map(|tr| Ty::from_hir(db, &resolver, tr)) - .collect::>(); + let input = + signature.params().iter().map(|tr| Ty::from_hir(db, &resolver, tr)).collect::>(); let output = Ty::from_hir(db, &resolver, signature.ret_type()); let sig = Arc::new(FnSig { input, output }); let substs = make_substs(&generics); - Ty::FnDef { - def, - sig, - name, - substs, - } + Ty::FnDef { def, sig, name, substs } } fn make_substs(generics: &GenericParams) -> Substs { - Substs( - generics - .params - .iter() - .map(|_p| Ty::Unknown) - .collect::>() - .into(), - ) + Substs(generics.params.iter().map(|_p| Ty::Unknown).collect::>().into()) } fn type_for_struct(db: &impl HirDatabase, s: Struct) -> Ty { @@ -935,11 +880,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { } fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs) -> bool { - substs1 - .0 - .iter() - .zip(substs2.0.iter()) - .all(|(t1, t2)| self.unify(t1, t2)) + substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify(t1, t2)) } fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool { @@ -961,25 +902,16 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { }, (Ty::Bool, _) | (Ty::Str, _) | (Ty::Never, _) | (Ty::Char, _) => ty1 == ty2, ( - Ty::Adt { - def_id: def_id1, - substs: substs1, - .. - }, - Ty::Adt { - def_id: def_id2, - substs: substs2, - .. - }, + Ty::Adt { def_id: def_id1, substs: substs1, .. }, + Ty::Adt { def_id: def_id2, substs: substs2, .. 
}, ) if def_id1 == def_id2 => self.unify_substs(substs1, substs2), (Ty::Slice(t1), Ty::Slice(t2)) => self.unify(t1, t2), (Ty::RawPtr(t1, m1), Ty::RawPtr(t2, m2)) if m1 == m2 => self.unify(t1, t2), (Ty::Ref(t1, m1), Ty::Ref(t2, m2)) if m1 == m2 => self.unify(t1, t2), (Ty::FnPtr(sig1), Ty::FnPtr(sig2)) if sig1 == sig2 => true, - (Ty::Tuple(ts1), Ty::Tuple(ts2)) if ts1.len() == ts2.len() => ts1 - .iter() - .zip(ts2.iter()) - .all(|(t1, t2)| self.unify(t1, t2)), + (Ty::Tuple(ts1), Ty::Tuple(ts2)) if ts1.len() == ts2.len() => { + ts1.iter().zip(ts2.iter()).all(|(t1, t2)| self.unify(t1, t2)) + } (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2))) | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2))) | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2))) => { @@ -994,8 +926,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | (Ty::Infer(InferTy::FloatVar(tv)), other) | (other, Ty::Infer(InferTy::FloatVar(tv))) => { // the type var is unknown since we tried to resolve it - self.var_unification_table - .union_value(*tv, TypeVarValue::Known(other.clone())); + self.var_unification_table.union_value(*tv, TypeVarValue::Known(other.clone())); true } _ => false, @@ -1003,21 +934,15 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { } fn new_type_var(&mut self) -> Ty { - Ty::Infer(InferTy::TypeVar( - self.var_unification_table.new_key(TypeVarValue::Unknown), - )) + Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown))) } fn new_integer_var(&mut self) -> Ty { - Ty::Infer(InferTy::IntVar( - self.var_unification_table.new_key(TypeVarValue::Unknown), - )) + Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown))) } fn new_float_var(&mut self) -> Ty { - Ty::Infer(InferTy::FloatVar( - self.var_unification_table.new_key(TypeVarValue::Unknown), - )) + Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown))) } /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it. 
@@ -1207,9 +1132,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { for subpat in subpats { let matching_field = def.and_then(|it| it.field(self.db, &subpat.name)); - let expected_ty = matching_field - .map_or(Ty::Unknown, |field| field.ty(self.db)) - .subst(&substs); + let expected_ty = + matching_field.map_or(Ty::Unknown, |field| field.ty(self.db)).subst(&substs); self.infer_pat(subpat.pat, &expected_ty); } @@ -1249,25 +1173,18 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { let subty = self.infer_pat(*pat, expectation); Ty::Ref(subty.into(), *mutability) } - Pat::TupleStruct { - path: ref p, - args: ref subpats, - } => self.infer_tuple_struct_pat(p.as_ref(), subpats, expected), - Pat::Struct { - path: ref p, - args: ref fields, - } => self.infer_struct_pat(p.as_ref(), fields, expected), + Pat::TupleStruct { path: ref p, args: ref subpats } => { + self.infer_tuple_struct_pat(p.as_ref(), subpats, expected) + } + Pat::Struct { path: ref p, args: ref fields } => { + self.infer_struct_pat(p.as_ref(), fields, expected) + } Pat::Path(path) => { // TODO use correct resolver for the surrounding expression let resolver = self.resolver.clone(); - self.infer_path_expr(&resolver, &path) - .unwrap_or(Ty::Unknown) + self.infer_path_expr(&resolver, &path).unwrap_or(Ty::Unknown) } - Pat::Bind { - mode, - name: _name, - subpat, - } => { + Pat::Bind { mode, name: _name, subpat } => { let subty = if let Some(subpat) = subpat { self.infer_pat(*subpat, expected) } else { @@ -1294,11 +1211,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { let body = Arc::clone(&self.body); // avoid borrow checker problem let ty = match &body[tgt_expr] { Expr::Missing => Ty::Unknown, - Expr::If { - condition, - then_branch, - else_branch, - } => { + Expr::If { condition, then_branch, else_branch } => { // if let is desugared to match, so this is always simple if self.infer_expr(*condition, &Expectation::has_type(Ty::Bool)); let then_ty = self.infer_expr(*then_branch, expected); @@ -1325,21 +1238,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { self.infer_expr(*body, &Expectation::has_type(Ty::unit())); Ty::unit() } - Expr::For { - iterable, - body, - pat, - } => { + Expr::For { iterable, body, pat } => { let _iterable_ty = self.infer_expr(*iterable, &Expectation::none()); self.infer_pat(*pat, &Ty::Unknown); self.infer_expr(*body, &Expectation::has_type(Ty::unit())); Ty::unit() } - Expr::Lambda { - body, - args, - arg_types, - } => { + Expr::Lambda { body, args, arg_types } => { assert_eq!(args.len(), arg_types.len()); for (arg_pat, arg_type) in args.iter().zip(arg_types.iter()) { @@ -1362,11 +1267,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { Ty::FnPtr(sig) => (sig.input.clone(), sig.output.clone()), Ty::FnDef { substs, sig, .. 
} => { let ret_ty = sig.output.clone().subst(&substs); - let param_tys = sig - .input - .iter() - .map(|ty| ty.clone().subst(&substs)) - .collect(); + let param_tys = + sig.input.iter().map(|ty| ty.clone().subst(&substs)).collect(); (param_tys, ret_ty) } _ => { @@ -1381,11 +1283,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { } ret_ty } - Expr::MethodCall { - receiver, - args, - method_name, - } => { + Expr::MethodCall { receiver, args, method_name } => { let receiver_ty = self.infer_expr(*receiver, &Expectation::none()); let resolved = receiver_ty.clone().lookup_method(self.db, method_name); let method_ty = match resolved { @@ -1399,11 +1297,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { let (expected_receiver_ty, param_tys, ret_ty) = match &method_ty { Ty::FnPtr(sig) => { if !sig.input.is_empty() { - ( - sig.input[0].clone(), - sig.input[1..].to_vec(), - sig.output.clone(), - ) + (sig.input[0].clone(), sig.input[1..].to_vec(), sig.output.clone()) } else { (Ty::Unknown, Vec::new(), sig.output.clone()) } @@ -1469,11 +1363,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { } Ty::Never } - Expr::StructLit { - path, - fields, - spread, - } => { + Expr::StructLit { path, fields, spread } => { let (ty, def_id) = self.resolve_variant(path.as_ref()); let substs = ty.substs().unwrap_or_else(Substs::empty); for field in fields { @@ -1497,14 +1387,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { let i = name.to_string().parse::().ok(); i.and_then(|i| fields.get(i).cloned()) } - Ty::Adt { - def_id: AdtDef::Struct(s), - ref substs, - .. - } => s.field(self.db, name).map(|field| { - self.write_field_resolution(tgt_expr, field); - field.ty(self.db).subst(substs) - }), + Ty::Adt { def_id: AdtDef::Struct(s), ref substs, .. } => { + s.field(self.db, name).map(|field| { + self.write_field_resolution(tgt_expr, field); + field.ty(self.db).subst(substs) + }) + } _ => None, }) .unwrap_or(Ty::Unknown); @@ -1635,15 +1523,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { ) -> Ty { for stmt in statements { match stmt { - Statement::Let { - pat, - type_ref, - initializer, - } => { - let decl_ty = type_ref - .as_ref() - .map(|tr| self.make_ty(tr)) - .unwrap_or(Ty::Unknown); + Statement::Let { pat, type_ref, initializer } => { + let decl_ty = + type_ref.as_ref().map(|tr| self.make_ty(tr)).unwrap_or(Ty::Unknown); let decl_ty = self.insert_type_vars(decl_ty); let ty = if let Some(expr) = initializer { let expr_ty = self.infer_expr(*expr, &Expectation::has_type(decl_ty)); @@ -1659,11 +1541,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { } } } - let ty = if let Some(expr) = tail { - self.infer_expr(expr, expected) - } else { - Ty::unit() - }; + let ty = if let Some(expr) = tail { self.infer_expr(expr, expected) } else { Ty::unit() }; ty } @@ -1678,10 +1556,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { } fn infer_body(&mut self) { - self.infer_expr( - self.body.body_expr(), - &Expectation::has_type(self.return_ty.clone()), - ); + self.infer_expr(self.body.body_expr(), &Expectation::has_type(self.return_ty.clone())); } } diff --git a/crates/ra_hir/src/ty/method_resolution.rs b/crates/ra_hir/src/ty/method_resolution.rs index a7d4517eec..da7587f015 100644 --- a/crates/ra_hir/src/ty/method_resolution.rs +++ b/crates/ra_hir/src/ty/method_resolution.rs @@ -46,18 +46,13 @@ impl CrateImplBlocks { ty: &Ty, ) -> impl Iterator + 'a { let fingerprint = TyFingerprint::for_impl(ty); - fingerprint - .and_then(|f| self.impls.get(&f)) - .into_iter() - .flat_map(|i| i.iter()) - .map(move 
|(module_id, impl_id)| { - let module = Module { - krate: self.krate, - module_id: *module_id, - }; + fingerprint.and_then(|f| self.impls.get(&f)).into_iter().flat_map(|i| i.iter()).map( + move |(module_id, impl_id)| { + let module = Module { krate: self.krate, module_id: *module_id }; let module_impl_blocks = db.impls_in_module(module); (module, ImplBlock::from_id(module_impl_blocks, *impl_id)) - }) + }, + ) } pub fn lookup_impl_blocks_for_trait<'a>( @@ -66,18 +61,13 @@ impl CrateImplBlocks { tr: &Trait, ) -> impl Iterator + 'a { let id = tr.id; - self.impls_by_trait - .get(&id) - .into_iter() - .flat_map(|i| i.iter()) - .map(move |(module_id, impl_id)| { - let module = Module { - krate: self.krate, - module_id: *module_id, - }; + self.impls_by_trait.get(&id).into_iter().flat_map(|i| i.iter()).map( + move |(module_id, impl_id)| { + let module = Module { krate: self.krate, module_id: *module_id }; let module_impl_blocks = db.impls_in_module(module); (module, ImplBlock::from_id(module_impl_blocks, *impl_id)) - }) + }, + ) } fn collect_recursive(&mut self, db: &impl HirDatabase, module: &Module) { diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index 30da8fc230..2621d1b552 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs @@ -634,11 +634,7 @@ fn infer(content: &str) -> String { let (db, _, file_id) = MockDatabase::with_single_file(content); let source_file = db.parse(file_id); let mut acc = String::new(); - for fn_def in source_file - .syntax() - .descendants() - .filter_map(ast::FnDef::cast) - { + for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) { let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap(); let inference_result = func.infer(&db); let body_syntax_mapping = func.body_syntax_mapping(&db); @@ -725,8 +721,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { " .to_string(); - db.query_mut(ra_db::FileTextQuery) - .set(pos.file_id, Arc::new(new_text)); + db.query_mut(ra_db::FileTextQuery).set(pos.file_id, Arc::new(new_text)); { let events = db.log_executed(|| { diff --git a/crates/ra_hir/src/type_ref.rs b/crates/ra_hir/src/type_ref.rs index c9db4e0a58..ee8b7376ab 100644 --- a/crates/ra_hir/src/type_ref.rs +++ b/crates/ra_hir/src/type_ref.rs @@ -62,11 +62,9 @@ impl TypeRef { ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), TupleType(inner) => TypeRef::Tuple(inner.fields().map(TypeRef::from_ast).collect()), NeverType(..) 
=> TypeRef::Never, - PathType(inner) => inner - .path() - .and_then(Path::from_ast) - .map(TypeRef::Path) - .unwrap_or(TypeRef::Error), + PathType(inner) => { + inner.path().and_then(Path::from_ast).map(TypeRef::Path).unwrap_or(TypeRef::Error) + } PointerType(inner) => { let inner_ty = TypeRef::from_ast_opt(inner.type_ref()); let mutability = Mutability::from_mutable(inner.is_mut()); @@ -83,10 +81,7 @@ impl TypeRef { FnPointerType(inner) => { let ret_ty = TypeRef::from_ast_opt(inner.ret_type().and_then(|rt| rt.type_ref())); let mut params = if let Some(pl) = inner.param_list() { - pl.params() - .map(|p| p.type_ref()) - .map(TypeRef::from_ast_opt) - .collect() + pl.params().map(|p| p.type_ref()).map(TypeRef::from_ast_opt).collect() } else { Vec::new() }; diff --git a/crates/ra_ide_api/src/assists.rs b/crates/ra_ide_api/src/assists.rs index 2a96fdf471..7a9c666817 100644 --- a/crates/ra_ide_api/src/assists.rs +++ b/crates/ra_ide_api/src/assists.rs @@ -7,10 +7,7 @@ pub(crate) fn assists(db: &RootDatabase, frange: FileRange) -> Vec .into_iter() .map(|(label, action)| { let file_id = frange.file_id; - let file_edit = SourceFileEdit { - file_id, - edit: action.edit, - }; + let file_edit = SourceFileEdit { file_id, edit: action.edit }; SourceChange { label: label.label, source_file_edits: vec![file_edit], diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs index a59ab78535..1b279615c5 100644 --- a/crates/ra_ide_api/src/call_info.rs +++ b/crates/ra_ide_api/src/call_info.rs @@ -21,9 +21,7 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option Option FnCallNode<'a> { _ => return None, }), - FnCallNode::MethodCallExpr(call_expr) => call_expr - .syntax() - .children() - .filter_map(ast::NameRef::cast) - .nth(0), + FnCallNode::MethodCallExpr(call_expr) => { + call_expr.syntax().children().filter_map(ast::NameRef::cast).nth(0) + } } } @@ -117,12 +108,7 @@ impl CallInfo { let label = crate::completion::function_label(node)?; let doc = function.docs(db); - Some(CallInfo { - parameters: param_list(node), - label, - doc, - active_parameter: None, - }) + Some(CallInfo { parameters: param_list(node), label, doc, active_parameter: None }) } } @@ -136,10 +122,7 @@ fn param_list(node: &ast::FnDef) -> Vec { // Maybe use param.pat here? See if we can just extract the name? 
//res.extend(param_list.params().map(|p| p.syntax().text().to_string())); res.extend( - param_list - .params() - .filter_map(|p| p.pat()) - .map(|pat| pat.syntax().text().to_string()), + param_list.params().filter_map(|p| p.pat()).map(|pat| pat.syntax().text().to_string()), ); } res @@ -378,10 +361,7 @@ pub fn foo() { "#, ); - assert_eq!( - info.parameters, - vec!["&mut self".to_string(), "ctx".to_string()] - ); + assert_eq!(info.parameters, vec!["&mut self".to_string(), "ctx".to_string()]); assert_eq!(info.active_parameter, Some(1)); assert_eq!( info.doc.map(|it| it.into()), diff --git a/crates/ra_ide_api/src/change.rs b/crates/ra_ide_api/src/change.rs index 9929557404..3f041f9c3a 100644 --- a/crates/ra_ide_api/src/change.rs +++ b/crates/ra_ide_api/src/change.rs @@ -65,16 +65,8 @@ impl AnalysisChange { path: RelativePathBuf, text: Arc, ) { - let file = AddFile { - file_id, - path, - text, - }; - self.roots_changed - .entry(root_id) - .or_default() - .added - .push(file); + let file = AddFile { file_id, path, text }; + self.roots_changed.entry(root_id).or_default().added.push(file); } pub fn change_file(&mut self, file_id: FileId, new_text: Arc) { @@ -83,11 +75,7 @@ impl AnalysisChange { pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) { let file = RemoveFile { file_id, path }; - self.roots_changed - .entry(root_id) - .or_default() - .removed - .push(file); + self.roots_changed.entry(root_id).or_default().removed.push(file); } pub fn add_library(&mut self, data: LibraryData) { @@ -155,17 +143,9 @@ impl LibraryData { let mut root_change = RootChange::default(); root_change.added = files .into_iter() - .map(|(file_id, path, text)| AddFile { - file_id, - path, - text, - }) + .map(|(file_id, path, text)| AddFile { file_id, path, text }) .collect(); - LibraryData { - root_id, - root_change, - symbol_index, - } + LibraryData { root_id, root_change, symbol_index } } } @@ -226,10 +206,7 @@ impl RootDatabase { self.last_gc_check = time::Instant::now(); let retained_trees = syntax_tree_stats(self).retained; if retained_trees > 100 { - log::info!( - "automatic garbadge collection, {} retained trees", - retained_trees - ); + log::info!("automatic garbadge collection, {} retained trees", retained_trees); self.collect_garbage(); } } @@ -238,9 +215,7 @@ impl RootDatabase { pub(crate) fn collect_garbage(&mut self) { self.last_gc = time::Instant::now(); - let sweep = SweepStrategy::default() - .discard_values() - .sweep_all_revisions(); + let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); self.query(ra_db::ParseQuery).sweep(sweep); diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs index bad51cc51f..a673dbdcf6 100644 --- a/crates/ra_ide_api/src/completion/complete_dot.rs +++ b/crates/ra_ide_api/src/completion/complete_dot.rs @@ -25,9 +25,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) { for receiver in receiver.autoderef(ctx.db) { match receiver { - Ty::Adt { - def_id, ref substs, .. - } => { + Ty::Adt { def_id, ref substs, .. 
} => { match def_id { AdtDef::Struct(s) => { for field in s.fields(ctx.db) { diff --git a/crates/ra_ide_api/src/completion/complete_fn_param.rs b/crates/ra_ide_api/src/completion/complete_fn_param.rs index 8d4df4ea15..43532226fe 100644 --- a/crates/ra_ide_api/src/completion/complete_fn_param.rs +++ b/crates/ra_ide_api/src/completion/complete_fn_param.rs @@ -43,13 +43,12 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) node: &'a N, params: &mut FxHashMap, ) { - node.functions() - .filter_map(|it| it.param_list()) - .flat_map(|it| it.params()) - .for_each(|param| { + node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each( + |param| { let text = param.syntax().text().to_string(); params.entry(text).or_insert((0, param)).0 += 1; - }) + }, + ) } } diff --git a/crates/ra_ide_api/src/completion/complete_path.rs b/crates/ra_ide_api/src/completion/complete_path.rs index 0b9948d4b4..c47a14e9f7 100644 --- a/crates/ra_ide_api/src/completion/complete_path.rs +++ b/crates/ra_ide_api/src/completion/complete_path.rs @@ -31,14 +31,10 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) { hir::ModuleDef::Enum(e) => { e.variants(ctx.db).into_iter().for_each(|variant| { if let Some(name) = variant.name(ctx.db) { - let detail_types = variant - .fields(ctx.db) - .into_iter() - .map(|field| field.ty(ctx.db)); - let detail = join(detail_types) - .separator(", ") - .surround_with("(", ")") - .to_string(); + let detail_types = + variant.fields(ctx.db).into_iter().map(|field| field.ty(ctx.db)); + let detail = + join(detail_types).separator(", ").surround_with("(", ")").to_string(); CompletionItem::new( CompletionKind::Reference, diff --git a/crates/ra_ide_api/src/completion/complete_postfix.rs b/crates/ra_ide_api/src/completion/complete_postfix.rs index e3a739d6da..10a3c8db70 100644 --- a/crates/ra_ide_api/src/completion/complete_postfix.rs +++ b/crates/ra_ide_api/src/completion/complete_postfix.rs @@ -17,11 +17,7 @@ use ra_text_edit::TextEditBuilder; fn postfix_snippet(ctx: &CompletionContext, label: &str, snippet: &str) -> Builder { let replace_range = ctx.source_range(); - let receiver_range = ctx - .dot_receiver - .expect("no receiver available") - .syntax() - .range(); + let receiver_range = ctx.dot_receiver.expect("no receiver available").syntax().range(); let delete_range = TextRange::from_to(receiver_range.start(), replace_range.start()); let mut builder = TextEditBuilder::default(); builder.delete(delete_range); diff --git a/crates/ra_ide_api/src/completion/complete_scope.rs b/crates/ra_ide_api/src/completion/complete_scope.rs index 8674b1e66c..4457884072 100644 --- a/crates/ra_ide_api/src/completion/complete_scope.rs +++ b/crates/ra_ide_api/src/completion/complete_scope.rs @@ -7,13 +7,9 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) { let names = ctx.resolver.all_names(); names.into_iter().for_each(|(name, res)| { - CompletionItem::new( - CompletionKind::Reference, - ctx.source_range(), - name.to_string(), - ) - .from_resolution(ctx, &res) - .add_to(acc) + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.to_string()) + .from_resolution(ctx, &res) + .add_to(acc) }); } diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs index 8abab02215..82bd4d6061 100644 --- a/crates/ra_ide_api/src/completion/completion_context.rs +++ b/crates/ra_ide_api/src/completion/completion_context.rs @@ -108,12 +108,8 
@@ impl<'a> CompletionContext<'a> { } fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) { let name_range = name_ref.syntax().range(); - let top_node = name_ref - .syntax() - .ancestors() - .take_while(|it| it.range() == name_range) - .last() - .unwrap(); + let top_node = + name_ref.syntax().ancestors().take_while(|it| it.range() == name_range).last().unwrap(); match top_node.parent().map(|it| it.kind()) { Some(SOURCE_FILE) | Some(ITEM_LIST) => { diff --git a/crates/ra_ide_api/src/completion/completion_item.rs b/crates/ra_ide_api/src/completion/completion_item.rs index 92e6e78bf9..7b8972af0e 100644 --- a/crates/ra_ide_api/src/completion/completion_item.rs +++ b/crates/ra_ide_api/src/completion/completion_item.rs @@ -105,10 +105,7 @@ impl CompletionItem { } /// What string is used for filtering. pub fn lookup(&self) -> &str { - self.lookup - .as_ref() - .map(|it| it.as_str()) - .unwrap_or_else(|| self.label()) + self.lookup.as_ref().map(|it| it.as_str()).unwrap_or_else(|| self.label()) } pub fn insert_text_format(&self) -> InsertTextFormat { @@ -214,10 +211,7 @@ impl Builder { ) -> Builder { use hir::ModuleDef::*; - let def = resolution - .as_ref() - .take_types() - .or_else(|| resolution.as_ref().take_values()); + let def = resolution.as_ref().take_types().or_else(|| resolution.as_ref().take_values()); let def = match def { None => return self, Some(it) => it, @@ -323,10 +317,8 @@ pub(crate) fn check_completion(test_name: &str, code: &str, kind: CompletionKind }; let completions = completions(&analysis.db, position).unwrap(); let completion_items: Vec = completions.into(); - let mut kind_completions: Vec = completion_items - .into_iter() - .filter(|c| c.completion_kind == kind) - .collect(); + let mut kind_completions: Vec = + completion_items.into_iter().filter(|c| c.completion_kind == kind).collect(); kind_completions.sort_by_key(|c| c.label.clone()); assert_debug_snapshot_matches!(test_name, kind_completions); } diff --git a/crates/ra_ide_api/src/diagnostics.rs b/crates/ra_ide_api/src/diagnostics.rs index a499ac7c60..53d95fb4c6 100644 --- a/crates/ra_ide_api/src/diagnostics.rs +++ b/crates/ra_ide_api/src/diagnostics.rs @@ -21,10 +21,8 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec let source_root = db.file_source_root(file_id); let diag = match problem { Problem::UnresolvedModule { candidate } => { - let create_file = FileSystemEdit::CreateFile { - source_root, - path: candidate.clone(), - }; + let create_file = + FileSystemEdit::CreateFile { source_root, path: candidate.clone() }; let fix = SourceChange { label: "create module".to_string(), source_file_edits: Vec::new(), @@ -44,10 +42,8 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec dst_source_root: source_root, dst_path: move_to.clone(), }; - let create_file = FileSystemEdit::CreateFile { - source_root, - path: move_to.join(candidate), - }; + let create_file = + FileSystemEdit::CreateFile { source_root, path: move_to.join(candidate) }; let fix = SourceChange { label: "move file and create module".to_string(), source_file_edits: Vec::new(), diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs index f61feaf1b3..4051728e16 100644 --- a/crates/ra_ide_api/src/extend_selection.rs +++ b/crates/ra_ide_api/src/extend_selection.rs @@ -31,9 +31,7 @@ fn extend_selection_in_macro( } fn find_macro_call(node: &SyntaxNode, range: TextRange) -> Option<&ast::MacroCall> { - find_covering_node(node, range) - .ancestors() - 
.find_map(ast::MacroCall::cast) + find_covering_node(node, range).ancestors().find_map(ast::MacroCall::cast) } #[cfg(test)] diff --git a/crates/ra_ide_api/src/goto_definition.rs b/crates/ra_ide_api/src/goto_definition.rs index 69f2d2bf62..413720960a 100644 --- a/crates/ra_ide_api/src/goto_definition.rs +++ b/crates/ra_ide_api/src/goto_definition.rs @@ -50,18 +50,13 @@ pub(crate) fn reference_definition( hir::source_binder::function_from_child_node(db, file_id, name_ref.syntax()) { // Check if it is a method - if let Some(method_call) = name_ref - .syntax() - .parent() - .and_then(ast::MethodCallExpr::cast) - { + if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) { tested_by!(goto_definition_works_for_methods); let infer_result = function.infer(db); let syntax_mapping = function.body_syntax_mapping(db); let expr = ast::Expr::cast(method_call.syntax()).unwrap(); - if let Some(func) = syntax_mapping - .node_expr(expr) - .and_then(|it| infer_result.method_resolution(it)) + if let Some(func) = + syntax_mapping.node_expr(expr).and_then(|it| infer_result.method_resolution(it)) { return Exact(NavigationTarget::from_function(db, func)); }; @@ -72,9 +67,8 @@ pub(crate) fn reference_definition( let infer_result = function.infer(db); let syntax_mapping = function.body_syntax_mapping(db); let expr = ast::Expr::cast(field_expr.syntax()).unwrap(); - if let Some(field) = syntax_mapping - .node_expr(expr) - .and_then(|it| infer_result.field_resolution(it)) + if let Some(field) = + syntax_mapping.node_expr(expr).and_then(|it| infer_result.field_resolution(it)) { return Exact(NavigationTarget::from_field(db, field)); }; @@ -82,29 +76,19 @@ pub(crate) fn reference_definition( } // Try name resolution let resolver = hir::source_binder::resolver_for_node(db, file_id, name_ref.syntax()); - if let Some(path) = name_ref - .syntax() - .ancestors() - .find_map(ast::Path::cast) - .and_then(hir::Path::from_ast) + if let Some(path) = + name_ref.syntax().ancestors().find_map(ast::Path::cast).and_then(hir::Path::from_ast) { let resolved = resolver.resolve_path(db, &path); - match resolved - .clone() - .take_types() - .or_else(|| resolved.take_values()) - { + match resolved.clone().take_types().or_else(|| resolved.take_values()) { Some(Resolution::Def(def)) => return Exact(NavigationTarget::from_def(db, def)), Some(Resolution::LocalBinding(pat)) => { let body = resolver.body().expect("no body for local binding"); let syntax_mapping = body.syntax_mapping(db); - let ptr = syntax_mapping - .pat_syntax(pat) - .expect("pattern not found in syntax mapping"); - let name = path - .as_ident() - .cloned() - .expect("local binding from a multi-segment path"); + let ptr = + syntax_mapping.pat_syntax(pat).expect("pattern not found in syntax mapping"); + let name = + path.as_ident().cloned().expect("local binding from a multi-segment path"); let nav = NavigationTarget::from_scope_entry(file_id, name, ptr); return Exact(nav); } diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs index f993a461c3..60b81567c7 100644 --- a/crates/ra_ide_api/src/hover.rs +++ b/crates/ra_ide_api/src/hover.rs @@ -33,13 +33,9 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option Analysis { - Analysis { - db: self.db.snapshot(), - } + Analysis { db: self.db.snapshot() } } /// Applies changes to the current state of the world. 
If there are @@ -401,17 +392,12 @@ impl Analysis { impl SourceChange { pub(crate) fn from_local_edit(file_id: FileId, edit: LocalEdit) -> SourceChange { - let file_edit = SourceFileEdit { - file_id, - edit: edit.edit, - }; + let file_edit = SourceFileEdit { file_id, edit: edit.edit }; SourceChange { label: edit.label, source_file_edits: vec![file_edit], file_system_edits: vec![], - cursor_position: edit - .cursor_position - .map(|offset| FilePosition { offset, file_id }), + cursor_position: edit.cursor_position.map(|offset| FilePosition { offset, file_id }), } } } diff --git a/crates/ra_ide_api/src/mock_analysis.rs b/crates/ra_ide_api/src/mock_analysis.rs index 834b305415..8d86030625 100644 --- a/crates/ra_ide_api/src/mock_analysis.rs +++ b/crates/ra_ide_api/src/mock_analysis.rs @@ -41,10 +41,7 @@ impl MockAnalysis { let mut res = MockAnalysis::new(); for entry in parse_fixture(fixture) { if entry.text.contains(CURSOR_MARKER) { - assert!( - position.is_none(), - "only one marker (<|>) per fixture is allowed" - ); + assert!(position.is_none(), "only one marker (<|>) per fixture is allowed"); position = Some(res.add_file_with_position(&entry.meta, &entry.text)); } else { res.add_file(&entry.meta, &entry.text); @@ -97,9 +94,7 @@ impl MockAnalysis { let other_crate = crate_graph.add_crate_root(file_id); let crate_name = path.parent().unwrap().file_name().unwrap(); if let Some(root_crate) = root_crate { - crate_graph - .add_dep(root_crate, crate_name.into(), other_crate) - .unwrap(); + crate_graph.add_dep(root_crate, crate_name.into(), other_crate).unwrap(); } } change.add_file(source_root, file_id, path, Arc::new(contents)); diff --git a/crates/ra_ide_api/src/navigation_target.rs b/crates/ra_ide_api/src/navigation_target.rs index 617908aedf..a2e4b65068 100644 --- a/crates/ra_ide_api/src/navigation_target.rs +++ b/crates/ra_ide_api/src/navigation_target.rs @@ -72,10 +72,7 @@ impl NavigationTarget { pub(crate) fn from_module(db: &RootDatabase, module: hir::Module) -> NavigationTarget { let (file_id, source) = module.definition_source(db); - let name = module - .name(db) - .map(|it| it.to_string().into()) - .unwrap_or_default(); + let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default(); match source { ModuleSource::SourceFile(node) => { NavigationTarget::from_syntax(file_id, name, None, node.syntax()) @@ -87,10 +84,7 @@ impl NavigationTarget { } pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget { - let name = module - .name(db) - .map(|it| it.to_string().into()) - .unwrap_or_default(); + let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default(); if let Some((file_id, source)) = module.declaration_source(db) { return NavigationTarget::from_syntax(file_id, name, None, source.syntax()); } diff --git a/crates/ra_ide_api/src/references.rs b/crates/ra_ide_api/src/references.rs index b129f31349..2cb1cc9be0 100644 --- a/crates/ra_ide_api/src/references.rs +++ b/crates/ra_ide_api/src/references.rs @@ -305,9 +305,7 @@ mod tests { } } } - let result = text_edit_bulder - .finish() - .apply(&*analysis.file_text(file_id.unwrap())); + let result = text_edit_bulder.finish().apply(&*analysis.file_text(file_id.unwrap())); assert_eq_text!(expected, &*result); } } diff --git a/crates/ra_ide_api/src/runnables.rs b/crates/ra_ide_api/src/runnables.rs index dc8c40ea64..d64b5a4e06 100644 --- a/crates/ra_ide_api/src/runnables.rs +++ b/crates/ra_ide_api/src/runnables.rs @@ -23,11 +23,7 @@ pub enum RunnableKind { pub(crate) fn 
runnables(db: &RootDatabase, file_id: FileId) -> Vec { let source_file = db.parse(file_id); - source_file - .syntax() - .descendants() - .filter_map(|i| runnable(db, file_id, i)) - .collect() + source_file.syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect() } fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option { @@ -45,20 +41,13 @@ fn runnable_fn(fn_def: &ast::FnDef) -> Option { let kind = if name == "main" { RunnableKind::Bin } else if fn_def.has_atom_attr("test") { - RunnableKind::Test { - name: name.to_string(), - } + RunnableKind::Test { name: name.to_string() } } else if fn_def.has_atom_attr("bench") { - RunnableKind::Bench { - name: name.to_string(), - } + RunnableKind::Bench { name: name.to_string() } } else { return None; }; - Some(Runnable { - range: fn_def.syntax().range(), - kind, - }) + Some(Runnable { range: fn_def.syntax().range(), kind }) } fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option { @@ -77,16 +66,8 @@ fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Opt let module = hir::source_binder::module_from_child_node(db, file_id, module.syntax())?; // FIXME: thread cancellation instead of `.ok`ing - let path = module - .path_to_root(db) - .into_iter() - .rev() - .filter_map(|it| it.name(db)) - .join("::"); - Some(Runnable { - range, - kind: RunnableKind::TestMod { path }, - }) + let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::"); + Some(Runnable { range, kind: RunnableKind::TestMod { path } }) } #[cfg(test)] diff --git a/crates/ra_ide_api/src/status.rs b/crates/ra_ide_api/src/status.rs index bd355dd781..e0fc1c123c 100644 --- a/crates/ra_ide_api/src/status.rs +++ b/crates/ra_ide_api/src/status.rs @@ -22,9 +22,7 @@ pub(crate) fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { pub(crate) fn status(db: &RootDatabase) -> String { let files_stats = db.query(FileTextQuery).entries::(); let syntax_tree_stats = syntax_tree_stats(db); - let symbols_stats = db - .query(LibrarySymbolsQuery) - .entries::(); + let symbols_stats = db.query(LibrarySymbolsQuery).entries::(); let n_defs = { let interner: &hir::HirInterner = db.as_ref(); interner.len() @@ -75,11 +73,7 @@ pub(crate) struct SyntaxTreeStats { impl fmt::Display for SyntaxTreeStats { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!( - fmt, - "{} trees, {} ({}) retained", - self.total, self.retained, self.retained_size, - ) + write!(fmt, "{} trees, {} ({}) retained", self.total, self.retained, self.retained_size,) } } @@ -144,20 +138,13 @@ impl MemoryStats { #[cfg(not(feature = "jemalloc"))] fn current() -> MemoryStats { - MemoryStats { - allocated: Bytes(0), - resident: Bytes(0), - } + MemoryStats { allocated: Bytes(0), resident: Bytes(0) } } } impl fmt::Display for MemoryStats { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!( - fmt, - "{} allocated {} resident", - self.allocated, self.resident, - ) + write!(fmt, "{} allocated {} resident", self.allocated, self.resident,) } } diff --git a/crates/ra_ide_api/src/symbol_index.rs b/crates/ra_ide_api/src/symbol_index.rs index 3d0b2369ed..de0f461340 100644 --- a/crates/ra_ide_api/src/symbol_index.rs +++ b/crates/ra_ide_api/src/symbol_index.rs @@ -101,10 +101,7 @@ pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Vec } let snap = Snap(db.snapshot()); - files - .par_iter() - .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) - .collect() + files.par_iter().map_with(snap, 
|db, &file_id| db.0.file_symbols(file_id)).collect() }; query.search(&buf) } diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs index 6c4391e1e8..a435fe56e6 100644 --- a/crates/ra_ide_api/src/syntax_highlighting.rs +++ b/crates/ra_ide_api/src/syntax_highlighting.rs @@ -9,20 +9,12 @@ use crate::{ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec { let source_file = db.parse(file_id); let mut res = ra_ide_api_light::highlight(source_file.syntax()); - for macro_call in source_file - .syntax() - .descendants() - .filter_map(ast::MacroCall::cast) - { + for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) { if let Some((off, exp)) = hir::MacroDef::ast_expand(macro_call) { - let mapped_ranges = ra_ide_api_light::highlight(&exp.syntax()) - .into_iter() - .filter_map(|r| { + let mapped_ranges = + ra_ide_api_light::highlight(&exp.syntax()).into_iter().filter_map(|r| { let mapped_range = exp.map_range_back(r.range)?; - let res = HighlightedRange { - range: mapped_range + off, - tag: r.tag, - }; + let res = HighlightedRange { range: mapped_range + off, tag: r.tag }; Some(res) }); res.extend(mapped_ranges); diff --git a/crates/ra_ide_api_light/src/diagnostics.rs b/crates/ra_ide_api_light/src/diagnostics.rs index 2b695dfdfc..7c383ca2af 100644 --- a/crates/ra_ide_api_light/src/diagnostics.rs +++ b/crates/ra_ide_api_light/src/diagnostics.rs @@ -72,14 +72,7 @@ fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement( single_use_tree: &ast::UseTree, ) -> Option { let use_tree_list_node = single_use_tree.syntax().parent()?; - if single_use_tree - .path()? - .segment()? - .syntax() - .first_child()? - .kind() - == SyntaxKind::SELF_KW - { + if single_use_tree.path()?.segment()?.syntax().first_child()?.kind() == SyntaxKind::SELF_KW { let start = use_tree_list_node.prev_sibling()?.range().start(); let end = use_tree_list_node.range().end(); let range = TextRange::from_to(start, end); @@ -145,9 +138,8 @@ mod tests { for node in file.syntax().descendants() { func(&mut diagnostics, node); } - let diagnostic = diagnostics - .pop() - .unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before)); + let diagnostic = + diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before)); let fix = diagnostic.fix.unwrap(); let actual = fix.edit.apply(&before); assert_eq_text!(after, &actual); @@ -162,21 +154,9 @@ mod tests { ", check_unnecessary_braces_in_use_statement, ); - check_apply( - "use {b};", - "use b;", - check_unnecessary_braces_in_use_statement, - ); - check_apply( - "use a::{c};", - "use a::c;", - check_unnecessary_braces_in_use_statement, - ); - check_apply( - "use a::{self};", - "use a;", - check_unnecessary_braces_in_use_statement, - ); + check_apply("use {b};", "use b;", check_unnecessary_braces_in_use_statement); + check_apply("use a::{c};", "use a::c;", check_unnecessary_braces_in_use_statement); + check_apply("use a::{self};", "use a;", check_unnecessary_braces_in_use_statement); check_apply( "use a::{c, d::{e}};", "use a::{c, d::e};", diff --git a/crates/ra_ide_api_light/src/extend_selection.rs b/crates/ra_ide_api_light/src/extend_selection.rs index f396dfe3fd..28d62f2905 100644 --- a/crates/ra_ide_api_light/src/extend_selection.rs +++ b/crates/ra_ide_api_light/src/extend_selection.rs @@ -43,11 +43,7 @@ pub fn extend_selection(root: &SyntaxNode, range: TextRange) -> Option Option { } if let Some(comma_node) = nearby_comma(node, Direction::Prev) { - return 
Some(TextRange::from_to( - comma_node.range().start(), - node.range().end(), - )); + return Some(TextRange::from_to(comma_node.range().start(), node.range().end())); } if let Some(comma_node) = nearby_comma(node, Direction::Next) { @@ -160,10 +153,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option { .filter(|node| is_single_line_ws(node)) .unwrap_or(comma_node); - return Some(TextRange::from_to( - node.range().start(), - final_node.range().end(), - )); + return Some(TextRange::from_to(node.range().start(), final_node.range().end())); } return None; @@ -217,36 +207,15 @@ mod tests { #[test] fn test_extend_selection_list() { do_check(r#"fn foo(<|>x: i32) {}"#, &["x", "x: i32"]); - do_check( - r#"fn foo(<|>x: i32, y: i32) {}"#, - &["x", "x: i32", "x: i32, "], - ); - do_check( - r#"fn foo(<|>x: i32,y: i32) {}"#, - &["x", "x: i32", "x: i32,"], - ); - do_check( - r#"fn foo(x: i32, <|>y: i32) {}"#, - &["y", "y: i32", ", y: i32"], - ); - do_check( - r#"fn foo(x: i32, <|>y: i32, ) {}"#, - &["y", "y: i32", ", y: i32"], - ); - do_check( - r#"fn foo(x: i32,<|>y: i32) {}"#, - &["y", "y: i32", ",y: i32"], - ); + do_check(r#"fn foo(<|>x: i32, y: i32) {}"#, &["x", "x: i32", "x: i32, "]); + do_check(r#"fn foo(<|>x: i32,y: i32) {}"#, &["x", "x: i32", "x: i32,"]); + do_check(r#"fn foo(x: i32, <|>y: i32) {}"#, &["y", "y: i32", ", y: i32"]); + do_check(r#"fn foo(x: i32, <|>y: i32, ) {}"#, &["y", "y: i32", ", y: i32"]); + do_check(r#"fn foo(x: i32,<|>y: i32) {}"#, &["y", "y: i32", ",y: i32"]); - do_check( - r#"const FOO: [usize; 2] = [ 22<|> , 33];"#, - &["22", "22 , "], - ); + do_check(r#"const FOO: [usize; 2] = [ 22<|> , 33];"#, &["22", "22 , "]); do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|>];"#, &["33", ", 33"]); - do_check( - r#"const FOO: [usize; 2] = [ 22 , 33<|> ,];"#, - &["33", ", 33"], - ); + do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|> ,];"#, &["33", ", 33"]); do_check( r#" @@ -292,11 +261,7 @@ struct B { <|> } "#, - &[ - "\n \n", - "{\n \n}", - "/// bla\n/// bla\nstruct B {\n \n}", - ], + &["\n \n", "{\n \n}", "/// bla\n/// bla\nstruct B {\n \n}"], ) } diff --git a/crates/ra_ide_api_light/src/folding_ranges.rs b/crates/ra_ide_api_light/src/folding_ranges.rs index c73637323c..357a7dee1e 100644 --- a/crates/ra_ide_api_light/src/folding_ranges.rs +++ b/crates/ra_ide_api_light/src/folding_ranges.rs @@ -30,30 +30,21 @@ pub fn folding_ranges(file: &SourceFile) -> Vec { // Fold items that span multiple lines if let Some(kind) = fold_kind(node.kind()) { if node.text().contains('\n') { - res.push(Fold { - range: node.range(), - kind, - }); + res.push(Fold { range: node.range(), kind }); } } // Fold groups of comments if node.kind() == COMMENT && !visited_comments.contains(&node) { if let Some(range) = contiguous_range_for_comment(node, &mut visited_comments) { - res.push(Fold { - range, - kind: FoldKind::Comment, - }) + res.push(Fold { range, kind: FoldKind::Comment }) } } // Fold groups of imports if node.kind() == USE_ITEM && !visited_imports.contains(&node) { if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) { - res.push(Fold { - range, - kind: FoldKind::Imports, - }) + res.push(Fold { range, kind: FoldKind::Imports }) } } @@ -62,10 +53,7 @@ pub fn folding_ranges(file: &SourceFile) -> Vec { if let Some(range) = contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods) { - res.push(Fold { - range, - kind: FoldKind::Mods, - }) + res.push(Fold { range, kind: FoldKind::Mods }) } } } @@ -84,9 +72,7 @@ fn fold_kind(kind: SyntaxKind) -> Option { } fn 
has_visibility(node: &SyntaxNode) -> bool { - ast::Module::cast(node) - .and_then(|m| m.visibility()) - .is_some() + ast::Module::cast(node).and_then(|m| m.visibility()).is_some() } fn contiguous_range_for_group<'a>( @@ -125,10 +111,7 @@ fn contiguous_range_for_group_unless<'a>( } if first != last { - Some(TextRange::from_to( - first.range().start(), - last.range().end(), - )) + Some(TextRange::from_to(first.range().start(), last.range().end())) } else { // The group consists of only one element, therefore it cannot be folded None @@ -169,10 +152,7 @@ fn contiguous_range_for_comment<'a>( } if first != last { - Some(TextRange::from_to( - first.range().start(), - last.range().end(), - )) + Some(TextRange::from_to(first.range().start(), last.range().end())) } else { // The group consists of only one element, therefore it cannot be folded None @@ -199,10 +179,8 @@ mod tests { fold_kinds.len(), "The amount of fold kinds is different than the expected amount" ); - for ((fold, range), fold_kind) in folds - .into_iter() - .zip(ranges.into_iter()) - .zip(fold_kinds.into_iter()) + for ((fold, range), fold_kind) in + folds.into_iter().zip(ranges.into_iter()).zip(fold_kinds.into_iter()) { assert_eq!(fold.range.start(), range.start()); assert_eq!(fold.range.end(), range.end()); @@ -280,12 +258,7 @@ mod with_attribute_next; fn main() { }"#; - let folds = &[ - FoldKind::Mods, - FoldKind::Mods, - FoldKind::Mods, - FoldKind::Block, - ]; + let folds = &[FoldKind::Mods, FoldKind::Mods, FoldKind::Mods, FoldKind::Block]; do_check(text, folds); } diff --git a/crates/ra_ide_api_light/src/formatting.rs b/crates/ra_ide_api_light/src/formatting.rs index 46ffa7d960..8bc03f974a 100644 --- a/crates/ra_ide_api_light/src/formatting.rs +++ b/crates/ra_ide_api_light/src/formatting.rs @@ -32,10 +32,7 @@ fn prev_leaves(node: &SyntaxNode) -> impl Iterator { } fn prev_leaf(node: &SyntaxNode) -> Option<&SyntaxNode> { - generate(node.ancestors().find_map(SyntaxNode::prev_sibling), |it| { - it.last_child() - }) - .last() + generate(node.ancestors().find_map(SyntaxNode::prev_sibling), |it| it.last_child()).last() } pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> { diff --git a/crates/ra_ide_api_light/src/join_lines.rs b/crates/ra_ide_api_light/src/join_lines.rs index ab7c5b4b5e..03770c52e4 100644 --- a/crates/ra_ide_api_light/src/join_lines.rs +++ b/crates/ra_ide_api_light/src/join_lines.rs @@ -50,11 +50,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> LocalEdit { } } - LocalEdit { - label: "join lines".to_string(), - edit: edit.finish(), - cursor_position: None, - } + LocalEdit { label: "join lines".to_string(), edit: edit.finish(), cursor_position: None } } fn remove_newline( @@ -71,10 +67,7 @@ fn remove_newline( )]; let spaces = suff.bytes().take_while(|&b| b == b' ').count(); - edit.replace( - TextRange::offset_len(offset, ((spaces + 1) as u32).into()), - " ".to_string(), - ); + edit.replace(TextRange::offset_len(offset, ((spaces + 1) as u32).into()), " ".to_string()); return; } @@ -109,11 +102,7 @@ fn remove_newline( edit.delete(TextRange::from_to(prev.range().start(), node.range().end())); } else if prev.kind() == COMMA && next.kind() == R_CURLY { // Removes: comma, newline (incl. 
surrounding whitespace) - let space = if let Some(left) = prev.prev_sibling() { - compute_ws(left, next) - } else { - " " - }; + let space = if let Some(left) = prev.prev_sibling() { compute_ws(left, next) } else { " " }; edit.replace( TextRange::from_to(prev.range().start(), node.range().end()), space.to_string(), @@ -134,20 +123,14 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Opti let block = ast::Block::cast(node.parent()?)?; let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; let expr = extract_trivial_expression(block)?; - edit.replace( - block_expr.syntax().range(), - expr.syntax().text().to_string(), - ); + edit.replace(block_expr.syntax().range(), expr.syntax().text().to_string()); Some(()) } fn join_single_use_tree(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> { let use_tree_list = ast::UseTreeList::cast(node.parent()?)?; let (tree,) = use_tree_list.use_trees().collect_tuple()?; - edit.replace( - use_tree_list.syntax().range(), - tree.syntax().text().to_string(), - ); + edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string()); Some(()) } diff --git a/crates/ra_ide_api_light/src/lib.rs b/crates/ra_ide_api_light/src/lib.rs index 17044270c6..f3078f51e5 100644 --- a/crates/ra_ide_api_light/src/lib.rs +++ b/crates/ra_ide_api_light/src/lib.rs @@ -63,9 +63,8 @@ pub struct Diagnostic { } pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option { - const BRACES: &[SyntaxKind] = &[ - L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE, - ]; + const BRACES: &[SyntaxKind] = + &[L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE]; let (brace_node, brace_idx) = find_leaf_at_offset(file.syntax(), offset) .filter_map(|node| { let idx = BRACES.iter().position(|&brace| brace == node.kind())?; @@ -74,9 +73,7 @@ pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option { .next()?; let parent = brace_node.parent()?; let matching_kind = BRACES[brace_idx ^ 1]; - let matching_node = parent - .children() - .find(|node| node.kind() == matching_kind)?; + let matching_node = parent.children().find(|node| node.kind() == matching_kind)?; Some(matching_node.range().start()) } @@ -122,10 +119,7 @@ pub fn highlight(root: &SyntaxNode) -> Vec { continue; } }; - res.push(HighlightedRange { - range: node.range(), - tag, - }) + res.push(HighlightedRange { range: node.range(), tag }) } res } diff --git a/crates/ra_ide_api_light/src/line_index.rs b/crates/ra_ide_api_light/src/line_index.rs index 1317377434..bf004c33ad 100644 --- a/crates/ra_ide_api_light/src/line_index.rs +++ b/crates/ra_ide_api_light/src/line_index.rs @@ -54,10 +54,7 @@ impl LineIndex { let char_len = TextUnit::of_char(c); if char_len.to_usize() > 1 { - utf16_chars.push(Utf16Char { - start: curr_col, - end: curr_col + char_len, - }); + utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + char_len }); } curr_col += char_len; @@ -68,10 +65,7 @@ impl LineIndex { utf16_lines.insert(line, utf16_chars); } - LineIndex { - newlines, - utf16_lines, - } + LineIndex { newlines, utf16_lines } } pub fn line_col(&self, offset: TextUnit) -> LineCol { @@ -79,10 +73,7 @@ impl LineIndex { let line_start_offset = self.newlines[line]; let col = offset - line_start_offset; - LineCol { - line: line as u32, - col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32, - } + LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 } } pub fn offset(&self, line_col: LineCol) -> TextUnit { @@ 
-131,10 +122,7 @@ impl LineIndex { #[cfg(test)] /// Simple reference implementation to use in proptests pub fn to_line_col(text: &str, offset: TextUnit) -> LineCol { - let mut res = LineCol { - line: 0, - col_utf16: 0, - }; + let mut res = LineCol { line: 0, col_utf16: 0 }; for (i, c) in text.char_indices() { if i + c.len_utf8() > offset.to_usize() { // if it's an invalid offset, inside a multibyte char @@ -161,120 +149,31 @@ mod test_line_index { fn test_line_index() { let text = "hello\nworld"; let index = LineIndex::new(text); - assert_eq!( - index.line_col(0.into()), - LineCol { - line: 0, - col_utf16: 0 - } - ); - assert_eq!( - index.line_col(1.into()), - LineCol { - line: 0, - col_utf16: 1 - } - ); - assert_eq!( - index.line_col(5.into()), - LineCol { - line: 0, - col_utf16: 5 - } - ); - assert_eq!( - index.line_col(6.into()), - LineCol { - line: 1, - col_utf16: 0 - } - ); - assert_eq!( - index.line_col(7.into()), - LineCol { - line: 1, - col_utf16: 1 - } - ); - assert_eq!( - index.line_col(8.into()), - LineCol { - line: 1, - col_utf16: 2 - } - ); - assert_eq!( - index.line_col(10.into()), - LineCol { - line: 1, - col_utf16: 4 - } - ); - assert_eq!( - index.line_col(11.into()), - LineCol { - line: 1, - col_utf16: 5 - } - ); - assert_eq!( - index.line_col(12.into()), - LineCol { - line: 1, - col_utf16: 6 - } - ); + assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); + assert_eq!(index.line_col(1.into()), LineCol { line: 0, col_utf16: 1 }); + assert_eq!(index.line_col(5.into()), LineCol { line: 0, col_utf16: 5 }); + assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 0 }); + assert_eq!(index.line_col(7.into()), LineCol { line: 1, col_utf16: 1 }); + assert_eq!(index.line_col(8.into()), LineCol { line: 1, col_utf16: 2 }); + assert_eq!(index.line_col(10.into()), LineCol { line: 1, col_utf16: 4 }); + assert_eq!(index.line_col(11.into()), LineCol { line: 1, col_utf16: 5 }); + assert_eq!(index.line_col(12.into()), LineCol { line: 1, col_utf16: 6 }); let text = "\nhello\nworld"; let index = LineIndex::new(text); - assert_eq!( - index.line_col(0.into()), - LineCol { - line: 0, - col_utf16: 0 - } - ); - assert_eq!( - index.line_col(1.into()), - LineCol { - line: 1, - col_utf16: 0 - } - ); - assert_eq!( - index.line_col(2.into()), - LineCol { - line: 1, - col_utf16: 1 - } - ); - assert_eq!( - index.line_col(6.into()), - LineCol { - line: 1, - col_utf16: 5 - } - ); - assert_eq!( - index.line_col(7.into()), - LineCol { - line: 2, - col_utf16: 0 - } - ); + assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); + assert_eq!(index.line_col(1.into()), LineCol { line: 1, col_utf16: 0 }); + assert_eq!(index.line_col(2.into()), LineCol { line: 1, col_utf16: 1 }); + assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 5 }); + assert_eq!(index.line_col(7.into()), LineCol { line: 2, col_utf16: 0 }); } fn arb_text_with_offset() -> BoxedStrategy<(TextUnit, String)> { - arb_text() - .prop_flat_map(|text| (arb_offset(&text), Just(text))) - .boxed() + arb_text().prop_flat_map(|text| (arb_offset(&text), Just(text))).boxed() } fn to_line_col(text: &str, offset: TextUnit) -> LineCol { - let mut res = LineCol { - line: 0, - col_utf16: 0, - }; + let mut res = LineCol { line: 0, col_utf16: 0 }; for (i, c) in text.char_indices() { if i + c.len_utf8() > offset.to_usize() { // if it's an invalid offset, inside a multibyte char @@ -333,13 +232,7 @@ const C: char = 'メ'; assert_eq!(col_index.utf16_lines.len(), 1); assert_eq!(col_index.utf16_lines[&1].len(), 
1); - assert_eq!( - col_index.utf16_lines[&1][0], - Utf16Char { - start: 17.into(), - end: 20.into() - } - ); + assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); // UTF-8 to UTF-16, no changes assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); @@ -364,20 +257,8 @@ const C: char = \"メ メ\"; assert_eq!(col_index.utf16_lines.len(), 1); assert_eq!(col_index.utf16_lines[&1].len(), 2); - assert_eq!( - col_index.utf16_lines[&1][0], - Utf16Char { - start: 17.into(), - end: 20.into() - } - ); - assert_eq!( - col_index.utf16_lines[&1][1], - Utf16Char { - start: 21.into(), - end: 24.into() - } - ); + assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); + assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() }); // UTF-8 to UTF-16 assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); diff --git a/crates/ra_ide_api_light/src/line_index_utils.rs b/crates/ra_ide_api_light/src/line_index_utils.rs index 5d9ab6fd27..799a920ad7 100644 --- a/crates/ra_ide_api_light/src/line_index_utils.rs +++ b/crates/ra_ide_api_light/src/line_index_utils.rs @@ -17,11 +17,7 @@ struct LineIndexStepIter<'a> { impl<'a> LineIndexStepIter<'a> { fn from(line_index: &LineIndex) -> LineIndexStepIter { - let mut x = LineIndexStepIter { - line_index, - next_newline_idx: 0, - utf16_chars: None, - }; + let mut x = LineIndexStepIter { line_index, next_newline_idx: 0, utf16_chars: None }; // skip first newline since it's not real x.next(); x @@ -35,10 +31,7 @@ impl<'a> Iterator for LineIndexStepIter<'a> { .as_mut() .and_then(|(newline, x)| { let x = x.next()?; - Some(Step::Utf16Char(TextRange::from_to( - *newline + x.start, - *newline + x.end, - ))) + Some(Step::Utf16Char(TextRange::from_to(*newline + x.start, *newline + x.end))) }) .or_else(|| { let next_newline = *self.line_index.newlines.get(self.next_newline_idx)?; @@ -113,11 +106,7 @@ struct Edits<'a> { impl<'a> Edits<'a> { fn from_text_edit(text_edit: &'a TextEdit) -> Edits<'a> { - let mut x = Edits { - edits: text_edit.as_atoms(), - current: None, - acc_diff: 0, - }; + let mut x = Edits { edits: text_edit.as_atoms(), current: None, acc_diff: 0 }; x.advance_edit(); x } @@ -127,11 +116,7 @@ impl<'a> Edits<'a> { Some((next, rest)) => { let delete = self.translate_range(next.delete); let diff = next.insert.len() as i64 - next.delete.len().to_usize() as i64; - self.current = Some(TranslatedEdit { - delete, - insert: &next.insert, - diff, - }); + self.current = Some(TranslatedEdit { delete, insert: &next.insert, diff }); self.edits = rest; } None => { @@ -142,10 +127,7 @@ impl<'a> Edits<'a> { fn next_inserted_steps(&mut self) -> Option> { let cur = self.current.as_ref()?; - let res = Some(OffsetStepIter { - offset: cur.delete.start(), - text: &cur.insert, - }); + let res = Some(OffsetStepIter { offset: cur.delete.start(), text: &cur.insert }); self.advance_edit(); res } @@ -160,18 +142,12 @@ impl<'a> Edits<'a> { if step_pos <= edit.delete.start() { NextSteps::Use } else if step_pos <= edit.delete.end() { - let iter = OffsetStepIter { - offset: edit.delete.start(), - text: &edit.insert, - }; + let iter = OffsetStepIter { offset: edit.delete.start(), text: &edit.insert }; // empty slice to avoid returning steps again edit.insert = &edit.insert[edit.insert.len()..]; NextSteps::ReplaceMany(iter) } else { - let iter = OffsetStepIter { - offset: edit.delete.start(), - text: &edit.insert, - }; + let iter = OffsetStepIter { offset: edit.delete.start(), text: &edit.insert }; // 
empty slice to avoid returning steps again edit.insert = &edit.insert[edit.insert.len()..]; self.advance_edit(); @@ -222,11 +198,7 @@ struct RunningLineCol { impl RunningLineCol { fn new() -> RunningLineCol { - RunningLineCol { - line: 0, - last_newline: TextUnit::from(0), - col_adjust: TextUnit::from(0), - } + RunningLineCol { line: 0, last_newline: TextUnit::from(0), col_adjust: TextUnit::from(0) } } fn to_line_col(&self, offset: TextUnit) -> LineCol { @@ -339,12 +311,7 @@ mod test { let edited_text = x.edit.apply(&x.text); let arb_offset = arb_offset(&edited_text); (Just(x), Just(edited_text), arb_offset).prop_map(|(x, edited_text, offset)| { - ArbTextWithEditAndOffset { - text: x.text, - edit: x.edit, - edited_text, - offset, - } + ArbTextWithEditAndOffset { text: x.text, edit: x.edit, edited_text, offset } }) }) .boxed() diff --git a/crates/ra_ide_api_light/src/structure.rs b/crates/ra_ide_api_light/src/structure.rs index 330a3694cd..75afd11814 100644 --- a/crates/ra_ide_api_light/src/structure.rs +++ b/crates/ra_ide_api_light/src/structure.rs @@ -70,10 +70,7 @@ fn structure_node(node: &SyntaxNode) -> Option { node_range: node.syntax().range(), kind: node.syntax().kind(), detail, - deprecated: node - .attrs() - .filter_map(|x| x.as_named()) - .any(|x| x == "deprecated"), + deprecated: node.attrs().filter_map(|x| x.as_named()).any(|x| x == "deprecated"), }) } @@ -123,11 +120,9 @@ fn structure_node(node: &SyntaxNode) -> Option { let target_trait = im.target_trait(); let label = match target_trait { None => format!("impl {}", target_type.syntax().text()), - Some(t) => format!( - "impl {} for {}", - t.syntax().text(), - target_type.syntax().text(), - ), + Some(t) => { + format!("impl {} for {}", t.syntax().text(), target_type.syntax().text(),) + } }; let node = StructureNode { diff --git a/crates/ra_ide_api_light/src/typing.rs b/crates/ra_ide_api_light/src/typing.rs index 861027b9fb..a08a5a8c56 100644 --- a/crates/ra_ide_api_light/src/typing.rs +++ b/crates/ra_ide_api_light/src/typing.rs @@ -8,9 +8,8 @@ use ra_syntax::{ use crate::{LocalEdit, TextEditBuilder, formatting::leading_indent}; pub fn on_enter(file: &SourceFile, offset: TextUnit) -> Option { - let comment = find_leaf_at_offset(file.syntax(), offset) - .left_biased() - .and_then(ast::Comment::cast)?; + let comment = + find_leaf_at_offset(file.syntax(), offset).left_biased().and_then(ast::Comment::cast)?; if let ast::CommentFlavor::Multiline = comment.flavor() { return None; @@ -64,12 +63,7 @@ pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option if expr_range.contains(eq_offset) && eq_offset != expr_range.start() { return None; } - if file - .syntax() - .text() - .slice(eq_offset..expr_range.start()) - .contains('\n') - { + if file.syntax().text().slice(eq_offset..expr_range.start()).contains('\n') { return None; } } else { @@ -100,10 +94,7 @@ pub fn on_dot_typed(file: &SourceFile, dot_offset: TextUnit) -> Option ServerCapabilities { ServerCapabilities { - text_document_sync: Some(TextDocumentSyncCapability::Options( - TextDocumentSyncOptions { - open_close: Some(true), - change: Some(TextDocumentSyncKind::Full), - will_save: None, - will_save_wait_until: None, - save: None, - }, - )), + text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { + open_close: Some(true), + change: Some(TextDocumentSyncKind::Full), + will_save: None, + will_save_wait_until: None, + save: None, + })), hover_provider: Some(true), completion_provider: Some(CompletionOptions { resolve_provider: None, @@ -32,9 
+30,7 @@ pub fn server_capabilities() -> ServerCapabilities { document_symbol_provider: Some(true), workspace_symbol_provider: Some(true), code_action_provider: Some(CodeActionProviderCapability::Simple(true)), - code_lens_provider: Some(CodeLensOptions { - resolve_provider: Some(true), - }), + code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }), document_formatting_provider: Some(true), document_range_formatting_provider: None, document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions { diff --git a/crates/ra_lsp_server/src/cargo_target_spec.rs b/crates/ra_lsp_server/src/cargo_target_spec.rs index db9496bbe0..e011eab7cb 100644 --- a/crates/ra_lsp_server/src/cargo_target_spec.rs +++ b/crates/ra_lsp_server/src/cargo_target_spec.rs @@ -64,10 +64,7 @@ impl CargoTargetSpec { None => return Ok(None), }; let file_id = world.analysis().crate_root(crate_id)?; - let path = world - .vfs - .read() - .file2path(ra_vfs::VfsFile(file_id.0.into())); + let path = world.vfs.read().file2path(ra_vfs::VfsFile(file_id.0.into())); let res = world.workspaces.iter().find_map(|ws| { let tgt = ws.cargo.target_by_root(&path)?; let res = CargoTargetSpec { diff --git a/crates/ra_lsp_server/src/conv.rs b/crates/ra_lsp_server/src/conv.rs index 9813854662..20077a48af 100644 --- a/crates/ra_lsp_server/src/conv.rs +++ b/crates/ra_lsp_server/src/conv.rs @@ -82,11 +82,8 @@ impl ConvWith for CompletionItem { fn conv_with(mut self, ctx: &LineIndex) -> ::lsp_types::CompletionItem { let atom_text_edit = AtomTextEdit::replace(self.source_range(), self.insert_text()); let text_edit = (&atom_text_edit).conv_with(ctx); - let additional_text_edits = if let Some(edit) = self.take_text_edit() { - Some(edit.conv_with(ctx)) - } else { - None - }; + let additional_text_edits = + if let Some(edit) = self.take_text_edit() { Some(edit.conv_with(ctx)) } else { None }; let mut res = lsp_types::CompletionItem { label: self.label().to_string(), @@ -112,10 +109,7 @@ impl ConvWith for Position { type Output = TextUnit; fn conv_with(self, line_index: &LineIndex) -> TextUnit { - let line_col = LineCol { - line: self.line as u32, - col_utf16: self.character as u32, - }; + let line_col = LineCol { line: self.line as u32, col_utf16: self.character as u32 }; line_index.offset(line_col) } } @@ -135,10 +129,7 @@ impl ConvWith for TextRange { type Output = Range; fn conv_with(self, line_index: &LineIndex) -> Range { - Range::new( - self.start().conv_with(line_index), - self.end().conv_with(line_index), - ) + Range::new(self.start().conv_with(line_index), self.end().conv_with(line_index)) } } @@ -147,10 +138,7 @@ impl ConvWith for Range { type Output = TextRange; fn conv_with(self, line_index: &LineIndex) -> TextRange { - TextRange::from_to( - self.start.conv_with(line_index), - self.end.conv_with(line_index), - ) + TextRange::from_to(self.start.conv_with(line_index), self.end.conv_with(line_index)) } } @@ -303,11 +291,7 @@ impl TryConvWith for SourceChange { changes: None, document_changes: Some(DocumentChanges::Operations(document_changes)), }; - Ok(req::SourceChange { - label: self.label, - workspace_edit, - cursor_position, - }) + Ok(req::SourceChange { label: self.label, workspace_edit, cursor_position }) } } @@ -320,16 +304,8 @@ impl TryConvWith for SourceFileEdit { version: None, }; let line_index = world.analysis().file_line_index(self.file_id); - let edits = self - .edit - .as_atoms() - .iter() - .map_conv_with(&line_index) - .collect(); - Ok(TextDocumentEdit { - text_document, - edits, - }) + let edits = 
self.edit.as_atoms().iter().map_conv_with(&line_index).collect(); + Ok(TextDocumentEdit { text_document, edits }) } } @@ -342,18 +318,10 @@ impl TryConvWith for FileSystemEdit { let uri = world.path_to_uri(source_root, &path)?; ResourceOp::Create(CreateFile { uri, options: None }) } - FileSystemEdit::MoveFile { - src, - dst_source_root, - dst_path, - } => { + FileSystemEdit::MoveFile { src, dst_source_root, dst_path } => { let old_uri = world.file_id_to_uri(src)?; let new_uri = world.path_to_uri(dst_source_root, &dst_path)?; - ResourceOp::Rename(RenameFile { - old_uri, - new_uri, - options: None, - }) + ResourceOp::Rename(RenameFile { old_uri, new_uri, options: None }) } }; Ok(res) @@ -381,11 +349,8 @@ pub fn to_location_link( let target_range = target.info.full_range().conv_with(&tgt_line_index); - let target_selection_range = target - .info - .focus_range() - .map(|it| it.conv_with(&tgt_line_index)) - .unwrap_or(target_range); + let target_selection_range = + target.info.focus_range().map(|it| it.conv_with(&tgt_line_index)).unwrap_or(target_range); let res = LocationLink { origin_selection_range: Some(target.range.conv_with(line_index)), diff --git a/crates/ra_lsp_server/src/main.rs b/crates/ra_lsp_server/src/main.rs index 33aa30d70b..03f83c7bef 100644 --- a/crates/ra_lsp_server/src/main.rs +++ b/crates/ra_lsp_server/src/main.rs @@ -36,23 +36,15 @@ struct InitializationOptions { fn main_inner() -> Result<()> { let (receiver, sender, threads) = stdio_transport(); let cwd = ::std::env::current_dir()?; - run_server( - ra_lsp_server::server_capabilities(), - receiver, - sender, - |params, r, s| { - let root = params - .root_uri - .and_then(|it| it.to_file_path().ok()) - .unwrap_or(cwd); - let supports_decorations = params - .initialization_options - .and_then(|v| InitializationOptions::deserialize(v).ok()) - .and_then(|it| it.publish_decorations) - == Some(true); - ra_lsp_server::main_loop(false, root, supports_decorations, r, s) - }, - )?; + run_server(ra_lsp_server::server_capabilities(), receiver, sender, |params, r, s| { + let root = params.root_uri.and_then(|it| it.to_file_path().ok()).unwrap_or(cwd); + let supports_decorations = params + .initialization_options + .and_then(|v| InitializationOptions::deserialize(v).ok()) + .and_then(|it| it.publish_decorations) + == Some(true); + ra_lsp_server::main_loop(false, root, supports_decorations, r, s) + })?; log::info!("shutting down IO..."); threads.join()?; log::info!("... IO is down"); diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs index 26b6fe54aa..a512998518 100644 --- a/crates/ra_lsp_server/src/main_loop.rs +++ b/crates/ra_lsp_server/src/main_loop.rs @@ -25,10 +25,7 @@ use crate::{ }; #[derive(Debug, Fail)] -#[fail( - display = "Language Server request failed with {}. ({})", - code, message -)] +#[fail(display = "Language Server request failed with {}. 
({})", code, message)] pub struct LspError { pub code: i32, pub message: String, @@ -69,9 +66,7 @@ pub fn main_loop( } }; ws_worker.shutdown(); - ws_watcher - .shutdown() - .map_err(|_| format_err!("ws watcher died"))?; + ws_watcher.shutdown().map_err(|_| format_err!("ws watcher died"))?; let mut state = ServerWorldState::new(ws_root.clone(), workspaces); log::info!("server initialized, serving requests"); @@ -92,9 +87,7 @@ pub fn main_loop( ); log::info!("waiting for tasks to finish..."); - task_receiver - .into_iter() - .for_each(|task| on_task(task, msg_sender, &mut pending_requests)); + task_receiver.into_iter().for_each(|task| on_task(task, msg_sender, &mut pending_requests)); log::info!("...tasks have finished"); log::info!("joining threadpool..."); drop(pool); @@ -119,9 +112,7 @@ enum Event { impl fmt::Debug for Event { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let debug_verbose_not = |not: &RawNotification, f: &mut fmt::Formatter| { - f.debug_struct("RawNotification") - .field("method", ¬.method) - .finish() + f.debug_struct("RawNotification").field("method", ¬.method).finish() }; match self { @@ -287,13 +278,7 @@ fn on_request( sender: &Sender, req: RawRequest, ) -> Result> { - let mut pool_dispatcher = PoolDispatcher { - req: Some(req), - res: None, - pool, - world, - sender, - }; + let mut pool_dispatcher = PoolDispatcher { req: Some(req), res: None, pool, world, sender }; let req = pool_dispatcher .on::(handlers::handle_analyzer_status)? .on::(handlers::handle_syntax_tree)? @@ -362,13 +347,9 @@ fn on_notification( let not = match not.cast::() { Ok(params) => { let uri = params.text_document.uri; - let path = uri - .to_file_path() - .map_err(|()| format_err!("invalid uri: {}", uri))?; - if let Some(file_id) = state - .vfs - .write() - .add_file_overlay(&path, params.text_document.text) + let path = uri.to_file_path().map_err(|()| format_err!("invalid uri: {}", uri))?; + if let Some(file_id) = + state.vfs.write().add_file_overlay(&path, params.text_document.text) { subs.add_sub(FileId(file_id.0.into())); } @@ -379,14 +360,9 @@ fn on_notification( let not = match not.cast::() { Ok(mut params) => { let uri = params.text_document.uri; - let path = uri - .to_file_path() - .map_err(|()| format_err!("invalid uri: {}", uri))?; - let text = params - .content_changes - .pop() - .ok_or_else(|| format_err!("empty changes"))? 
- .text; + let path = uri.to_file_path().map_err(|()| format_err!("invalid uri: {}", uri))?; + let text = + params.content_changes.pop().ok_or_else(|| format_err!("empty changes"))?.text; state.vfs.write().change_file_overlay(path.as_path(), text); return Ok(()); } @@ -395,16 +371,11 @@ fn on_notification( let not = match not.cast::() { Ok(params) => { let uri = params.text_document.uri; - let path = uri - .to_file_path() - .map_err(|()| format_err!("invalid uri: {}", uri))?; + let path = uri.to_file_path().map_err(|()| format_err!("invalid uri: {}", uri))?; if let Some(file_id) = state.vfs.write().remove_file_overlay(path.as_path()) { subs.remove_sub(FileId(file_id.0.into())); } - let params = req::PublishDiagnosticsParams { - uri, - diagnostics: Vec::new(), - }; + let params = req::PublishDiagnosticsParams { uri, diagnostics: Vec::new() }; let not = RawNotification::new::(¶ms); msg_sender.send(RawMessage::Notification(not)).unwrap(); return Ok(()); diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs index aa55d12553..0cdb39c32f 100644 --- a/crates/ra_lsp_server/src/main_loop/handlers.rs +++ b/crates/ra_lsp_server/src/main_loop/handlers.rs @@ -46,12 +46,7 @@ pub fn handle_extend_selection( .into_iter() .map_conv_with(&line_index) .map(|range| FileRange { file_id, range }) - .map(|frange| { - world - .analysis() - .extend_selection(frange) - .map(|it| it.conv_with(&line_index)) - }) + .map(|frange| world.analysis().extend_selection(frange).map(|it| it.conv_with(&line_index))) .collect::>>()?; Ok(req::ExtendSelectionResult { selections }) } @@ -67,10 +62,7 @@ pub fn handle_find_matching_brace( .into_iter() .map_conv_with(&line_index) .map(|offset| { - world - .analysis() - .matching_brace(FilePosition { file_id, offset }) - .unwrap_or(offset) + world.analysis().matching_brace(FilePosition { file_id, offset }).unwrap_or(offset) }) .map_conv_with(&line_index) .collect(); @@ -171,11 +163,7 @@ pub fn handle_workspace_symbol( let all_symbols = params.query.contains('#'); let libs = params.query.contains('*'); let query = { - let query: String = params - .query - .chars() - .filter(|&c| c != '#' && c != '*') - .collect(); + let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect(); let mut q = Query::new(query); if !all_symbols { q.only_types(); @@ -367,10 +355,7 @@ pub fn handle_completion( Some(items) => items, }; let line_index = world.analysis().file_line_index(position.file_id); - let items = items - .into_iter() - .map(|item| item.conv_with(&line_index)) - .collect(); + let items = items.into_iter().map(|item| item.conv_with(&line_index)).collect(); Ok(Some(req::CompletionResponse::Array(items))) } @@ -496,9 +481,8 @@ pub fn handle_rename(world: ServerWorld, params: RenameParams) -> Result return Ok(None), Some(it) => it, @@ -517,14 +501,10 @@ pub fn handle_references( let line_index = world.analysis().file_line_index(file_id); let offset = params.position.conv_with(&line_index); - let refs = world - .analysis() - .find_all_refs(FilePosition { file_id, offset })?; + let refs = world.analysis().find_all_refs(FilePosition { file_id, offset })?; Ok(Some( - refs.into_iter() - .filter_map(|r| to_location(r.0, r.1, &world, &line_index).ok()) - .collect(), + refs.into_iter().filter_map(|r| to_location(r.0, r.1, &world, &line_index).ok()).collect(), )) } @@ -540,9 +520,7 @@ pub fn handle_formatting( use std::process; let mut rustfmt = process::Command::new("rustfmt"); - rustfmt - .stdin(process::Stdio::piped()) - 
.stdout(process::Stdio::piped()); + rustfmt.stdin(process::Stdio::piped()).stdout(process::Stdio::piped()); if let Ok(path) = params.text_document.uri.to_file_path() { if let Some(parent) = path.parent() { @@ -582,10 +560,7 @@ pub fn handle_code_action( let line_index = world.analysis().file_line_index(file_id); let range = params.range.conv_with(&line_index); - let assists = world - .analysis() - .assists(FileRange { file_id, range })? - .into_iter(); + let assists = world.analysis().assists(FileRange { file_id, range })?.into_iter(); let fixes = world .analysis() .diagnostics(file_id)? @@ -720,18 +695,11 @@ pub fn handle_code_lens_resolve(world: ServerWorld, code_lens: CodeLens) -> Resu to_value(locations).unwrap(), ]), }; - Ok(CodeLens { - range: code_lens.range, - command: Some(cmd), - data: None, - }) + Ok(CodeLens { range: code_lens.range, command: Some(cmd), data: None }) } None => Ok(CodeLens { range: code_lens.range, - command: Some(Command { - title: "Error".into(), - ..Default::default() - }), + command: Some(Command { title: "Error".into(), ..Default::default() }), data: None, }), } @@ -744,16 +712,11 @@ pub fn handle_document_highlight( let file_id = params.text_document.try_conv_with(&world)?; let line_index = world.analysis().file_line_index(file_id); - let refs = world - .analysis() - .find_all_refs(params.try_conv_with(&world)?)?; + let refs = world.analysis().find_all_refs(params.try_conv_with(&world)?)?; Ok(Some( refs.into_iter() - .map(|r| DocumentHighlight { - range: r.1.conv_with(&line_index), - kind: None, - }) + .map(|r| DocumentHighlight { range: r.1.conv_with(&line_index), kind: None }) .collect(), )) } @@ -785,10 +748,7 @@ pub fn publish_decorations( file_id: FileId, ) -> Result { let uri = world.file_id_to_uri(file_id)?; - Ok(req::PublishDecorationsParams { - uri, - decorations: highlight(&world, file_id)?, - }) + Ok(req::PublishDecorationsParams { uri, decorations: highlight(&world, file_id)? }) } fn highlight(world: &ServerWorld, file_id: FileId) -> Result> { @@ -797,10 +757,7 @@ fn highlight(world: &ServerWorld, file_id: FileId) -> Result> { .analysis() .highlight(file_id)? 
.into_iter() - .map(|h| Decoration { - range: h.range.conv_with(&line_index), - tag: h.tag, - }) + .map(|h| Decoration { range: h.range.conv_with(&line_index), tag: h.tag }) .collect(); Ok(res) } diff --git a/crates/ra_lsp_server/src/main_loop/subscriptions.rs b/crates/ra_lsp_server/src/main_loop/subscriptions.rs index a83e015573..11bd952d97 100644 --- a/crates/ra_lsp_server/src/main_loop/subscriptions.rs +++ b/crates/ra_lsp_server/src/main_loop/subscriptions.rs @@ -7,9 +7,7 @@ pub struct Subscriptions { impl Subscriptions { pub fn new() -> Subscriptions { - Subscriptions { - subs: FxHashSet::default(), - } + Subscriptions { subs: FxHashSet::default() } } pub fn add_sub(&mut self, file_id: FileId) { self.subs.insert(file_id); diff --git a/crates/ra_lsp_server/src/project_model/cargo_workspace.rs b/crates/ra_lsp_server/src/project_model/cargo_workspace.rs index 8cf99d5865..3b76389d23 100644 --- a/crates/ra_lsp_server/src/project_model/cargo_workspace.rs +++ b/crates/ra_lsp_server/src/project_model/cargo_workspace.rs @@ -118,14 +118,11 @@ impl Target { impl CargoWorkspace { pub fn from_cargo_metadata(cargo_toml: &Path) -> Result { let mut meta = MetadataCommand::new(); - meta.manifest_path(cargo_toml) - .features(CargoOpt::AllFeatures); + meta.manifest_path(cargo_toml).features(CargoOpt::AllFeatures); if let Some(parent) = cargo_toml.parent() { meta.current_dir(parent); } - let meta = meta - .exec() - .map_err(|e| format_err!("cargo metadata failed: {}", e))?; + let meta = meta.exec().map_err(|e| format_err!("cargo metadata failed: {}", e))?; let mut pkg_by_id = FxHashMap::default(); let mut packages = Arena::default(); let mut targets = Arena::default(); @@ -157,10 +154,8 @@ impl CargoWorkspace { for node in resolve.nodes { let source = pkg_by_id[&node.id]; for dep_node in node.deps { - let dep = PackageDependency { - name: dep_node.name.into(), - pkg: pkg_by_id[&dep_node.pkg], - }; + let dep = + PackageDependency { name: dep_node.name.into(), pkg: pkg_by_id[&dep_node.pkg] }; packages[source].dependencies.push(dep); } } @@ -171,8 +166,6 @@ impl CargoWorkspace { self.packages.iter().map(|(id, _pkg)| id) } pub fn target_by_root(&self, root: &Path) -> Option { - self.packages() - .filter_map(|pkg| pkg.targets(self).find(|it| it.root(self) == root)) - .next() + self.packages().filter_map(|pkg| pkg.targets(self).find(|it| it.root(self) == root)).next() } } diff --git a/crates/ra_lsp_server/src/project_model/sysroot.rs b/crates/ra_lsp_server/src/project_model/sysroot.rs index fb46856716..49210ac7aa 100644 --- a/crates/ra_lsp_server/src/project_model/sysroot.rs +++ b/crates/ra_lsp_server/src/project_model/sysroot.rs @@ -53,9 +53,7 @@ impl Sysroot { ); } - let mut sysroot = Sysroot { - crates: Arena::default(), - }; + let mut sysroot = Sysroot { crates: Arena::default() }; for name in SYSROOT_CRATES.trim().lines() { let root = src.join(format!("lib{}", name)).join("lib.rs"); if root.exists() { @@ -77,10 +75,7 @@ impl Sysroot { } fn by_name(&self, name: &str) -> Option { - self.crates - .iter() - .find(|(_id, data)| data.name == name) - .map(|(id, _data)| id) + self.crates.iter().find(|(_id, data)| data.name == name).map(|(id, _data)| id) } } diff --git a/crates/ra_lsp_server/src/server_world.rs b/crates/ra_lsp_server/src/server_world.rs index c2167c5d80..02f2a37a8c 100644 --- a/crates/ra_lsp_server/src/server_world.rs +++ b/crates/ra_lsp_server/src/server_world.rs @@ -80,10 +80,7 @@ impl ServerWorldState { } } - let libstd = ws - .sysroot - .std() - .and_then(|it| sysroot_crates.get(&it).map(|&it| 
it)); + let libstd = ws.sysroot.std().and_then(|it| sysroot_crates.get(&it).map(|&it| it)); let mut pkg_to_lib_crate = FxHashMap::default(); let mut pkg_crates = FxHashMap::default(); @@ -99,10 +96,7 @@ impl ServerWorldState { lib_tgt = Some(crate_id); pkg_to_lib_crate.insert(pkg, crate_id); } - pkg_crates - .entry(pkg) - .or_insert_with(Vec::new) - .push(crate_id); + pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id); } } @@ -192,18 +186,8 @@ impl ServerWorldState { libs.push((SourceRootId(root.0.into()), files)); } } - VfsChange::AddFile { - root, - file, - path, - text, - } => { - change.add_file( - SourceRootId(root.0.into()), - FileId(file.0.into()), - path, - text, - ); + VfsChange::AddFile { root, file, path, text } => { + change.add_file(SourceRootId(root.0.into()), FileId(file.0.into()), path, text); } VfsChange::RemoveFile { root, file, path } => { change.remove_file(SourceRootId(root.0.into()), FileId(file.0.into()), path) @@ -247,9 +231,7 @@ impl ServerWorld { } pub fn uri_to_file_id(&self, uri: &Url) -> Result { - let path = uri - .to_file_path() - .map_err(|()| format_err!("invalid uri: {}", uri))?; + let path = uri.to_file_path().map_err(|()| format_err!("invalid uri: {}", uri))?; let file = self .vfs .read() diff --git a/crates/ra_lsp_server/tests/heavy_tests/main.rs b/crates/ra_lsp_server/tests/heavy_tests/main.rs index bfb0645a8d..e49c871691 100644 --- a/crates/ra_lsp_server/tests/heavy_tests/main.rs +++ b/crates/ra_lsp_server/tests/heavy_tests/main.rs @@ -55,10 +55,7 @@ fn foo() { ); server.wait_for_feedback("workspace loaded"); server.request::( - RunnablesParams { - text_document: server.doc_id("lib.rs"), - position: None, - }, + RunnablesParams { text_document: server.doc_id("lib.rs"), position: None }, json!([ { "args": [ "test", "--", "foo", "--nocapture" ], @@ -220,10 +217,7 @@ fn main() {} "#, ); server.wait_for_feedback("workspace loaded"); - let empty_context = || CodeActionContext { - diagnostics: Vec::new(), - only: None, - }; + let empty_context = || CodeActionContext { diagnostics: Vec::new(), only: None }; server.request::( CodeActionParams { text_document: server.doc_id("src/lib.rs"), diff --git a/crates/ra_lsp_server/tests/heavy_tests/support.rs b/crates/ra_lsp_server/tests/heavy_tests/support.rs index 57a8b4f4df..eee85f8c89 100644 --- a/crates/ra_lsp_server/tests/heavy_tests/support.rs +++ b/crates/ra_lsp_server/tests/heavy_tests/support.rs @@ -83,9 +83,7 @@ impl Server { pub fn doc_id(&self, rel_path: &str) -> TextDocumentIdentifier { let path = self.dir.path().join(rel_path); - TextDocumentIdentifier { - uri: Url::from_file_path(path).unwrap(), - } + TextDocumentIdentifier { uri: Url::from_file_path(path).unwrap() } } pub fn request(&self, params: R::Params, expected_resp: Value) @@ -119,11 +117,7 @@ impl Server { } fn send_request_(&self, r: RawRequest) -> Value { let id = r.id; - self.worker - .as_ref() - .unwrap() - .send(RawMessage::Request(r)) - .unwrap(); + self.worker.as_ref().unwrap().send(RawMessage::Request(r)).unwrap(); while let Some(msg) = self.recv() { match msg { RawMessage::Request(req) => panic!("unexpected request: {:?}", req), @@ -169,11 +163,7 @@ impl Server { }) } fn send_notification(&self, not: RawNotification) { - self.worker - .as_ref() - .unwrap() - .send(RawMessage::Notification(not)) - .unwrap(); + self.worker.as_ref().unwrap().send(RawMessage::Notification(not)).unwrap(); } } diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs index 2c8ad4429d..b098378314 100644 --- a/crates/ra_mbe/src/lib.rs +++ 
b/crates/ra_mbe/src/lib.rs @@ -137,18 +137,12 @@ impl_froms!(TokenTree: Leaf, Subtree); "#; let source_file = ast::SourceFile::parse(macro_definition); - let macro_definition = source_file - .syntax() - .descendants() - .find_map(ast::MacroCall::cast) - .unwrap(); + let macro_definition = + source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); let source_file = ast::SourceFile::parse(macro_invocation); - let macro_invocation = source_file - .syntax() - .descendants() - .find_map(ast::MacroCall::cast) - .unwrap(); + let macro_invocation = + source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); let definition_tt = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap(); let invocation_tt = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap(); @@ -163,11 +157,8 @@ impl_froms!(TokenTree: Leaf, Subtree); fn create_rules(macro_definition: &str) -> MacroRules { let source_file = ast::SourceFile::parse(macro_definition); - let macro_definition = source_file - .syntax() - .descendants() - .find_map(ast::MacroCall::cast) - .unwrap(); + let macro_definition = + source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); let definition_tt = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap(); crate::MacroRules::parse(&definition_tt).unwrap() @@ -175,11 +166,8 @@ impl_froms!(TokenTree: Leaf, Subtree); fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) { let source_file = ast::SourceFile::parse(invocation); - let macro_invocation = source_file - .syntax() - .descendants() - .find_map(ast::MacroCall::cast) - .unwrap(); + let macro_invocation = + source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); let invocation_tt = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap(); diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs index fb1066eec6..31531f4c9e 100644 --- a/crates/ra_mbe/src/mbe_expander.rs +++ b/crates/ra_mbe/src/mbe_expander.rs @@ -133,11 +133,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Option } _ => return None, }, - crate::TokenTree::Repeat(crate::Repeat { - subtree, - kind: _, - separator, - }) => { + crate::TokenTree::Repeat(crate::Repeat { subtree, kind: _, separator }) => { while let Some(nested) = match_lhs(subtree, input) { res.push_nested(nested)?; if let Some(separator) = *separator { @@ -166,10 +162,7 @@ fn expand_subtree( .map(|it| expand_tt(it, bindings, nesting)) .collect::>>()?; - Some(tt::Subtree { - token_trees, - delimiter: template.delimiter, - }) + Some(tt::Subtree { token_trees, delimiter: template.delimiter }) } fn expand_tt( @@ -188,23 +181,15 @@ fn expand_tt( token_trees.push(t.into()) } nesting.pop().unwrap(); - tt::Subtree { - token_trees, - delimiter: tt::Delimiter::None, - } - .into() + tt::Subtree { token_trees, delimiter: tt::Delimiter::None }.into() } crate::TokenTree::Leaf(leaf) => match leaf { - crate::Leaf::Ident(ident) => tt::Leaf::from(tt::Ident { - text: ident.text.clone(), - }) - .into(), + crate::Leaf::Ident(ident) => { + tt::Leaf::from(tt::Ident { text: ident.text.clone() }).into() + } crate::Leaf::Punct(punct) => tt::Leaf::from(punct.clone()).into(), crate::Leaf::Var(v) => bindings.get(&v.text, nesting)?.clone(), - crate::Leaf::Literal(l) => tt::Leaf::from(tt::Literal { - text: l.text.clone(), - }) - .into(), + crate::Leaf::Literal(l) => tt::Leaf::from(tt::Literal { text: l.text.clone() }).into(), }, }; Some(res) diff --git 
a/crates/ra_mbe/src/mbe_parser.rs b/crates/ra_mbe/src/mbe_parser.rs index abad2e8c80..60e566ed23 100644 --- a/crates/ra_mbe/src/mbe_parser.rs +++ b/crates/ra_mbe/src/mbe_parser.rs @@ -52,10 +52,7 @@ fn parse_subtree(tt: &tt::Subtree) -> Option { }; token_trees.push(child); } - Some(crate::Subtree { - token_trees, - delimiter: tt.delimiter, - }) + Some(crate::Subtree { token_trees, delimiter: tt.delimiter }) } fn parse_var(p: &mut TtCursor) -> Option { @@ -92,9 +89,5 @@ fn parse_repeat(p: &mut TtCursor) -> Option { _ => return None, }; p.bump(); - Some(crate::Repeat { - subtree, - kind, - separator, - }) + Some(crate::Repeat { subtree, kind, separator }) } diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 2dc04d4e73..9a2eceabac 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs @@ -23,23 +23,14 @@ fn convert_tt(tt: &SyntaxNode) -> Option { for char in child.leaf_text().unwrap().chars() { if let Some(char) = prev { token_trees.push( - tt::Leaf::from(tt::Punct { - char, - spacing: tt::Spacing::Joint, - }) - .into(), + tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Joint }).into(), ); } prev = Some(char) } if let Some(char) = prev { - token_trees.push( - tt::Leaf::from(tt::Punct { - char, - spacing: tt::Spacing::Alone, - }) - .into(), - ); + token_trees + .push(tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Alone }).into()); } } else { let child: tt::TokenTree = if child.kind() == TOKEN_TREE { @@ -48,10 +39,7 @@ fn convert_tt(tt: &SyntaxNode) -> Option { let text = child.leaf_text().unwrap().clone(); tt::Leaf::from(tt::Ident { text }).into() } else if child.kind().is_literal() { - tt::Leaf::from(tt::Literal { - text: child.leaf_text().unwrap().clone(), - }) - .into() + tt::Leaf::from(tt::Literal { text: child.leaf_text().unwrap().clone() }).into() } else { return None; }; @@ -59,9 +47,6 @@ fn convert_tt(tt: &SyntaxNode) -> Option { } } - let res = tt::Subtree { - delimiter, - token_trees, - }; + let res = tt::Subtree { delimiter, token_trees }; Some(res) } diff --git a/crates/ra_syntax/src/algo/visit.rs b/crates/ra_syntax/src/algo/visit.rs index 38f21594cd..81a99228fd 100644 --- a/crates/ra_syntax/src/algo/visit.rs +++ b/crates/ra_syntax/src/algo/visit.rs @@ -7,10 +7,7 @@ pub fn visitor<'a, T>() -> impl Visitor<'a, Output = T> { } pub fn visitor_ctx<'a, T, C>(ctx: C) -> impl VisitorCtx<'a, Output = T, Ctx = C> { - EmptyVisitorCtx { - ph: PhantomData, - ctx, - } + EmptyVisitorCtx { ph: PhantomData, ctx } } pub trait Visitor<'a>: Sized { @@ -21,11 +18,7 @@ pub trait Visitor<'a>: Sized { N: AstNode + 'a, F: FnOnce(&'a N) -> Self::Output, { - Vis { - inner: self, - f, - ph: PhantomData, - } + Vis { inner: self, f, ph: PhantomData } } } @@ -38,11 +31,7 @@ pub trait VisitorCtx<'a>: Sized { N: AstNode + 'a, F: FnOnce(&'a N, Self::Ctx) -> Self::Output, { - VisCtx { - inner: self, - f, - ph: PhantomData, - } + VisCtx { inner: self, f, ph: PhantomData } } } diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs index d6237532b0..cf5cfecc27 100644 --- a/crates/ra_syntax/src/ast.rs +++ b/crates/ra_syntax/src/ast.rs @@ -127,16 +127,12 @@ pub trait DocCommentsOwner: AstNode { let line = comment.text().as_str(); // Determine if the prefix or prefix + 1 char is stripped - let pos = if line - .chars() - .nth(prefix_len) - .map(|c| c.is_whitespace()) - .unwrap_or(false) - { - prefix_len + 1 - } else { - prefix_len - }; + let pos = + if line.chars().nth(prefix_len).map(|c| 
c.is_whitespace()).unwrap_or(false) { + prefix_len + 1 + } else { + prefix_len + }; line[pos..].to_owned() }) @@ -357,10 +353,7 @@ pub enum PathSegmentKind<'a> { impl PathSegment { pub fn parent_path(&self) -> &Path { - self.syntax() - .parent() - .and_then(Path::cast) - .expect("segments are always nested in paths") + self.syntax().parent().and_then(Path::cast).expect("segments are always nested in paths") } pub fn kind(&self) -> Option { @@ -428,10 +421,7 @@ pub struct AstChildren<'a, N> { impl<'a, N> AstChildren<'a, N> { fn new(parent: &'a SyntaxNode) -> Self { - AstChildren { - inner: parent.children(), - ph: PhantomData, - } + AstChildren { inner: parent.children(), ph: PhantomData } } } @@ -658,11 +648,7 @@ impl SelfParam { let borrowed = self.syntax().children().any(|n| n.kind() == AMP); if borrowed { // check for a `mut` coming after the & -- `mut &self` != `&mut self` - if self - .syntax() - .children() - .skip_while(|n| n.kind() != AMP) - .any(|n| n.kind() == MUT_KW) + if self.syntax().children().skip_while(|n| n.kind() != AMP).any(|n| n.kind() == MUT_KW) { SelfParamFlavor::MutRef } else { @@ -769,8 +755,5 @@ fn test_doc_comment_preserves_indents() { "#, ); let module = file.syntax().descendants().find_map(Module::cast).unwrap(); - assert_eq!( - "doc1\n```\nfn foo() {\n // ...\n}\n```", - module.doc_comment_text().unwrap() - ); + assert_eq!("doc1\n```\nfn foo() {\n // ...\n}\n```", module.doc_comment_text().unwrap()); } diff --git a/crates/ra_syntax/src/grammar/expressions.rs b/crates/ra_syntax/src/grammar/expressions.rs index 6b88c56853..28fcb1f7d3 100644 --- a/crates/ra_syntax/src/grammar/expressions.rs +++ b/crates/ra_syntax/src/grammar/expressions.rs @@ -7,26 +7,17 @@ use super::*; const EXPR_FIRST: TokenSet = LHS_FIRST; pub(super) fn expr(p: &mut Parser) -> BlockLike { - let r = Restrictions { - forbid_structs: false, - prefer_stmt: false, - }; + let r = Restrictions { forbid_structs: false, prefer_stmt: false }; expr_bp(p, r, 1) } pub(super) fn expr_stmt(p: &mut Parser) -> BlockLike { - let r = Restrictions { - forbid_structs: false, - prefer_stmt: true, - }; + let r = Restrictions { forbid_structs: false, prefer_stmt: true }; expr_bp(p, r, 1) } fn expr_no_struct(p: &mut Parser) { - let r = Restrictions { - forbid_structs: true, - prefer_stmt: false, - }; + let r = Restrictions { forbid_structs: true, prefer_stmt: false }; expr_bp(p, r, 1); } diff --git a/crates/ra_syntax/src/grammar/expressions/atom.rs b/crates/ra_syntax/src/grammar/expressions/atom.rs index 600774afd7..27ba87657e 100644 --- a/crates/ra_syntax/src/grammar/expressions/atom.rs +++ b/crates/ra_syntax/src/grammar/expressions/atom.rs @@ -141,14 +141,7 @@ fn tuple_expr(p: &mut Parser) -> CompletedMarker { } } p.expect(R_PAREN); - m.complete( - p, - if saw_expr && !saw_comma { - PAREN_EXPR - } else { - TUPLE_EXPR - }, - ) + m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) } // test array_expr diff --git a/crates/ra_syntax/src/grammar/items.rs b/crates/ra_syntax/src/grammar/items.rs index 84c18a2932..a61f260cf1 100644 --- a/crates/ra_syntax/src/grammar/items.rs +++ b/crates/ra_syntax/src/grammar/items.rs @@ -155,11 +155,7 @@ pub(super) fn maybe_item(p: &mut Parser, flavor: ItemFlavor) -> MaybeItem { IMPL_BLOCK } _ => { - return if has_mods { - MaybeItem::Modifiers - } else { - MaybeItem::None - }; + return if has_mods { MaybeItem::Modifiers } else { MaybeItem::None }; } }; diff --git a/crates/ra_syntax/src/grammar/params.rs b/crates/ra_syntax/src/grammar/params.rs index 
13158429a9..185386569a 100644 --- a/crates/ra_syntax/src/grammar/params.rs +++ b/crates/ra_syntax/src/grammar/params.rs @@ -36,11 +36,7 @@ impl Flavor { } fn list_(p: &mut Parser, flavor: Flavor) { - let (bra, ket) = if flavor.type_required() { - (L_PAREN, R_PAREN) - } else { - (PIPE, PIPE) - }; + let (bra, ket) = if flavor.type_required() { (L_PAREN, R_PAREN) } else { (PIPE, PIPE) }; assert!(p.at(bra)); let m = p.start(); p.bump(); diff --git a/crates/ra_syntax/src/grammar/patterns.rs b/crates/ra_syntax/src/grammar/patterns.rs index 1ac5efdf6e..f3f400ae0c 100644 --- a/crates/ra_syntax/src/grammar/patterns.rs +++ b/crates/ra_syntax/src/grammar/patterns.rs @@ -2,9 +2,7 @@ use super::*; pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST .union(paths::PATH_FIRST) - .union(token_set![ - REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE - ]); + .union(token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE]); pub(super) fn pattern(p: &mut Parser) { pattern_r(p, PAT_RECOVERY_SET) diff --git a/crates/ra_syntax/src/lexer/ptr.rs b/crates/ra_syntax/src/lexer/ptr.rs index 0a473c9911..c341c4176b 100644 --- a/crates/ra_syntax/src/lexer/ptr.rs +++ b/crates/ra_syntax/src/lexer/ptr.rs @@ -11,10 +11,7 @@ pub(crate) struct Ptr<'s> { impl<'s> Ptr<'s> { /// Creates a new `Ptr` from a string. pub fn new(text: &'s str) -> Ptr<'s> { - Ptr { - text, - len: 0.into(), - } + Ptr { text, len: 0.into() } } /// Gets the length of the remaining string. diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 104f328515..088b2f5d70 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs @@ -11,11 +11,7 @@ //! [rfc#2256]: //! [RFC.md]: -#![forbid( - missing_debug_implementations, - unconditional_recursion, - future_incompatible -)] +#![forbid(missing_debug_implementations, unconditional_recursion, future_incompatible)] #![deny(bad_style, missing_docs)] #![allow(missing_docs)] //#![warn(unreachable_pub)] // rust-lang/rust#47816 @@ -70,8 +66,7 @@ impl SourceFile { } pub fn reparse(&self, edit: &AtomTextEdit) -> TreeArc { - self.incremental_reparse(edit) - .unwrap_or_else(|| self.full_reparse(edit)) + self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit)) } pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option> { diff --git a/crates/ra_syntax/src/parser_api.rs b/crates/ra_syntax/src/parser_api.rs index 3148371c51..504df753e4 100644 --- a/crates/ra_syntax/src/parser_api.rs +++ b/crates/ra_syntax/src/parser_api.rs @@ -136,10 +136,7 @@ pub(crate) struct Marker { impl Marker { fn new(pos: u32) -> Marker { - Marker { - pos, - bomb: DropBomb::new("Marker must be either completed or abandoned"), - } + Marker { pos, bomb: DropBomb::new("Marker must be either completed or abandoned") } } /// Finishes the syntax tree node and assigns `kind` to it, diff --git a/crates/ra_syntax/src/parser_impl.rs b/crates/ra_syntax/src/parser_impl.rs index 01a51cd8d6..f255dc23b1 100644 --- a/crates/ra_syntax/src/parser_impl.rs +++ b/crates/ra_syntax/src/parser_impl.rs @@ -54,9 +54,7 @@ pub(crate) fn parse_with( parser(&mut parser_api); parser_api.0.into_events() }; - EventProcessor::new(sink, text, tokens, &mut events) - .process() - .finish() + EventProcessor::new(sink, text, tokens, &mut events).process().finish() } /// Implementation details of `Parser`, extracted @@ -160,17 +158,13 @@ impl<'t> ParserImpl<'t> { /// Append one Error event to the back of events. 
pub(super) fn error(&mut self, msg: String) { - self.push_event(Event::Error { - msg: ParseError(msg), - }) + self.push_event(Event::Error { msg: ParseError(msg) }) } /// Complete an event with appending a `Finish` event. pub(super) fn complete(&mut self, pos: u32, kind: SyntaxKind) { match self.events[pos as usize] { - Event::Start { - kind: ref mut slot, .. - } => { + Event::Start { kind: ref mut slot, .. } => { *slot = kind; } _ => unreachable!(), @@ -183,10 +177,7 @@ impl<'t> ParserImpl<'t> { let idx = pos as usize; if idx == self.events.len() - 1 { match self.events.pop() { - Some(Event::Start { - kind: TOMBSTONE, - forward_parent: None, - }) => (), + Some(Event::Start { kind: TOMBSTONE, forward_parent: None }) => (), _ => unreachable!(), } } @@ -196,10 +187,7 @@ impl<'t> ParserImpl<'t> { pub(super) fn precede(&mut self, pos: u32) -> u32 { let new_pos = self.start(); match self.events[pos as usize] { - Event::Start { - ref mut forward_parent, - .. - } => { + Event::Start { ref mut forward_parent, .. } => { *forward_parent = Some(new_pos - pos); } _ => unreachable!(), diff --git a/crates/ra_syntax/src/parser_impl/event.rs b/crates/ra_syntax/src/parser_impl/event.rs index 33e10ef85d..677876ab5d 100644 --- a/crates/ra_syntax/src/parser_impl/event.rs +++ b/crates/ra_syntax/src/parser_impl/event.rs @@ -86,10 +86,7 @@ pub(crate) enum Event { impl Event { pub(crate) fn tombstone() -> Self { - Event::Start { - kind: TOMBSTONE, - forward_parent: None, - } + Event::Start { kind: TOMBSTONE, forward_parent: None } } } @@ -109,14 +106,7 @@ impl<'a, S: Sink> EventProcessor<'a, S> { tokens: &'a [Token], events: &'a mut [Event], ) -> EventProcessor<'a, S> { - EventProcessor { - sink, - text_pos: 0.into(), - text, - token_pos: 0, - tokens, - events, - } + EventProcessor { sink, text_pos: 0.into(), text, token_pos: 0, tokens, events } } /// Generate the syntax tree with the control of events. @@ -125,14 +115,9 @@ impl<'a, S: Sink> EventProcessor<'a, S> { for i in 0..self.events.len() { match mem::replace(&mut self.events[i], Event::tombstone()) { - Event::Start { - kind: TOMBSTONE, .. - } => (), + Event::Start { kind: TOMBSTONE, .. } => (), - Event::Start { - kind, - forward_parent, - } => { + Event::Start { kind, forward_parent } => { // For events[A, B, C], B is A's forward_parent, C is B's forward_parent, // in the normal control flow, the parent-child relation: `A -> B -> C`, // while with the magic forward_parent, it writes: `C <- B <- A`. @@ -145,10 +130,7 @@ impl<'a, S: Sink> EventProcessor<'a, S> { idx += fwd as usize; // append `A`'s forward_parent `B` fp = match mem::replace(&mut self.events[idx], Event::tombstone()) { - Event::Start { - kind, - forward_parent, - } => { + Event::Start { kind, forward_parent } => { forward_parents.push(kind); forward_parent } @@ -174,10 +156,9 @@ impl<'a, S: Sink> EventProcessor<'a, S> { .sum::(); self.leaf(kind, len, n_raw_tokens); } - Event::Error { msg } => self.sink.error(SyntaxError::new( - SyntaxErrorKind::ParseError(msg), - self.text_pos, - )), + Event::Error { msg } => self + .sink + .error(SyntaxError::new(SyntaxErrorKind::ParseError(msg), self.text_pos)), } } self.sink @@ -189,10 +170,8 @@ impl<'a, S: Sink> EventProcessor<'a, S> { self.sink.start_branch(kind); return; } - let n_trivias = self.tokens[self.token_pos..] 
- .iter() - .take_while(|it| it.kind.is_trivia()) - .count(); + let n_trivias = + self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count(); let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias]; let mut trivia_end = self.text_pos + leading_trivias.iter().map(|it| it.len).sum::(); diff --git a/crates/ra_syntax/src/parser_impl/input.rs b/crates/ra_syntax/src/parser_impl/input.rs index 7fde5b3ab1..616a26fdc8 100644 --- a/crates/ra_syntax/src/parser_impl/input.rs +++ b/crates/ra_syntax/src/parser_impl/input.rs @@ -36,11 +36,7 @@ impl<'t> ParserInput<'t> { len += token.len; } - ParserInput { - text, - start_offsets, - tokens, - } + ParserInput { text, start_offsets, tokens } } /// Get the syntax kind of token at given input position. diff --git a/crates/ra_syntax/src/ptr.rs b/crates/ra_syntax/src/ptr.rs index 13ee1305f7..aae590cb62 100644 --- a/crates/ra_syntax/src/ptr.rs +++ b/crates/ra_syntax/src/ptr.rs @@ -15,16 +15,12 @@ pub struct SyntaxNodePtr { impl SyntaxNodePtr { pub fn new(node: &SyntaxNode) -> SyntaxNodePtr { - SyntaxNodePtr { - range: node.range(), - kind: node.kind(), - } + SyntaxNodePtr { range: node.range(), kind: node.kind() } } pub fn to_node(self, source_file: &SourceFile) -> &SyntaxNode { generate(Some(source_file.syntax()), |&node| { - node.children() - .find(|it| self.range.is_subrange(&it.range())) + node.children().find(|it| self.range.is_subrange(&it.range())) }) .find(|it| it.range() == self.range && it.kind() == self.kind) .unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self)) @@ -55,10 +51,7 @@ impl Clone for AstPtr { impl AstPtr { pub fn new(node: &N) -> AstPtr { - AstPtr { - raw: SyntaxNodePtr::new(node.syntax()), - _ty: PhantomData, - } + AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData } } pub fn to_node(self, source_file: &SourceFile) -> &N { @@ -76,11 +69,7 @@ fn test_local_syntax_ptr() { use crate::{ast, AstNode}; let file = SourceFile::parse("struct Foo { f: u32, }"); - let field = file - .syntax() - .descendants() - .find_map(ast::NamedFieldDef::cast) - .unwrap(); + let field = file.syntax().descendants().find_map(ast::NamedFieldDef::cast).unwrap(); let ptr = SyntaxNodePtr::new(field.syntax()); let field_syntax = ptr.to_node(&file); assert_eq!(field.syntax(), &*field_syntax); diff --git a/crates/ra_syntax/src/reparsing.rs b/crates/ra_syntax/src/reparsing.rs index 2f1de6b02d..c5c609ad54 100644 --- a/crates/ra_syntax/src/reparsing.rs +++ b/crates/ra_syntax/src/reparsing.rs @@ -75,10 +75,7 @@ fn is_contextual_kw(text: &str) -> bool { type ParseFn = fn(&mut Parser); fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, ParseFn)> { let node = algo::find_covering_node(node, range); - return node - .ancestors() - .filter_map(|node| reparser(node).map(|r| (node, r))) - .next(); + return node.ancestors().filter_map(|node| reparser(node).map(|r| (node, r))).next(); fn reparser(node: &SyntaxNode) -> Option { let res = match node.kind() { @@ -169,10 +166,7 @@ mod tests { let fully_reparsed = SourceFile::parse(&after); let incrementally_reparsed = { let f = SourceFile::parse(&before); - let edit = AtomTextEdit { - delete: range, - insert: replace_with.to_string(), - }; + let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() }; let (node, green, new_errors) = reparser(f.syntax(), &edit).expect("cannot incrementally reparse"); let green_root = node.replace_with(green); diff --git a/crates/ra_syntax/src/string_lexing/parser.rs 
b/crates/ra_syntax/src/string_lexing/parser.rs index e835382fc5..7469eb903e 100644 --- a/crates/ra_syntax/src/string_lexing/parser.rs +++ b/crates/ra_syntax/src/string_lexing/parser.rs @@ -24,9 +24,7 @@ impl<'a> Parser<'a> { } pub fn advance(&mut self) -> char { - let next = self - .peek() - .expect("cannot advance if end of input is reached"); + let next = self.peek().expect("cannot advance if end of input is reached"); self.pos += next.len_utf8(); next } @@ -133,10 +131,7 @@ impl<'a> Parser<'a> { Some(self.parse_escape(start)) } else { let end = self.get_pos(); - Some(StringComponent::new( - TextRange::from_to(start, end), - CodePoint, - )) + Some(StringComponent::new(TextRange::from_to(start, end), CodePoint)) } } diff --git a/crates/ra_syntax/src/string_lexing/string.rs b/crates/ra_syntax/src/string_lexing/string.rs index 064f085447..a4742a0d1f 100644 --- a/crates/ra_syntax/src/string_lexing/string.rs +++ b/crates/ra_syntax/src/string_lexing/string.rs @@ -120,12 +120,7 @@ mod tests { fn closed_char_component(src: &str) -> StringComponent { let (has_closing_quote, components) = parse(src); assert!(has_closing_quote, "char should have closing quote"); - assert!( - components.len() == 1, - "Literal: {}\nComponents: {:#?}", - src, - components - ); + assert!(components.len() == 1, "Literal: {}\nComponents: {:#?}", src, components); components[0].clone() } diff --git a/crates/ra_syntax/src/validation/block.rs b/crates/ra_syntax/src/validation/block.rs index 9e1949124f..4e77c15b6b 100644 --- a/crates/ra_syntax/src/validation/block.rs +++ b/crates/ra_syntax/src/validation/block.rs @@ -17,8 +17,6 @@ pub(crate) fn validate_block_node(node: &ast::Block, errors: &mut Vec {} } } - errors.extend( - node.attrs() - .map(|attr| SyntaxError::new(InvalidBlockAttr, attr.syntax().range())), - ) + errors + .extend(node.attrs().map(|attr| SyntaxError::new(InvalidBlockAttr, attr.syntax().range()))) } diff --git a/crates/ra_syntax/src/validation/byte.rs b/crates/ra_syntax/src/validation/byte.rs index 9bddabc801..d51fabcf96 100644 --- a/crates/ra_syntax/src/validation/byte.rs +++ b/crates/ra_syntax/src/validation/byte.rs @@ -28,10 +28,7 @@ pub(super) fn validate_byte_node(node: &ast::Byte, errors: &mut Vec } if let Some(range) = components.suffix { - errors.push(SyntaxError::new( - InvalidSuffix, - range + literal_range.start(), - )); + errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start())); } if len == 0 { @@ -55,10 +52,7 @@ pub(super) fn validate_byte_component( AsciiCodeEscape => validate_byte_code_escape(text, range, errors), UnicodeEscape => errors.push(SyntaxError::new(UnicodeEscapeForbidden, range)), CodePoint => { - let c = text - .chars() - .next() - .expect("Code points should be one character long"); + let c = text.chars().next().expect("Code points should be one character long"); // These bytes must always be escaped if c == '\t' || c == '\r' || c == '\n' { @@ -93,10 +87,7 @@ fn validate_byte_code_escape(text: &str, range: TextRange, errors: &mut Vec } if let Some(range) = components.suffix { - errors.push(SyntaxError::new( - InvalidSuffix, - range + literal_range.start(), - )); + errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start())); } if len == 0 { @@ -184,12 +181,7 @@ mod test { fn assert_valid_char(literal: &str) { let file = build_file(literal); - assert!( - file.errors().len() == 0, - "Errors for literal '{}': {:?}", - literal, - file.errors() - ); + assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors()); } fn 
@@ -258,13 +250,7 @@ mod test {
 
     #[test]
     fn test_valid_unicode_escape() {
-        let valid = [
-            r"\u{FF}",
-            r"\u{0}",
-            r"\u{F}",
-            r"\u{10FFFF}",
-            r"\u{1_0__FF___FF_____}",
-        ];
+        let valid = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"];
         for c in &valid {
             assert_valid_char(c);
         }
diff --git a/crates/ra_syntax/src/validation/string.rs b/crates/ra_syntax/src/validation/string.rs
index 365fe8d2dd..4fd7fffdf4 100644
--- a/crates/ra_syntax/src/validation/string.rs
+++ b/crates/ra_syntax/src/validation/string.rs
@@ -29,10 +29,7 @@ pub(crate) fn validate_string_node(node: &ast::String, errors: &mut Vec
 GreenBuilder {
-        GreenBuilder {
-            errors: Vec::new(),
-            inner: GreenNodeBuilder::new(),
-        }
+        GreenBuilder { errors: Vec::new(), inner: GreenNodeBuilder::new() }
     }
 }
diff --git a/crates/ra_syntax/src/yellow/syntax_error.rs b/crates/ra_syntax/src/yellow/syntax_error.rs
index c52c44cc38..412cf82cc7 100644
--- a/crates/ra_syntax/src/yellow/syntax_error.rs
+++ b/crates/ra_syntax/src/yellow/syntax_error.rs
@@ -28,10 +28,7 @@ impl Into<Location> for TextRange {
 
 impl SyntaxError {
     pub fn new<L: Into<Location>>(kind: SyntaxErrorKind, loc: L) -> SyntaxError {
-        SyntaxError {
-            kind,
-            location: loc.into(),
-        }
+        SyntaxError { kind, location: loc.into() }
     }
 
     pub fn kind(&self) -> SyntaxErrorKind {
@@ -119,10 +116,9 @@ impl fmt::Display for SyntaxErrorKind {
             InvalidByteEscape => write!(f, "Invalid escape sequence"),
             TooShortByteCodeEscape => write!(f, "Escape sequence should have two digits"),
             MalformedByteCodeEscape => write!(f, "Escape sequence should be a hexadecimal number"),
-            UnicodeEscapeForbidden => write!(
-                f,
-                "Unicode escapes are not allowed in byte literals or byte strings"
-            ),
+            UnicodeEscapeForbidden => {
+                write!(f, "Unicode escapes are not allowed in byte literals or byte strings")
+            }
             TooShortAsciiCodeEscape => write!(f, "Escape sequence should have two digits"),
             AsciiCodeEscapeOutOfRange => {
                 write!(f, "Escape sequence should be between \\x00 and \\x7F")
diff --git a/crates/ra_syntax/src/yellow/syntax_text.rs b/crates/ra_syntax/src/yellow/syntax_text.rs
index 378cd1b2e2..84e5b231ac 100644
--- a/crates/ra_syntax/src/yellow/syntax_text.rs
+++ b/crates/ra_syntax/src/yellow/syntax_text.rs
@@ -10,10 +10,7 @@ pub struct SyntaxText<'a> {
 
 impl<'a> SyntaxText<'a> {
     pub(crate) fn new(node: &'a SyntaxNode) -> SyntaxText<'a> {
-        SyntaxText {
-            node,
-            range: node.range(),
-        }
+        SyntaxText { node, range: node.range() }
     }
 
     pub fn chunks(&self) -> impl Iterator {
@@ -58,10 +55,7 @@ impl<'a> SyntaxText<'a> {
         let range = range.restrict(self.range).unwrap_or_else(|| {
             panic!("invalid slice, range: {:?}, slice: {:?}", self.range, range)
         });
-        SyntaxText {
-            node: self.node,
-            range,
-        }
+        SyntaxText { node: self.node, range }
     }
 
     pub fn char_at(&self, offset: impl Into<TextUnit>) -> Option<char> {
diff --git a/crates/ra_syntax/tests/test.rs b/crates/ra_syntax/tests/test.rs
index 3243b27aec..168d0623dc 100644
--- a/crates/ra_syntax/tests/test.rs
+++ b/crates/ra_syntax/tests/test.rs
@@ -23,36 +23,28 @@ fn lexer_tests() {
 
 #[test]
 fn parser_tests() {
-    dir_tests(
-        &test_data_dir(),
-        &["parser/inline/ok", "parser/ok"],
-        |text, path| {
-            let file = SourceFile::parse(text);
-            let errors = file.errors();
-            assert_eq!(
-                &*errors,
-                &[] as &[ra_syntax::SyntaxError],
-                "There should be no errors in the file {:?}",
-                path.display()
-            );
-            dump_tree(file.syntax())
-        },
-    );
-    dir_tests(
-        &test_data_dir(),
-        &["parser/err", "parser/inline/err"],
-        |text, path| {
-            let file = SourceFile::parse(text);
-            let errors = file.errors();
-            assert_ne!(
-                &*errors,
-                &[] as &[ra_syntax::SyntaxError],
-                "There should be errors in the file {:?}",
-                path.display()
-            );
-            dump_tree(file.syntax())
-        },
-    );
+    dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
+        let file = SourceFile::parse(text);
+        let errors = file.errors();
+        assert_eq!(
+            &*errors,
+            &[] as &[ra_syntax::SyntaxError],
+            "There should be no errors in the file {:?}",
+            path.display()
+        );
+        dump_tree(file.syntax())
+    });
+    dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
+        let file = SourceFile::parse(text);
+        let errors = file.errors();
+        assert_ne!(
+            &*errors,
+            &[] as &[ra_syntax::SyntaxError],
+            "There should be errors in the file {:?}",
+            path.display()
+        );
+        dump_tree(file.syntax())
+    });
 }
 
 #[test]
@@ -87,12 +79,7 @@ fn self_hosting_parsing() {
         let text = read_text(entry.path());
         let node = SourceFile::parse(&text);
         let errors = node.errors();
-        assert_eq!(
-            &*errors,
-            &[],
-            "There should be no errors in the file {:?}",
-            entry
-        );
+        assert_eq!(&*errors, &[], "There should be no errors in the file {:?}", entry);
     }
     assert!(
         count > 30,
diff --git a/crates/ra_text_edit/src/lib.rs b/crates/ra_text_edit/src/lib.rs
index fb693b3ae0..df673ba185 100644
--- a/crates/ra_text_edit/src/lib.rs
+++ b/crates/ra_text_edit/src/lib.rs
@@ -15,10 +15,7 @@ pub struct AtomTextEdit {
 
 impl AtomTextEdit {
     pub fn replace(range: TextRange, replace_with: String) -> AtomTextEdit {
-        AtomTextEdit {
-            delete: range,
-            insert: replace_with,
-        }
+        AtomTextEdit { delete: range, insert: replace_with }
     }
 
     pub fn delete(range: TextRange) -> AtomTextEdit {
diff --git a/crates/ra_text_edit/src/test_utils.rs b/crates/ra_text_edit/src/test_utils.rs
index 745f21c931..9e21b24f67 100644
--- a/crates/ra_text_edit/src/test_utils.rs
+++ b/crates/ra_text_edit/src/test_utils.rs
@@ -8,9 +8,7 @@ pub fn arb_text() -> proptest::string::RegexGeneratorStrategy<String> {
 }
 
 fn text_offsets(text: &str) -> Vec<TextUnit> {
-    text.char_indices()
-        .map(|(i, _)| TextUnit::from_usize(i))
-        .collect()
+    text.char_indices().map(|(i, _)| TextUnit::from_usize(i)).collect()
 }
 
 pub fn arb_offset(text: &str) -> BoxedStrategy<TextUnit> {
@@ -56,9 +54,7 @@ pub fn arb_text_edit(text: &str) -> BoxedStrategy<AtomTextEdit> {
                     )
                     .boxed()
                 }
-                &[x] => arb_text()
-                    .prop_map(move |text| AtomTextEdit::insert(x, text))
-                    .boxed(),
+                &[x] => arb_text().prop_map(move |text| AtomTextEdit::insert(x, text)).boxed(),
                 _ => unreachable!(),
             })
             .collect();
diff --git a/crates/ra_text_edit/src/text_edit.rs b/crates/ra_text_edit/src/text_edit.rs
index 363b3d8c05..8522f99bdd 100644
--- a/crates/ra_text_edit/src/text_edit.rs
+++ b/crates/ra_text_edit/src/text_edit.rs
@@ -25,9 +25,7 @@ impl TextEditBuilder {
         TextEdit::from_atoms(self.atoms)
     }
     pub fn invalidates_offset(&self, offset: TextUnit) -> bool {
-        self.atoms
-            .iter()
-            .any(|atom| atom.delete.contains_inclusive(offset))
+        self.atoms.iter().any(|atom| atom.delete.contains_inclusive(offset))
     }
 }
 
diff --git a/crates/ra_vfs/src/io.rs b/crates/ra_vfs/src/io.rs
index d764c534aa..ff5ae3a198 100644
--- a/crates/ra_vfs/src/io.rs
+++ b/crates/ra_vfs/src/io.rs
@@ -14,32 +14,15 @@ use notify::{DebouncedEvent, RecommendedWatcher, RecursiveMode, Watcher as _Watc
 use crate::{RootConfig, Roots, VfsRoot};
 
 pub(crate) enum Task {
-    AddRoot {
-        root: VfsRoot,
-        config: Arc<RootConfig>,
-    },
+    AddRoot { root: VfsRoot, config: Arc<RootConfig> },
 }
 
 #[derive(Debug)]
 pub enum TaskResult {
-    BulkLoadRoot {
-        root: VfsRoot,
-        files: Vec<(RelativePathBuf, String)>,
-    },
-    AddSingleFile {
-        root: VfsRoot,
-        path: RelativePathBuf,
-        text: String,
-    },
-    ChangeSingleFile {
-        root: VfsRoot,
-        path: RelativePathBuf,
-        text: String,
-    },
-    RemoveSingleFile {
-        root: VfsRoot,
-        path: RelativePathBuf,
-    },
+    BulkLoadRoot { root: VfsRoot, files: Vec<(RelativePathBuf, String)> },
+    AddSingleFile { root: VfsRoot, path: RelativePathBuf, text: String },
+    ChangeSingleFile { root: VfsRoot, path: RelativePathBuf, text: String },
+    RemoveSingleFile { root: VfsRoot, path: RelativePathBuf },
 }
 
 #[derive(Debug)]
@@ -127,10 +110,7 @@ impl Worker {
             },
         );
 
-        Worker {
-            worker,
-            worker_handle,
-        }
+        Worker { worker, worker_handle }
     }
 
     pub(crate) fn sender(&self) -> &Sender<Task> {
@@ -162,9 +142,7 @@ fn watch_root(
             Some((path, text))
         })
        .collect();
-    sender
-        .send(TaskResult::BulkLoadRoot { root, files })
-        .unwrap();
+    sender.send(TaskResult::BulkLoadRoot { root, files }).unwrap();
     log::debug!("... loaded {}", config.root.as_path().display());
 }
 
@@ -233,21 +211,12 @@ fn handle_change(
         }
         ChangeKind::Write => {
             if let Some(text) = read_to_string(&path) {
-                sender
-                    .send(TaskResult::ChangeSingleFile {
-                        root,
-                        path: rel_path,
-                        text,
-                    })
-                    .unwrap();
+                sender.send(TaskResult::ChangeSingleFile { root, path: rel_path, text }).unwrap();
             }
         }
-        ChangeKind::Remove => sender
-            .send(TaskResult::RemoveSingleFile {
-                root,
-                path: rel_path,
-            })
-            .unwrap(),
+        ChangeKind::Remove => {
+            sender.send(TaskResult::RemoveSingleFile { root, path: rel_path }).unwrap()
+        }
     }
 }
 
@@ -282,7 +251,5 @@ fn watch_one(watcher: &mut RecommendedWatcher, dir: &Path) {
 }
 
 fn read_to_string(path: &Path) -> Option<String> {
-    fs::read_to_string(&path)
-        .map_err(|e| log::warn!("failed to read file {}", e))
-        .ok()
+    fs::read_to_string(&path).map_err(|e| log::warn!("failed to read file {}", e)).ok()
 }
diff --git a/crates/ra_vfs/src/lib.rs b/crates/ra_vfs/src/lib.rs
index 71a3f807d5..6b4eb68429 100644
--- a/crates/ra_vfs/src/lib.rs
+++ b/crates/ra_vfs/src/lib.rs
@@ -58,10 +58,7 @@ impl std::ops::Deref for Roots {
 
 impl RootConfig {
     fn new(root: PathBuf, excluded_dirs: Vec<PathBuf>) -> RootConfig {
-        RootConfig {
-            root,
-            excluded_dirs,
-        }
+        RootConfig { root, excluded_dirs }
     }
 
     /// Checks if root contains a path and returns a root-relative path.
     pub(crate) fn contains(&self, path: &Path) -> Option<RelativePathBuf> {
@@ -111,9 +108,7 @@ impl Roots {
         Roots { roots }
     }
     pub(crate) fn find(&self, path: &Path) -> Option<(VfsRoot, RelativePathBuf)> {
-        self.roots
-            .iter()
-            .find_map(|(root, data)| data.contains(path).map(|it| (root, it)))
+        self.roots.iter().find_map(|(root, data)| data.contains(path).map(|it| (root, it)))
     }
 }
 
@@ -154,21 +149,10 @@ impl Vfs {
         for (root, config) in roots.iter() {
             root2files.insert(root, Default::default());
-            worker
-                .sender()
-                .send(io::Task::AddRoot {
-                    root,
-                    config: Arc::clone(config),
-                })
-                .unwrap();
+            worker.sender().send(io::Task::AddRoot { root, config: Arc::clone(config) }).unwrap();
         }
-        let res = Vfs {
-            roots,
-            files: Arena::default(),
-            root2files,
-            worker,
-            pending_changes: Vec::new(),
-        };
+        let res =
+            Vfs { roots, files: Arena::default(), root2files, worker, pending_changes: Vec::new() };
         let vfs_roots = res.roots.iter().map(|(id, _)| id).collect();
         (res, vfs_roots)
     }
@@ -205,12 +189,7 @@ impl Vfs {
             let text = fs::read_to_string(path).unwrap_or_default();
             let text = Arc::new(text);
             let file = self.add_file(root, rel_path.clone(), Arc::clone(&text), false);
-            let change = VfsChange::AddFile {
-                file,
-                text,
-                root,
-                path: rel_path,
-            };
+            let change = VfsChange::AddFile { file, text, root, path: rel_path };
             self.pending_changes.push(change);
             Some(file)
         };
@@ -243,10 +222,7 @@ impl Vfs {
                     cur_files.push((file, path, text));
                 }
 
-                let change = VfsChange::AddRoot {
-                    root,
-                    files: cur_files,
-                };
+                let change = VfsChange::AddRoot { root, files: cur_files };
                 self.pending_changes.push(change);
             }
             TaskResult::AddSingleFile { root, path, text } => {
@@ -278,12 +254,7 @@ impl Vfs {
    ) -> Option<VfsFile> {
         let text = Arc::new(text);
         let file = self.add_file(root, path.clone(), text.clone(), is_overlay);
-        self.pending_changes.push(VfsChange::AddFile {
-            file,
-            root,
-            path,
-            text,
-        });
+        self.pending_changes.push(VfsChange::AddFile { file, root, path, text });
         Some(file)
     }
 
@@ -293,8 +264,7 @@ impl Vfs {
         }
         let text = Arc::new(text);
         self.change_file(file, text.clone(), is_overlay);
-        self.pending_changes
-            .push(VfsChange::ChangeFile { file, text });
+        self.pending_changes.push(VfsChange::ChangeFile { file, text });
     }
 
     fn do_remove_file(
@@ -308,8 +278,7 @@ impl Vfs {
             return;
         }
         self.remove_file(file);
-        self.pending_changes
-            .push(VfsChange::RemoveFile { root, path, file });
+        self.pending_changes.push(VfsChange::RemoveFile { root, path, file });
     }
 
     pub fn add_file_overlay(&mut self, path: &Path, text: String) -> Option<VfsFile> {
@@ -363,12 +332,7 @@ impl Vfs {
         text: Arc<String>,
         is_overlayed: bool,
     ) -> VfsFile {
-        let data = VfsFileData {
-            root,
-            path,
-            text,
-            is_overlayed,
-        };
+        let data = VfsFileData { root, path, text, is_overlayed };
         let file = self.files.alloc(data);
         self.root2files.get_mut(root).unwrap().insert(file);
         file
@@ -396,32 +360,14 @@ impl Vfs {
     }
 
     fn find_file(&self, root: VfsRoot, path: &RelativePath) -> Option<VfsFile> {
-        self.root2files[root]
-            .iter()
-            .map(|&it| it)
-            .find(|&file| self.files[file].path == path)
+        self.root2files[root].iter().map(|&it| it).find(|&file| self.files[file].path == path)
     }
 }
 
 #[derive(Debug, Clone)]
 pub enum VfsChange {
-    AddRoot {
-        root: VfsRoot,
-        files: Vec<(VfsFile, RelativePathBuf, Arc<String>)>,
-    },
-    AddFile {
-        root: VfsRoot,
-        file: VfsFile,
-        path: RelativePathBuf,
-        text: Arc<String>,
-    },
-    RemoveFile {
-        root: VfsRoot,
-        file: VfsFile,
-        path: RelativePathBuf,
-    },
-    ChangeFile {
-        file: VfsFile,
-        text: Arc<String>,
-    },
+    AddRoot { root: VfsRoot, files: Vec<(VfsFile, RelativePathBuf, Arc<String>)> },
+    AddFile { root: VfsRoot, file: VfsFile, path: RelativePathBuf, text: Arc<String> },
+    RemoveFile { root: VfsRoot, file: VfsFile, path: RelativePathBuf },
+    ChangeFile { file: VfsFile, text: Arc<String> },
 }
diff --git a/crates/ra_vfs/tests/vfs.rs b/crates/ra_vfs/tests/vfs.rs
index 545e1dbdd2..649ef96c9a 100644
--- a/crates/ra_vfs/tests/vfs.rs
+++ b/crates/ra_vfs/tests/vfs.rs
@@ -7,10 +7,7 @@ use tempfile::tempdir;
 
 fn process_tasks(vfs: &mut Vfs, num_tasks: u32) {
     for _ in 0..num_tasks {
-        let task = vfs
-            .task_receiver()
-            .recv_timeout(Duration::from_secs(3))
-            .unwrap();
+        let task = vfs.task_receiver().recv_timeout(Duration::from_secs(3)).unwrap();
         log::debug!("{:?}", task);
         vfs.handle_task(task);
     }
@@ -32,11 +29,7 @@ macro_rules! assert_match {
 fn test_vfs_works() -> std::io::Result<()> {
     // Logger::with_str("vfs=debug,ra_vfs=debug").start().unwrap();
 
-    let files = [
-        ("a/foo.rs", "hello"),
-        ("a/bar.rs", "world"),
-        ("a/b/baz.rs", "nested hello"),
-    ];
+    let files = [("a/foo.rs", "hello"), ("a/bar.rs", "world"), ("a/b/baz.rs", "nested hello")];
 
     let dir = tempdir().unwrap();
     for (path, text) in files.iter() {
@@ -66,14 +59,10 @@ fn test_vfs_works() -> std::io::Result<()> {
             })
            .collect::>();
-        let expected_files = [
-            ("foo.rs", "hello"),
-            ("bar.rs", "world"),
-            ("baz.rs", "nested hello"),
-        ]
-        .iter()
-        .map(|(path, text)| (path.to_string(), text.to_string()))
-        .collect::>();
+        let expected_files = [("foo.rs", "hello"), ("bar.rs", "world"), ("baz.rs", "nested hello")]
+            .iter()
+            .map(|(path, text)| (path.to_string(), text.to_string()))
+            .collect::>();
 
         assert_eq!(files, expected_files);
     }
@@ -107,14 +96,10 @@ fn test_vfs_works() -> std::io::Result<()> {
     );
 
     vfs.add_file_overlay(&dir.path().join("a/b/spam.rs"), "spam".to_string());
-    assert_match!(
-        vfs.commit_changes().as_slice(),
-        [VfsChange::AddFile { text, path, .. }],
-        {
-            assert_eq!(text.as_str(), "spam");
-            assert_eq!(path, "spam.rs");
-        }
-    );
+    assert_match!(vfs.commit_changes().as_slice(), [VfsChange::AddFile { text, path, .. }], {
+        assert_eq!(text.as_str(), "spam");
+        assert_eq!(path, "spam.rs");
+    });
 
     vfs.remove_file_overlay(&dir.path().join("a/b/spam.rs"));
     assert_match!(
@@ -126,30 +111,17 @@ fn test_vfs_works() -> std::io::Result<()> {
 
     fs::create_dir_all(dir.path().join("a/sub1/sub2")).unwrap();
     fs::write(dir.path().join("a/sub1/sub2/new.rs"), "new hello").unwrap();
     process_tasks(&mut vfs, 1);
-    assert_match!(
-        vfs.commit_changes().as_slice(),
-        [VfsChange::AddFile { text, path, .. }],
-        {
-            assert_eq!(text.as_str(), "new hello");
-            assert_eq!(path, "sub1/sub2/new.rs");
-        }
-    );
+    assert_match!(vfs.commit_changes().as_slice(), [VfsChange::AddFile { text, path, .. }], {
+        assert_eq!(text.as_str(), "new hello");
+        assert_eq!(path, "sub1/sub2/new.rs");
+    });
 
-    fs::rename(
-        &dir.path().join("a/sub1/sub2/new.rs"),
-        &dir.path().join("a/sub1/sub2/new1.rs"),
-    )
-    .unwrap();
+    fs::rename(&dir.path().join("a/sub1/sub2/new.rs"), &dir.path().join("a/sub1/sub2/new1.rs"))
+        .unwrap();
     process_tasks(&mut vfs, 2);
     assert_match!(
         vfs.commit_changes().as_slice(),
-        [VfsChange::RemoveFile {
-            path: removed_path, ..
-        }, VfsChange::AddFile {
-            text,
-            path: added_path,
-            ..
-        }],
+        [VfsChange::RemoveFile { path: removed_path, .. }, VfsChange::AddFile { text, path: added_path, .. }],
         {
             assert_eq!(removed_path, "sub1/sub2/new.rs");
             assert_eq!(added_path, "sub1/sub2/new1.rs");
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs
index 35a679aea5..09fc2e6597 100644
--- a/crates/test_utils/src/lib.rs
+++ b/crates/test_utils/src/lib.rs
@@ -85,9 +85,7 @@ pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) {
             stack.push(from);
         } else if text.starts_with(&close) {
             text = &text[close.len()..];
-            let from = stack
-                .pop()
-                .unwrap_or_else(|| panic!("unmatched </{}>", tag));
+            let from = stack.pop().unwrap_or_else(|| panic!("unmatched </{}>", tag));
             let to = TextUnit::of_str(&res);
             ranges.push(TextRange::from_to(from, to));
         }
@@ -131,10 +129,7 @@ pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> {
     macro_rules! flush {
         () => {
             if let Some(meta) = meta {
-                res.push(FixtureEntry {
-                    meta: meta.to_string(),
-                    text: buf.clone(),
-                });
+                res.push(FixtureEntry { meta: meta.to_string(), text: buf.clone() });
                 buf.clear();
             }
         };
@@ -226,15 +221,13 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a
             let mut l = l.iter().collect::<Vec<_>>();
             let mut r = r.iter().collect::<Vec<_>>();
 
-            l.retain(
-                |l| match r.iter().position(|r| find_mismatch(l, r).is_none()) {
-                    Some(i) => {
-                        r.remove(i);
-                        false
-                    }
-                    None => true,
-                },
-            );
+            l.retain(|l| match r.iter().position(|r| find_mismatch(l, r).is_none()) {
+                Some(i) => {
+                    r.remove(i);
+                    false
+                }
+                None => true,
+            });
 
             if !l.is_empty() {
                 assert!(!r.is_empty());
@@ -250,10 +243,7 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a
                 return Some((expected, actual));
             }
 
-            l.values()
-                .zip(r.values())
-                .filter_map(|(l, r)| find_mismatch(l, r))
-                .nth(0)
+            l.values().zip(r.values()).filter_map(|(l, r)| find_mismatch(l, r)).nth(0)
         }
         (&Null, &Null) => None,
         // magic string literal "{...}" acts as wildcard for any sub-JSON
@@ -312,12 +302,7 @@ fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
 
 pub fn project_dir() -> PathBuf {
     let dir = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(dir)
-        .parent()
-        .unwrap()
-        .parent()
-        .unwrap()
-        .to_owned()
+    PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
 }
 
 /// Read file and normalize newlines.
diff --git a/crates/test_utils/src/marks.rs b/crates/test_utils/src/marks.rs
index ee47b52198..d2a84643c8 100644
--- a/crates/test_utils/src/marks.rs
+++ b/crates/test_utils/src/marks.rs
@@ -64,10 +64,7 @@ pub struct MarkChecker {
 
 impl MarkChecker {
     pub fn new(mark: &'static AtomicUsize) -> MarkChecker {
         let value_on_entry = mark.load(Ordering::SeqCst);
-        MarkChecker {
-            mark,
-            value_on_entry,
-        }
+        MarkChecker { mark, value_on_entry }
     }
 }
 
diff --git a/crates/thread_worker/src/lib.rs b/crates/thread_worker/src/lib.rs
index 5e46f62fe9..ca0aad1360 100644
--- a/crates/thread_worker/src/lib.rs
+++ b/crates/thread_worker/src/lib.rs
@@ -71,12 +71,5 @@ impl WorkerHandle {
 fn worker_chan<I, O>(buf: usize) -> (Worker<I, O>, Receiver<I>, Sender<O>) {
     let (input_sender, input_receiver) = bounded::<I>(buf);
     let (output_sender, output_receiver) = unbounded::<O>();
-    (
-        Worker {
-            inp: input_sender,
-            out: output_receiver,
-        },
-        input_receiver,
-        output_sender,
-    )
+    (Worker { inp: input_sender, out: output_receiver }, input_receiver, output_sender)
 }
diff --git a/crates/tools/src/bin/pre-commit.rs b/crates/tools/src/bin/pre-commit.rs
index e00bd0d3da..ea18c0863f 100644
--- a/crates/tools/src/bin/pre-commit.rs
+++ b/crates/tools/src/bin/pre-commit.rs
@@ -19,19 +19,10 @@ fn update_staged() -> Result<()> {
         .current_dir(&root)
         .output()?;
     if !output.status.success() {
-        bail!(
-            "`git diff --diff-filter=MAR --name-only --cached` exited with {}",
-            output.status
-        );
+        bail!("`git diff --diff-filter=MAR --name-only --cached` exited with {}", output.status);
     }
     for line in String::from_utf8(output.stdout)?.lines() {
-        run(
-            &format!(
-                "git update-index --add {}",
-                root.join(line).to_string_lossy()
-            ),
-            ".",
-        )?;
+        run(&format!("git update-index --add {}", root.join(line).to_string_lossy()), ".")?;
     }
     Ok(())
 }
diff --git a/crates/tools/src/lib.rs b/crates/tools/src/lib.rs
index 311bcb4d8f..ef9c613a7d 100644
--- a/crates/tools/src/lib.rs
+++ b/crates/tools/src/lib.rs
@@ -58,10 +58,8 @@ pub fn collect_tests(s: &str) -> Vec<(usize, Test)> {
                 None => continue 'outer,
             }
         };
-        let text: String = itertools::join(
-            block.map(|(_, line)| line).chain(::std::iter::once("")),
-            "\n",
-        );
+        let text: String =
+            itertools::join(block.map(|(_, line)| line).chain(::std::iter::once("")), "\n");
         assert!(!text.trim().is_empty() && text.ends_with('\n'));
         res.push((start_line, Test { name, text, ok }))
     }
@@ -78,11 +76,7 @@ pub fn generate(mode: Mode) -> Result<()> {
 }
 
 pub fn project_root() -> PathBuf {
-    Path::new(&env!("CARGO_MANIFEST_DIR"))
-        .ancestors()
-        .nth(2)
-        .unwrap()
-        .to_path_buf()
+    Path::new(&env!("CARGO_MANIFEST_DIR")).ancestors().nth(2).unwrap().to_path_buf()
 }
 
 pub fn run(cmdline: &str, dir: &str) -> Result<()> {
@@ -90,10 +84,7 @@ pub fn run(cmdline: &str, dir: &str) -> Result<()> {
     let project_dir = project_root().join(dir);
     let mut args = cmdline.split_whitespace();
     let exec = args.next().unwrap();
-    let status = Command::new(exec)
-        .args(args)
-        .current_dir(project_dir)
-        .status()?;
+    let status = Command::new(exec).args(args).current_dir(project_dir).status()?;
     if !status.success() {
         bail!("`{}` exited with {}", cmdline, status);
     }
@@ -112,10 +103,7 @@ pub fn run_rustfmt(mode: Mode) -> Result<()> {
     };
 
     if mode == Verify {
-        run(
-            &format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN),
-            ".",
-        )?;
+        run(&format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN), ".")?;
     } else {
         run(&format!("rustup run {} -- cargo fmt", TOOLCHAIN), ".")?;
     }
@@ -124,10 +112,7 @@ pub fn install_rustfmt() -> Result<()> {
     run(&format!("rustup install {}", TOOLCHAIN), ".")?;
-    run(
-        &format!("rustup component add rustfmt --toolchain {}", TOOLCHAIN),
-        ".",
-    )
+    run(&format!("rustup component add rustfmt --toolchain {}", TOOLCHAIN), ".")
 }
 
 pub fn install_format_hook() -> Result<()> {
@@ -156,10 +141,7 @@ pub fn run_fuzzer() -> Result<()> {
         _ => run("cargo install cargo-fuzz", ".")?,
     };
 
-    run(
-        "rustup run nightly -- cargo fuzz run parser",
-        "./crates/ra_syntax",
-    )
+    run("rustup run nightly -- cargo fuzz run parser", "./crates/ra_syntax")
 }
 
 pub fn gen_tests(mode: Mode) -> Result<()> {
@@ -245,11 +227,7 @@ fn existing_tests(dir: &Path, ok: bool) -> Result
Result<()> {
         .subcommand(SubCommand::with_name("format-hook"))
         .subcommand(SubCommand::with_name("fuzz-tests"))
         .get_matches();
-    match matches
-        .subcommand_name()
-        .expect("Subcommand must be specified")
-    {
+    match matches.subcommand_name().expect("Subcommand must be specified") {
         "install-code" => install_code_extension()?,
         "gen-tests" => gen_tests(Overwrite)?,
         "gen-syntax" => generate(Overwrite)?,
@@ -45,10 +42,7 @@ fn install_code_extension() -> Result<()> {
             "./editors/code",
         )?;
     } else {
-        run(
-            r"code --install-extension ./ra-lsp-0.0.1.vsix --force",
-            "./editors/code",
-        )?;
+        run(r"code --install-extension ./ra-lsp-0.0.1.vsix --force", "./editors/code")?;
     }
     Ok(())
 }
diff --git a/crates/tools/tests/cli.rs b/crates/tools/tests/cli.rs
index 2ee4b5223a..aab52a4aaa 100644
--- a/crates/tools/tests/cli.rs
+++ b/crates/tools/tests/cli.rs
@@ -10,19 +10,13 @@ fn generated_grammar_is_fresh() {
 
 #[test]
 fn generated_tests_are_fresh() {
     if let Err(error) = gen_tests(Verify) {
-        panic!(
-            "{}. Please update tests by running `cargo gen-tests`",
-            error
-        );
+        panic!("{}. Please update tests by running `cargo gen-tests`", error);
     }
 }
 
 #[test]
 fn check_code_formatting() {
     if let Err(error) = run_rustfmt(Verify) {
-        panic!(
-            "{}. Please format the code by running `cargo format`",
-            error
-        );
+        panic!("{}. Please format the code by running `cargo format`", error);
     }
 }
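For readers skimming the hunks: every change in this patch is the same mechanical rewrite, in which struct literals, method chains, and macro arguments that fit within the line-width limit are collapsed onto a single line, with no semantic changes. The sketch below is an illustration only, not part of the patch; the type and function names are made up, and the assumption that the new style comes from rustfmt's width-based "small heuristics" behaviour (rather than hand edits) is mine.

// Illustration only: `Edit` and `replace` are hypothetical names, not code from this patch.
struct Edit {
    delete: (u32, u32),
    insert: String,
}

// Old style (the `-` lines above): one field or argument per line.
//     Edit {
//         delete: range,
//         insert: replace_with,
//     }
// New style (the `+` lines above): the literal stays on one line while it fits the width limit.
fn replace(range: (u32, u32), replace_with: String) -> Edit {
    Edit { delete: range, insert: replace_with }
}

fn main() {
    let edit = replace((0, 5), String::from("hello"));
    println!("{:?} -> {}", edit.delete, edit.insert);
}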