diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 2725a97de8..28f645171c 100644 --- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -1,4 +1,4 @@ -use crate::{AssistContext, Assists}; +use crate::{utils, AssistContext, Assists}; use hir::DescendPreference; use ide_db::{ assists::{AssistId, AssistKind}, @@ -8,8 +8,12 @@ use ide_db::{ }, }; use itertools::Itertools; -use stdx::format_to; -use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; +use syntax::{ + ast::{self, make}, + ted, AstNode, AstToken, NodeOrToken, + SyntaxKind::WHITESPACE, + T, +}; // Assist: extract_expressions_from_format_string // @@ -34,6 +38,7 @@ pub(crate) fn extract_expressions_from_format_string( ) -> Option<()> { let fmt_string = ctx.find_token_at_offset::<ast::String>()?; let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; + let tt_delimiter = tt.left_delimiter_token()?.kind(); let expanded_t = ast::String::cast( ctx.sema @@ -61,72 +66,63 @@ pub(crate) fn extract_expressions_from_format_string( "Extract format expressions", tt.syntax().text_range(), |edit| { - let fmt_range = fmt_string.syntax().text_range(); - - // Replace old format string with new format string whose arguments have been extracted - edit.replace(fmt_range, new_fmt); - - // Insert cursor at end of format string - edit.insert(fmt_range.end(), "$0"); + let tt = edit.make_mut(tt); // Extract existing arguments in macro - let tokens = - tt.token_trees_and_tokens().collect_vec(); + let tokens = tt.token_trees_and_tokens().collect_vec(); - let mut existing_args: Vec<String> = vec![]; - - let mut current_arg = String::new(); - if let [_opening_bracket, NodeOrToken::Token(format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(end_bracket)] = + let existing_args = if 
let [_opening_bracket, NodeOrToken::Token(_format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(_end_bracket)] = tokens.as_slice() { - for t in tokens { - match t { - NodeOrToken::Node(n) => { - format_to!(current_arg, "{n}"); - }, - NodeOrToken::Token(t) if t.kind() == COMMA => { - existing_args.push(current_arg.trim().into()); - current_arg.clear(); - }, - NodeOrToken::Token(t) => { - current_arg.push_str(t.text()); - }, - } - } - existing_args.push(current_arg.trim().into()); + let args = tokens.split(|it| matches!(it, NodeOrToken::Token(t) if t.kind() == T![,])).map(|arg| { + // Strip off leading and trailing whitespace tokens + let arg = match arg.split_first() { + Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest, + _ => arg, + }; + let arg = match arg.split_last() { + Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest, + _ => arg, + }; + arg + }); - // delete everything after the format string till end bracket - // we're going to insert the new arguments later - edit.delete(TextRange::new( - format_string.text_range().end(), - end_bracket.text_range().start(), - )); - } + args.collect() + } else { + vec![] + }; // Start building the new args let mut existing_args = existing_args.into_iter(); - let mut args = String::new(); + let mut new_tt_bits = vec![NodeOrToken::Token(make::tokens::literal(&new_fmt))]; + let mut placeholder_indexes = vec![]; - let mut placeholder_idx = 1; + for arg in extracted_args { + if matches!(arg, Arg::Expr(_) | Arg::Placeholder) { + // insert ", " before each arg + new_tt_bits.extend_from_slice(&[ + NodeOrToken::Token(make::token(T![,])), + NodeOrToken::Token(make::tokens::single_space()), + ]); + } - for extracted_args in extracted_args { - match extracted_args { - Arg::Expr(s)=> { - args.push_str(", "); + match arg { + Arg::Expr(s) => { // insert arg - args.push_str(&s); + // FIXME: use the crate's edition for parsing + let expr = ast::Expr::parse(&s, 
syntax::Edition::CURRENT).syntax_node(); + let mut expr_tt = utils::tt_from_syntax(expr); + new_tt_bits.append(&mut expr_tt); } Arg::Placeholder => { - args.push_str(", "); // try matching with existing argument match existing_args.next() { - Some(ea) => { - args.push_str(&ea); + Some(arg) => { + new_tt_bits.extend_from_slice(arg); } None => { - // insert placeholder - args.push_str(&format!("${placeholder_idx}")); - placeholder_idx += 1; + placeholder_indexes.push(new_tt_bits.len()); + new_tt_bits.push(NodeOrToken::Token(make::token(T![_]))); } } } @@ -134,8 +130,31 @@ pub(crate) fn extract_expressions_from_format_string( } } + // Insert new args - edit.insert(fmt_range.end(), args); + let new_tt = make::token_tree(tt_delimiter, new_tt_bits).clone_for_update(); + ted::replace(tt.syntax(), new_tt.syntax()); + + if let Some(cap) = ctx.config.snippet_cap { + // Add placeholder snippets over placeholder args + for pos in placeholder_indexes { + // Skip the opening delimiter + let Some(NodeOrToken::Token(placeholder)) = + new_tt.token_trees_and_tokens().skip(1).nth(pos) + else { + continue; + }; + + if stdx::always!(placeholder.kind() == T![_]) { + edit.add_placeholder_snippet_token(cap, placeholder); + } + } + + // Add the final tabstop after the format literal + if let Some(NodeOrToken::Token(literal)) = new_tt.token_trees_and_tokens().nth(1) { + edit.add_tabstop_after_token(cap, literal); + } + } }, ); @@ -145,7 +164,7 @@ pub(crate) fn extract_expressions_from_format_string( #[cfg(test)] mod tests { use super::*; - use crate::tests::check_assist; + use crate::tests::{check_assist, check_assist_no_snippet_cap}; #[test] fn multiple_middle_arg() { @@ -195,7 +214,7 @@ fn main() { "#, r#" fn main() { - print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, $1); + print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, ${1:_}); } "#, ); @@ -292,4 +311,22 @@ fn main() { "#, ); } + + #[test] + fn without_snippets() { + check_assist_no_snippet_cap( + extract_expressions_from_format_string, + r#" 
+//- minicore: fmt +fn main() { + print!("{} {x + 1:b} {} {}$0", y + 2, 2); +} +"#, + r#" +fn main() { + print!("{} {:b} {} {}", y + 2, x + 1, 2, _); +} +"#, + ); + } } diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index bc0c9b79c7..ba6ef1921a 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs @@ -14,9 +14,9 @@ use syntax::{ edit_in_place::{AttrsOwnerEdit, Indent, Removable}, make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace, }, - ted, AstNode, AstToken, Direction, SourceFile, + ted, AstNode, AstToken, Direction, NodeOrToken, SourceFile, SyntaxKind::*, - SyntaxNode, TextRange, TextSize, T, + SyntaxNode, SyntaxToken, TextRange, TextSize, T, }; use crate::assist_context::{AssistContext, SourceChangeBuilder}; @@ -916,3 +916,46 @@ pub(crate) fn replace_record_field_expr( edit.replace(file_range.range, initializer.syntax().text()); } } + +/// Creates a token tree list from a syntax node, creating the needed delimited sub token trees. +/// Assumes that the input syntax node is a valid syntax tree. 
+pub(crate) fn tt_from_syntax(node: SyntaxNode) -> Vec<NodeOrToken<ast::TokenTree, SyntaxToken>> { + let mut tt_stack = vec![(None, vec![])]; + + for element in node.descendants_with_tokens() { + let NodeOrToken::Token(token) = element else { continue }; + + match token.kind() { + T!['('] | T!['{'] | T!['['] => { + // Found an opening delimiter, start a new sub token tree + tt_stack.push((Some(token.kind()), vec![])); + } + T![')'] | T!['}'] | T![']'] => { + // Closing a subtree + let (delimiter, tt) = tt_stack.pop().expect("unbalanced delimiters"); + let (_, parent_tt) = tt_stack + .last_mut() + .expect("parent token tree was closed before it was completed"); + let closing_delimiter = delimiter.map(|it| match it { + T!['('] => T![')'], + T!['{'] => T!['}'], + T!['['] => T![']'], + _ => unreachable!(), + }); + stdx::always!( + closing_delimiter == Some(token.kind()), + "mismatched opening and closing delimiters" + ); + + let sub_tt = make::token_tree(delimiter.expect("unbalanced delimiters"), tt); + parent_tt.push(NodeOrToken::Node(sub_tt)); + } + _ => { + let (_, current_tt) = tt_stack.last_mut().expect("unmatched delimiters"); + current_tt.push(NodeOrToken::Token(token)) + } + } + } + + tt_stack.pop().expect("parent token tree was closed before it was completed").1 +} diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs index f59d8d08c8..7ef7b7ae1d 100644 --- a/crates/ide-db/src/source_change.rs +++ b/crates/ide-db/src/source_change.rs @@ -338,6 +338,12 @@ impl SourceChangeBuilder { self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into())) } + /// Adds a snippet to move the cursor selected over `token` + pub fn add_placeholder_snippet_token(&mut self, _cap: SnippetCap, token: SyntaxToken) { + assert!(token.parent().is_some()); + self.add_snippet(PlaceSnippet::Over(token.into())) + } + /// Adds a snippet to move the cursor selected over `nodes` /// /// This allows for renaming newly generated items without having to go diff --git a/crates/syntax/src/ast/make.rs 
b/crates/syntax/src/ast/make.rs index 186f1b01da..bf5310c082 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -1159,7 +1159,7 @@ pub mod tokens { pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| { SourceFile::parse( - "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT, + "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let _ @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT, ) }); diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 3a9ebafe87..58f59c384b 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -107,14 +107,22 @@ impl<T> Parse<T> { } impl<T: AstNode> Parse<T> { + /// Converts this parse result into a parse result for an untyped syntax tree. pub fn to_syntax(self) -> Parse<SyntaxNode> { Parse { green: self.green, errors: self.errors, _ty: PhantomData } } + /// Gets the parsed syntax tree as a typed ast node. + /// + /// # Panics + /// + /// Panics if the root node cannot be casted into the typed ast node + /// (e.g. if it's an `ERROR` node). pub fn tree(&self) -> T { T::cast(self.syntax_node()).unwrap() } + /// Converts from `Parse<T>` to [`Result<T, Vec<SyntaxError>>`]. pub fn ok(self) -> Result<T, Vec<SyntaxError>> { match self.errors() { errors if !errors.is_empty() => Err(errors), @@ -167,6 +175,29 @@ impl Parse<SourceFile> { } } +impl ast::Expr { + /// Parses an `ast::Expr` from `text`. + /// + /// Note that if the parsed root node is not a valid expression, [`Parse::tree`] will panic. 
+ /// For example: + /// ```rust,should_panic + /// # use syntax::{ast, Edition}; + /// ast::Expr::parse("let fail = true;", Edition::CURRENT).tree(); + /// ``` + pub fn parse(text: &str, edition: Edition) -> Parse<ast::Expr> { + let _p = tracing::span!(tracing::Level::INFO, "Expr::parse").entered(); + let (green, errors) = parsing::parse_text_at(text, parser::TopEntryPoint::Expr, edition); + let root = SyntaxNode::new_root(green.clone()); + + assert!( + ast::Expr::can_cast(root.kind()) || root.kind() == SyntaxKind::ERROR, + "{:?} isn't an expression", + root.kind() + ); + Parse::new(green, errors) + } +} + /// `SourceFile` represents a parse tree for a single Rust file. pub use crate::ast::SourceFile; @@ -177,11 +208,7 @@ impl SourceFile { let root = SyntaxNode::new_root(green.clone()); assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); - Parse { - green, - errors: if errors.is_empty() { None } else { Some(errors.into()) }, - _ty: PhantomData, - } + Parse::new(green, errors) } } @@ -290,12 +317,7 @@ impl ast::TokenTree { } let (green, errors) = builder.finish_raw(); - - Parse { - green, - errors: if errors.is_empty() { None } else { Some(errors.into()) }, - _ty: PhantomData, - } + Parse::new(green, errors) } } diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs index 420f4938e5..a1ca3b3279 100644 --- a/crates/syntax/src/parsing.rs +++ b/crates/syntax/src/parsing.rs @@ -18,6 +18,19 @@ pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) { (node, errors) } +pub(crate) fn parse_text_at( + text: &str, + entry: parser::TopEntryPoint, + edition: parser::Edition, +) -> (GreenNode, Vec<SyntaxError>) { + let _p = tracing::span!(tracing::Level::INFO, "parse_text_at").entered(); + let lexed = parser::LexedStr::new(text); + let parser_input = lexed.to_input(); + let parser_output = entry.parse(&parser_input, edition); + let (node, errors, _eof) = build_tree(lexed, parser_output); + (node, errors) +} + pub(crate) fn build_tree( lexed: 
parser::LexedStr<'_>, parser_output: parser::Output,