Auto merge of #17333 - DropDemBits:extract-format-args-snippet-cap, r=Veykril
fix: Only generate snippets for `extract_expressions_from_format_string` if snippets are supported

Part of #17332. Fixes `extract_expressions_from_format_string` so that it doesn't generate snippets if the client doesn't support them.
commit 5ac4be8167
6 changed files with 189 additions and 68 deletions
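The whole fix boils down to consulting the client's snippet capability (`ctx.config.snippet_cap`, an `Option`) before emitting snippet markup, and falling back to plain text when it is `None`. A minimal, self-contained sketch of that gating pattern (the `AssistConfig`/`SnippetCap` names mirror rust-analyzer, but these are simplified stand-ins, not the real API):

    // Stand-in for rust-analyzer's SnippetCap: present only when the client
    // advertised snippet support during initialization.
    #[derive(Clone, Copy)]
    struct SnippetCap;

    struct AssistConfig {
        snippet_cap: Option<SnippetCap>,
    }

    fn render_placeholder(config: &AssistConfig, index: usize) -> String {
        match config.snippet_cap {
            // Snippet-capable client: emit an LSP placeholder like `${1:_}`.
            Some(_cap) => format!("${{{index}:_}}"),
            // Otherwise emit plain text the client can apply verbatim.
            None => "_".to_string(),
        }
    }

    fn main() {
        let with = AssistConfig { snippet_cap: Some(SnippetCap) };
        let without = AssistConfig { snippet_cap: None };
        assert_eq!(render_placeholder(&with, 1), "${1:_}");
        assert_eq!(render_placeholder(&without, 1), "_");
    }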
@@ -1,4 +1,4 @@
-use crate::{AssistContext, Assists};
+use crate::{utils, AssistContext, Assists};
 use hir::DescendPreference;
 use ide_db::{
     assists::{AssistId, AssistKind},
@@ -8,8 +8,12 @@ use ide_db::{
     },
 };
 use itertools::Itertools;
-use stdx::format_to;
-use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange};
+use syntax::{
+    ast::{self, make},
+    ted, AstNode, AstToken, NodeOrToken,
+    SyntaxKind::WHITESPACE,
+    T,
+};
 
 // Assist: extract_expressions_from_format_string
 //
@@ -34,6 +38,7 @@ pub(crate) fn extract_expressions_from_format_string(
 ) -> Option<()> {
     let fmt_string = ctx.find_token_at_offset::<ast::String>()?;
     let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
+    let tt_delimiter = tt.left_delimiter_token()?.kind();
 
     let expanded_t = ast::String::cast(
         ctx.sema
@@ -61,72 +66,63 @@ pub(crate) fn extract_expressions_from_format_string(
         "Extract format expressions",
         tt.syntax().text_range(),
         |edit| {
-            let fmt_range = fmt_string.syntax().text_range();
-
-            // Replace old format string with new format string whose arguments have been extracted
-            edit.replace(fmt_range, new_fmt);
-
-            // Insert cursor at end of format string
-            edit.insert(fmt_range.end(), "$0");
+            let tt = edit.make_mut(tt);
 
             // Extract existing arguments in macro
-            let tokens =
-                tt.token_trees_and_tokens().collect_vec();
-
-            let mut existing_args: Vec<String> = vec![];
+            let tokens = tt.token_trees_and_tokens().collect_vec();
 
-            let mut current_arg = String::new();
-            if let [_opening_bracket, NodeOrToken::Token(format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(end_bracket)] =
+            let existing_args = if let [_opening_bracket, NodeOrToken::Token(_format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(_end_bracket)] =
                 tokens.as_slice()
             {
-                for t in tokens {
-                    match t {
-                        NodeOrToken::Node(n) => {
-                            format_to!(current_arg, "{n}");
-                        },
-                        NodeOrToken::Token(t) if t.kind() == COMMA => {
-                            existing_args.push(current_arg.trim().into());
-                            current_arg.clear();
-                        },
-                        NodeOrToken::Token(t) => {
-                            current_arg.push_str(t.text());
-                        },
-                    }
-                }
-                existing_args.push(current_arg.trim().into());
-
-                // delete everything after the format string till end bracket
-                // we're going to insert the new arguments later
-                edit.delete(TextRange::new(
-                    format_string.text_range().end(),
-                    end_bracket.text_range().start(),
-                ));
-            }
+                let args = tokens.split(|it| matches!(it, NodeOrToken::Token(t) if t.kind() == T![,])).map(|arg| {
+                    // Strip off leading and trailing whitespace tokens
+                    let arg = match arg.split_first() {
+                        Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
+                        _ => arg,
+                    };
+                    let arg = match arg.split_last() {
+                        Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
+                        _ => arg,
+                    };
+                    arg
+                });
+
+                args.collect()
+            } else {
+                vec![]
+            };
 
             // Start building the new args
             let mut existing_args = existing_args.into_iter();
-            let mut args = String::new();
-
-            let mut placeholder_idx = 1;
+            let mut new_tt_bits = vec![NodeOrToken::Token(make::tokens::literal(&new_fmt))];
+            let mut placeholder_indexes = vec![];
 
-            for extracted_args in extracted_args {
-                match extracted_args {
-                    Arg::Expr(s)=> {
-                        args.push_str(", ");
+            for arg in extracted_args {
+                if matches!(arg, Arg::Expr(_) | Arg::Placeholder) {
+                    // insert ", " before each arg
+                    new_tt_bits.extend_from_slice(&[
+                        NodeOrToken::Token(make::token(T![,])),
+                        NodeOrToken::Token(make::tokens::single_space()),
+                    ]);
+                }
+
+                match arg {
+                    Arg::Expr(s) => {
                         // insert arg
-                        args.push_str(&s);
+                        // FIXME: use the crate's edition for parsing
+                        let expr = ast::Expr::parse(&s, syntax::Edition::CURRENT).syntax_node();
+                        let mut expr_tt = utils::tt_from_syntax(expr);
+                        new_tt_bits.append(&mut expr_tt);
                     }
                     Arg::Placeholder => {
-                        args.push_str(", ");
                         // try matching with existing argument
                         match existing_args.next() {
-                            Some(ea) => {
-                                args.push_str(&ea);
+                            Some(arg) => {
+                                new_tt_bits.extend_from_slice(arg);
                             }
                             None => {
-                                // insert placeholder
-                                args.push_str(&format!("${placeholder_idx}"));
-                                placeholder_idx += 1;
+                                placeholder_indexes.push(new_tt_bits.len());
+                                new_tt_bits.push(NodeOrToken::Token(make::token(T![_])));
                             }
                         }
                     }
                 }
@@ -134,8 +130,31 @@ pub(crate) fn extract_expressions_from_format_string(
                 }
             }
 
             // Insert new args
-            edit.insert(fmt_range.end(), args);
+            let new_tt = make::token_tree(tt_delimiter, new_tt_bits).clone_for_update();
+            ted::replace(tt.syntax(), new_tt.syntax());
+
+            if let Some(cap) = ctx.config.snippet_cap {
+                // Add placeholder snippets over placeholder args
+                for pos in placeholder_indexes {
+                    // Skip the opening delimiter
+                    let Some(NodeOrToken::Token(placeholder)) =
+                        new_tt.token_trees_and_tokens().skip(1).nth(pos)
+                    else {
+                        continue;
+                    };
+
+                    if stdx::always!(placeholder.kind() == T![_]) {
+                        edit.add_placeholder_snippet_token(cap, placeholder);
+                    }
+                }
+
+                // Add the final tabstop after the format literal
+                if let Some(NodeOrToken::Token(literal)) = new_tt.token_trees_and_tokens().nth(1) {
+                    edit.add_tabstop_after_token(cap, literal);
+                }
+            }
         },
     );
@@ -145,7 +164,7 @@ pub(crate) fn extract_expressions_from_format_string(
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::tests::check_assist;
+    use crate::tests::{check_assist, check_assist_no_snippet_cap};
 
     #[test]
     fn multiple_middle_arg() {
@@ -195,7 +214,7 @@ fn main() {
 "#,
             r#"
 fn main() {
-    print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, $1);
+    print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, ${1:_});
 }
 "#,
         );
@@ -292,4 +311,22 @@ fn main() {
 "#,
         );
     }
+
+    #[test]
+    fn without_snippets() {
+        check_assist_no_snippet_cap(
+            extract_expressions_from_format_string,
+            r#"
+//- minicore: fmt
+fn main() {
+    print!("{} {x + 1:b} {} {}$0", y + 2, 2);
+}
+"#,
+            r#"
+fn main() {
+    print!("{} {:b} {} {}", y + 2, x + 1, 2, _);
+}
+"#,
+        );
+    }
 }
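One design choice worth calling out in the hunks above: rather than splicing strings into the macro call, the assist now builds a flat list of token bits (`new_tt_bits`) and records the index of each placeholder token as it is pushed (`placeholder_indexes`); only after the finished token tree is spliced in does it resolve those indexes back to real tokens and attach snippets to them. A stripped-down, runnable sketch of that record-then-resolve idea, with plain strings standing in for syntax tokens:

    fn main() {
        let extracted = ["x + 1", "_", "y", "_"]; // "_" marks a placeholder arg
        let mut bits: Vec<String> = Vec::new();
        let mut placeholder_indexes = Vec::new();

        for arg in extracted {
            if arg == "_" {
                // Remember where this placeholder lands in the token list.
                placeholder_indexes.push(bits.len());
            }
            bits.push(arg.to_string());
        }

        // Later, each recorded index can be resolved back to the actual
        // token, e.g. to attach a placeholder snippet to it.
        for &pos in &placeholder_indexes {
            assert_eq!(bits[pos], "_");
        }
        println!("placeholders at {placeholder_indexes:?}");
    }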
@@ -14,9 +14,9 @@ use syntax::{
         edit_in_place::{AttrsOwnerEdit, Indent, Removable},
         make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
     },
-    ted, AstNode, AstToken, Direction, SourceFile,
+    ted, AstNode, AstToken, Direction, NodeOrToken, SourceFile,
     SyntaxKind::*,
-    SyntaxNode, TextRange, TextSize, T,
+    SyntaxNode, SyntaxToken, TextRange, TextSize, T,
 };
 
 use crate::assist_context::{AssistContext, SourceChangeBuilder};
@@ -916,3 +916,46 @@ pub(crate) fn replace_record_field_expr(
         edit.replace(file_range.range, initializer.syntax().text());
     }
 }
+
+/// Creates a token tree list from a syntax node, creating the needed delimited sub token trees.
+/// Assumes that the input syntax node is a valid syntax tree.
+pub(crate) fn tt_from_syntax(node: SyntaxNode) -> Vec<NodeOrToken<ast::TokenTree, SyntaxToken>> {
+    let mut tt_stack = vec![(None, vec![])];
+
+    for element in node.descendants_with_tokens() {
+        let NodeOrToken::Token(token) = element else { continue };
+
+        match token.kind() {
+            T!['('] | T!['{'] | T!['['] => {
+                // Found an opening delimiter, start a new sub token tree
+                tt_stack.push((Some(token.kind()), vec![]));
+            }
+            T![')'] | T!['}'] | T![']'] => {
+                // Closing a subtree
+                let (delimiter, tt) = tt_stack.pop().expect("unbalanced delimiters");
+                let (_, parent_tt) = tt_stack
+                    .last_mut()
+                    .expect("parent token tree was closed before it was completed");
+                let closing_delimiter = delimiter.map(|it| match it {
+                    T!['('] => T![')'],
+                    T!['{'] => T!['}'],
+                    T!['['] => T![']'],
+                    _ => unreachable!(),
+                });
+                stdx::always!(
+                    closing_delimiter == Some(token.kind()),
+                    "mismatched opening and closing delimiters"
+                );
+
+                let sub_tt = make::token_tree(delimiter.expect("unbalanced delimiters"), tt);
+                parent_tt.push(NodeOrToken::Node(sub_tt));
+            }
+            _ => {
+                let (_, current_tt) = tt_stack.last_mut().expect("unmatched delimiters");
+                current_tt.push(NodeOrToken::Token(token))
+            }
+        }
+    }
+
+    tt_stack.pop().expect("parent token tree was closed before it was completed").1
+}
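`tt_from_syntax` above is a standard stack-based grouping pass: every opening delimiter starts a new subtree, every closing delimiter pops one and hangs it off its parent. The same algorithm on plain characters, as a self-contained sketch (chars stand in for syntax tokens; unlike the real function, this sketch skips the `stdx::always!` check that the closer actually matches the opener):

    #[derive(Debug)]
    enum Tt {
        Leaf(char),
        Subtree(char, Vec<Tt>),
    }

    fn tt_from_chars(input: &str) -> Vec<Tt> {
        // Bottom of the stack is the top-level tree, which has no delimiter.
        let mut stack: Vec<(Option<char>, Vec<Tt>)> = vec![(None, vec![])];

        for c in input.chars() {
            match c {
                '(' | '[' | '{' => stack.push((Some(c), vec![])),
                ')' | ']' | '}' => {
                    let (open, children) = stack.pop().expect("unbalanced delimiters");
                    let open = open.expect("unbalanced delimiters");
                    // A real implementation should also verify `c` matches `open`,
                    // as `tt_from_syntax` does with `stdx::always!`.
                    let (_, parent) = stack.last_mut().expect("closed top-level tree");
                    parent.push(Tt::Subtree(open, children));
                }
                _ => stack.last_mut().expect("unbalanced delimiters").1.push(Tt::Leaf(c)),
            }
        }

        stack.pop().expect("unbalanced delimiters").1
    }

    fn main() {
        let tts = tt_from_chars("f(a[b])");
        // [Leaf('f'), Subtree('(', [Leaf('a'), Subtree('[', [Leaf('b')])])]
        println!("{tts:?}");
    }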
@@ -338,6 +338,12 @@ impl SourceChangeBuilder {
         self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into()))
     }
 
+    /// Adds a snippet to move the cursor selected over `token`
+    pub fn add_placeholder_snippet_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
+        assert!(token.parent().is_some());
+        self.add_snippet(PlaceSnippet::Over(token.into()))
+    }
+
     /// Adds a snippet to move the cursor selected over `nodes`
     ///
     /// This allows for renaming newly generated items without having to go
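For intuition on what a placeholder snippet placed over a token means once the edit is rendered: the token's text ends up wrapped as an LSP placeholder `${n:text}`, which is why the updated test earlier expects `${1:_}`. A tiny sketch of that rendering shape (an assumption for illustration, not rust-analyzer's actual snippet renderer):

    fn render_placeholder_over(index: usize, token_text: &str) -> String {
        // Wrap the covered token's text as `${n:text}`.
        format!("${{{index}:{token_text}}}")
    }

    fn main() {
        // The assist places placeholders over the `_` tokens it inserted.
        assert_eq!(render_placeholder_over(1, "_"), "${1:_}");
    }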
@@ -1159,7 +1159,7 @@ pub mod tokens {
 
     pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
         SourceFile::parse(
-            "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
+            "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let _ @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
         )
     });
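This one-character fixture change (`let a @ []` to `let _ @ []`) is presumably what lets the assist's `make::token(T![_])` call work: the `make` token constructors hand out tokens by fishing them out of this pre-parsed `SOURCE_FILE`, so a `_` token has to occur somewhere in its source text before one can be cloned out.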
@@ -107,14 +107,22 @@ impl<T> Parse<T> {
 }
 
 impl<T: AstNode> Parse<T> {
+    /// Converts this parse result into a parse result for an untyped syntax tree.
     pub fn to_syntax(self) -> Parse<SyntaxNode> {
         Parse { green: self.green, errors: self.errors, _ty: PhantomData }
     }
 
+    /// Gets the parsed syntax tree as a typed ast node.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the root node cannot be casted into the typed ast node
+    /// (e.g. if it's an `ERROR` node).
     pub fn tree(&self) -> T {
         T::cast(self.syntax_node()).unwrap()
     }
 
+    /// Converts from `Parse<T>` to [`Result<T, Vec<SyntaxError>>`].
     pub fn ok(self) -> Result<T, Vec<SyntaxError>> {
         match self.errors() {
             errors if !errors.is_empty() => Err(errors),
@@ -167,6 +175,29 @@ impl Parse<SourceFile> {
     }
 }
 
+impl ast::Expr {
+    /// Parses an `ast::Expr` from `text`.
+    ///
+    /// Note that if the parsed root node is not a valid expression, [`Parse::tree`] will panic.
+    /// For example:
+    /// ```rust,should_panic
+    /// # use syntax::{ast, Edition};
+    /// ast::Expr::parse("let fail = true;", Edition::CURRENT).tree();
+    /// ```
+    pub fn parse(text: &str, edition: Edition) -> Parse<ast::Expr> {
+        let _p = tracing::span!(tracing::Level::INFO, "Expr::parse").entered();
+        let (green, errors) = parsing::parse_text_at(text, parser::TopEntryPoint::Expr, edition);
+        let root = SyntaxNode::new_root(green.clone());
+
+        assert!(
+            ast::Expr::can_cast(root.kind()) || root.kind() == SyntaxKind::ERROR,
+            "{:?} isn't an expression",
+            root.kind()
+        );
+        Parse::new(green, errors)
+    }
+}
+
 /// `SourceFile` represents a parse tree for a single Rust file.
 pub use crate::ast::SourceFile;
@@ -177,11 +208,7 @@ impl SourceFile {
         let root = SyntaxNode::new_root(green.clone());
 
         assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
-        Parse {
-            green,
-            errors: if errors.is_empty() { None } else { Some(errors.into()) },
-            _ty: PhantomData,
-        }
+        Parse::new(green, errors)
     }
 }
|
@ -290,12 +317,7 @@ impl ast::TokenTree {
|
||||||
}
|
}
|
||||||
|
|
||||||
let (green, errors) = builder.finish_raw();
|
let (green, errors) = builder.finish_raw();
|
||||||
|
Parse::new(green, errors)
|
||||||
Parse {
|
|
||||||
green,
|
|
||||||
errors: if errors.is_empty() { None } else { Some(errors.into()) },
|
|
||||||
_ty: PhantomData,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
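The `Parse<T>` API extended above keeps one contract throughout: parsing always yields a tree plus a list of errors; `tree()` asserts the root kind and panics otherwise, exactly as the new doc comment warns, while `ok()` folds the errors into a `Result`. A self-contained toy version of that contract, with a string standing in for the syntax tree:

    #[derive(Debug, PartialEq)]
    enum Kind { Expr, Error }

    struct Parse {
        kind: Kind,
        text: String,
        errors: Vec<String>,
    }

    impl Parse {
        /// Panics if the root is not an expression, mirroring `Parse::tree`.
        fn tree(&self) -> &str {
            assert_eq!(self.kind, Kind::Expr, "root is not an expression");
            &self.text
        }

        /// Mirrors `Parse::ok`: errors take precedence over the tree.
        fn ok(self) -> Result<String, Vec<String>> {
            if self.errors.is_empty() { Ok(self.text) } else { Err(self.errors) }
        }
    }

    fn parse_expr(text: &str) -> Parse {
        // Toy "parser": statements are not expressions.
        let is_stmt = text.trim_end().ends_with(';');
        Parse {
            kind: if is_stmt { Kind::Error } else { Kind::Expr },
            text: text.to_string(),
            errors: if is_stmt { vec!["expected expression".into()] } else { vec![] },
        }
    }

    fn main() {
        assert_eq!(parse_expr("x + 1").tree(), "x + 1");
        assert!(parse_expr("let fail = true;").ok().is_err());
    }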
@@ -18,6 +18,19 @@ pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>)
     (node, errors)
 }
 
+pub(crate) fn parse_text_at(
+    text: &str,
+    entry: parser::TopEntryPoint,
+    edition: parser::Edition,
+) -> (GreenNode, Vec<SyntaxError>) {
+    let _p = tracing::span!(tracing::Level::INFO, "parse_text_at").entered();
+    let lexed = parser::LexedStr::new(text);
+    let parser_input = lexed.to_input();
+    let parser_output = entry.parse(&parser_input, edition);
+    let (node, errors, _eof) = build_tree(lexed, parser_output);
+    (node, errors)
+}
+
 pub(crate) fn build_tree(
     lexed: parser::LexedStr<'_>,
     parser_output: parser::Output,