Auto merge of #15345 - lowr:fix/add_missing_match_arms-upmap-failure, r=Veykril

Don't provide the `add_missing_match_arms` assist when upmapping the match arm list fails

Fixes #15310

We shouldn't provide the assist when we fail to find the original match arm list.

Note that this PR temporarily makes the assist not applicable for most users on the current stable toolchain when an attribute macro operates on the match expression in question, as in #15310. This is because the sysroot-abi proc-macro-srv on the current stable [discards] spans for `Group` delimiters in some code paths that the popular `proc-macro2` crate almost always exercises, so the identity of the match arm list's brackets is lost and the upmapping fails. This has been fixed by #14960, which will land in the next stable release, 1.71.

[discards]: 8ede3aae28/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs (L231)
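
For illustration, here is a minimal, self-contained sketch of the guard this PR adds. The types and functions below (`FileRange`, `original_range_opt`, `add_missing_match_arms`) are simplified stand-ins for the rust-analyzer API, not the real implementation:

```rust
use std::ops::Range;

/// Stand-in for rust-analyzer's `FileRange`: a file id plus a text range in that file.
struct FileRange {
    file_id: u32,
    range: Range<usize>,
}

/// Stand-in for `Semantics::original_range_opt`: `None` models the case where a node
/// produced by macro expansion cannot be mapped back ("upmapped") to the original file,
/// e.g. because the delimiter spans were discarded.
fn original_range_opt(upmapped: Option<FileRange>) -> Option<FileRange> {
    upmapped
}

/// Skeleton of the assist entry point: returning `None` means "assist not applicable".
fn add_missing_match_arms(arm_list: Option<FileRange>) -> Option<()> {
    // `?` bails out early, so the assist is simply not offered when the match arm
    // list has no original range, instead of editing inside the macro expansion.
    let arm_list_range = original_range_opt(arm_list)?;
    let _ = (arm_list_range.file_id, arm_list_range.range); // the real assist edits here
    Some(())
}

fn main() {
    assert!(add_missing_match_arms(Some(FileRange { file_id: 0, range: 10..20 })).is_some());
    assert!(add_missing_match_arms(None).is_none());
}
```

The actual change in the diff below does exactly this in the real assist: it switches from `ctx.sema.original_range` to `ctx.sema.original_range_opt` and propagates the `None` case with `?`.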
Committed by bors on 2023-08-01 08:58:43 +00:00 as f6bffa4dd3.
3 changed files with 37 additions and 17 deletions


@@ -37,9 +37,9 @@ use crate::{utils, AssistContext, AssistId, AssistKind, Assists};
 pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let match_expr = ctx.find_node_at_offset_with_descend::<ast::MatchExpr>()?;
     let match_arm_list = match_expr.match_arm_list()?;
-    let target_range = ctx.sema.original_range(match_expr.syntax()).range;
+    let arm_list_range = ctx.sema.original_range_opt(match_arm_list.syntax())?;
-    if let None = cursor_at_trivial_match_arm_list(ctx, &match_expr, &match_arm_list) {
-        let arm_list_range = ctx.sema.original_range(match_arm_list.syntax()).range;
+    if cursor_at_trivial_match_arm_list(ctx, &match_expr, &match_arm_list).is_none() {
         let cursor_in_range = arm_list_range.contains_range(ctx.selection_trimmed());
         if cursor_in_range {
@@ -198,7 +198,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
     acc.add(
         AssistId("add_missing_match_arms", AssistKind::QuickFix),
         "Fill match arms",
-        target_range,
+        ctx.sema.original_range(match_expr.syntax()).range,
         |edit| {
             let new_match_arm_list = match_arm_list.clone_for_update();
@@ -262,9 +262,8 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
             // Just replace the element that the original range came from
             let old_place = {
                 // Find the original element
-                let old_file_range = ctx.sema.original_range(match_arm_list.syntax());
-                let file = ctx.sema.parse(old_file_range.file_id);
-                let old_place = file.syntax().covering_element(old_file_range.range);
+                let file = ctx.sema.parse(arm_list_range.file_id);
+                let old_place = file.syntax().covering_element(arm_list_range.range);
                 // Make `old_place` mut
                 match old_place {
@@ -1922,4 +1921,24 @@ fn foo(t: E) {
 }"#,
         );
     }
+    #[test]
+    fn not_applicable_when_match_arm_list_cannot_be_upmapped() {
+        check_assist_not_applicable(
+            add_missing_match_arms,
+            r#"
+macro_rules! foo {
+    ($($t:tt)*) => {
+        $($t)* {}
+    }
+}
+enum E { A }
+fn main() {
+    foo!(match E::A$0);
+}
+"#,
+        );
+    }
 }


@@ -17,7 +17,10 @@ use token_stream::TokenStreamBuilder;
 mod symbol;
 pub use symbol::*;
-use std::ops::{Bound, Range};
+use std::{
+    iter,
+    ops::{Bound, Range},
+};
 use crate::tt;
@@ -80,9 +83,7 @@ impl server::TokenStream for RustAnalyzer {
         stream.is_empty()
     }
     fn from_str(&mut self, src: &str) -> Self::TokenStream {
-        use std::str::FromStr;
-        Self::TokenStream::from_str(src).expect("cannot parse string")
+        src.parse().expect("cannot parse string")
     }
     fn to_string(&mut self, stream: &Self::TokenStream) -> String {
         stream.to_string()
@@ -101,7 +102,7 @@ impl server::TokenStream for RustAnalyzer {
                     },
                 };
                 let tree = TokenTree::from(group);
-                Self::TokenStream::from_iter(vec![tree])
+                Self::TokenStream::from_iter(iter::once(tree))
             }
             bridge::TokenTree::Ident(ident) => {
@@ -111,7 +112,7 @@ impl server::TokenStream for RustAnalyzer {
                 let ident: tt::Ident = tt::Ident { text, span: ident.span };
                 let leaf = tt::Leaf::from(ident);
                 let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
+                Self::TokenStream::from_iter(iter::once(tree))
             }
             bridge::TokenTree::Literal(literal) => {
@@ -123,7 +124,7 @@ impl server::TokenStream for RustAnalyzer {
                 let literal = tt::Literal { text, span: literal.0.span };
                 let leaf = tt::Leaf::from(literal);
                 let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
+                Self::TokenStream::from_iter(iter::once(tree))
             }
             bridge::TokenTree::Punct(p) => {
@@ -134,7 +135,7 @@ impl server::TokenStream for RustAnalyzer {
                 };
                 let leaf = tt::Leaf::from(punct);
                 let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
+                Self::TokenStream::from_iter(iter::once(tree))
             }
         }
     }
@@ -355,12 +356,12 @@ impl server::Server for RustAnalyzer {
     }
     fn intern_symbol(ident: &str) -> Self::Symbol {
-        // FIXME: should be self.interner once the proc-macro api allows is
+        // FIXME: should be `self.interner` once the proc-macro api allows it.
        Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident))
     }
     fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
-        // FIXME: should be self.interner once the proc-macro api allows is
+        // FIXME: should be `self.interner` once the proc-macro api allows it.
         f(symbol.text(&SYMBOL_INTERNER).as_str())
     }
 }


@@ -65,7 +65,7 @@ pub mod token_id {
     }
     impl TokenTree {
         pub const fn empty() -> Self {
-            Self::Subtree(Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] })
+            Self::Subtree(Subtree::empty())
         }
     }