mirror of
https://github.com/rust-lang/rust-analyzer
synced 2025-01-12 21:28:51 +00:00
Merge #5564
5564: SSR: Restrict to current selection if any r=davidlattimore a=davidlattimore

The selection is also used to avoid unnecessary work, but only to the file level. Further restricting unnecessary work is left for later.

Co-authored-by: David Lattimore <dml@google.com>
This commit is contained in: commit 9a9ddcc297
11 changed files with 186 additions and 55 deletions
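
Before the per-file hunks, a minimal sketch of how the changed API is meant to be driven after this commit. Everything in it is illustrative (the helper name, the offsets, and the foo/bar rule are invented here); the `MatchFinder::in_context` signature and the other calls are the ones this diff introduces or already uses, while the `ra_syntax::TextRange` constructor is assumed from the surrounding codebase:

    use ra_db::{FileId, FilePosition, FileRange};
    use ra_ssr::MatchFinder;
    use ra_syntax::TextRange;

    // Sketch: run an SSR rule, but only report matches that fall entirely inside a
    // selected region of one file. An empty `selections` vec keeps the old
    // whole-workspace behaviour; empty ranges are dropped by `in_context` itself.
    fn replace_in_selection(db: &ra_ide_db::RootDatabase, file_id: FileId) {
        // Any position in the file works as the context for resolving paths in the rule.
        let resolve_context = FilePosition { file_id, offset: 0.into() };
        // The selected region; these offsets are invented for the example.
        let selection = FileRange { file_id, range: TextRange::new(10.into(), 80.into()) };
        let mut match_finder = MatchFinder::in_context(db, resolve_context, vec![selection]);
        match_finder.add_rule("foo($a) ==>> bar($a)".parse().unwrap()).unwrap();
        let matches = match_finder.matches();
        eprintln!("{} matches inside the selection", matches.matches.len());
    }
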
@@ -510,9 +510,10 @@ impl Analysis {
         query: &str,
         parse_only: bool,
         position: FilePosition,
+        selections: Vec<FileRange>,
     ) -> Cancelable<Result<SourceChange, SsrError>> {
         self.with_db(|db| {
-            let edits = ssr::parse_search_replace(query, parse_only, db, position)?;
+            let edits = ssr::parse_search_replace(query, parse_only, db, position, selections)?;
            Ok(SourceChange::from(edits))
        })
    }
@@ -1,4 +1,4 @@
-use ra_db::FilePosition;
+use ra_db::{FilePosition, FileRange};
 use ra_ide_db::RootDatabase;
 
 use crate::SourceFileEdit;
@@ -24,6 +24,9 @@ use ra_ssr::{MatchFinder, SsrError, SsrRule};
 // Method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will match
 // `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`.
 //
+// The scope of the search / replace will be restricted to the current selection if any, otherwise
+// it will apply to the whole workspace.
+//
 // Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
 //
 // Supported constraints:
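
A hedged illustration of the UFCS convention the comment above describes, built only from `MatchFinder` calls that appear elsewhere in this diff (`foo::Bar::baz` and the helper name are invented for the example):

    use ra_db::FilePosition;
    use ra_ssr::MatchFinder;

    // Sketch: count call sites of `foo::Bar::baz`, whether written as
    // `foo::Bar::baz(x, y)` or as the method call `x.baz(y)`, provided the call
    // resolves to that method.
    fn count_baz_calls(db: &ra_ide_db::RootDatabase, position: FilePosition) -> usize {
        // An empty selections vector means the search is not restricted to a selection.
        let mut match_finder = MatchFinder::in_context(db, position, vec![]);
        match_finder.add_search_pattern("foo::Bar::baz($s, $a)".parse().unwrap()).unwrap();
        match_finder.matches().flattened().matches.len()
    }
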
@@ -56,10 +59,11 @@ pub fn parse_search_replace(
     rule: &str,
     parse_only: bool,
     db: &RootDatabase,
-    position: FilePosition,
+    resolve_context: FilePosition,
+    selections: Vec<FileRange>,
 ) -> Result<Vec<SourceFileEdit>, SsrError> {
     let rule: SsrRule = rule.parse()?;
-    let mut match_finder = MatchFinder::in_context(db, position);
+    let mut match_finder = MatchFinder::in_context(db, resolve_context, selections);
     match_finder.add_rule(rule)?;
     if parse_only {
         return Ok(Vec::new());
@@ -52,6 +52,7 @@ pub struct MatchFinder<'db> {
     sema: Semantics<'db, ra_ide_db::RootDatabase>,
     rules: Vec<ResolvedRule>,
     resolution_scope: resolving::ResolutionScope<'db>,
+    restrict_ranges: Vec<FileRange>,
 }
 
 impl<'db> MatchFinder<'db> {
@@ -60,10 +61,17 @@ impl<'db> MatchFinder<'db> {
     pub fn in_context(
         db: &'db ra_ide_db::RootDatabase,
         lookup_context: FilePosition,
+        mut restrict_ranges: Vec<FileRange>,
     ) -> MatchFinder<'db> {
+        restrict_ranges.retain(|range| !range.range.is_empty());
         let sema = Semantics::new(db);
         let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context);
-        MatchFinder { sema: Semantics::new(db), rules: Vec::new(), resolution_scope }
+        MatchFinder {
+            sema: Semantics::new(db),
+            rules: Vec::new(),
+            resolution_scope,
+            restrict_ranges,
+        }
     }
 
     /// Constructs an instance using the start of the first file in `db` as the lookup context.
@@ -79,6 +87,7 @@ impl<'db> MatchFinder<'db> {
             Ok(MatchFinder::in_context(
                 db,
                 FilePosition { file_id: first_file_id, offset: 0.into() },
+                vec![],
             ))
         } else {
             bail!("No files to search");
@@ -706,8 +706,8 @@ mod tests {
         let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
         let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
 
-        let (db, position) = crate::tests::single_file(input);
-        let mut match_finder = MatchFinder::in_context(&db, position);
+        let (db, position, selections) = crate::tests::single_file(input);
+        let mut match_finder = MatchFinder::in_context(&db, position, selections);
         match_finder.add_rule(rule).unwrap();
         let matches = match_finder.matches();
         assert_eq!(matches.matches.len(), 1);
@@ -141,14 +141,14 @@ impl Resolver<'_, '_> {
 impl<'db> ResolutionScope<'db> {
     pub(crate) fn new(
         sema: &hir::Semantics<'db, ra_ide_db::RootDatabase>,
-        lookup_context: FilePosition,
+        resolve_context: FilePosition,
     ) -> ResolutionScope<'db> {
         use ra_syntax::ast::AstNode;
-        let file = sema.parse(lookup_context.file_id);
+        let file = sema.parse(resolve_context.file_id);
         // Find a node at the requested position, falling back to the whole file.
         let node = file
             .syntax()
-            .token_at_offset(lookup_context.offset)
+            .token_at_offset(resolve_context.offset)
             .left_biased()
             .map(|token| token.parent())
             .unwrap_or_else(|| file.syntax().clone());
|
@ -156,7 +156,7 @@ impl<'db> ResolutionScope<'db> {
|
||||||
let scope = sema.scope(&node);
|
let scope = sema.scope(&node);
|
||||||
ResolutionScope {
|
ResolutionScope {
|
||||||
scope,
|
scope,
|
||||||
hygiene: hir::Hygiene::new(sema.db, lookup_context.file_id.into()),
|
hygiene: hir::Hygiene::new(sema.db, resolve_context.file_id.into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -5,12 +5,13 @@ use crate::{
     resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
     Match, MatchFinder,
 };
-use ra_db::FileRange;
+use ra_db::{FileId, FileRange};
 use ra_ide_db::{
     defs::Definition,
     search::{Reference, SearchScope},
 };
 use ra_syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
+use rustc_hash::FxHashSet;
 use test_utils::mark;
 
 /// A cache for the results of find_usages. This is for when we have multiple patterns that have the
@@ -54,11 +55,7 @@ impl<'db> MatchFinder<'db> {
                         mark::hit!(use_declaration_with_braces);
                         continue;
                     }
-                    if let Ok(m) =
-                        matching::get_match(false, rule, &node_to_match, &None, &self.sema)
-                    {
-                        matches_out.push(m);
-                    }
+                    self.try_add_match(rule, &node_to_match, &None, matches_out);
                 }
             }
         }
|
@ -121,25 +118,39 @@ impl<'db> MatchFinder<'db> {
|
||||||
// FIXME: We should ideally have a test that checks that we edit local roots and not library
|
// FIXME: We should ideally have a test that checks that we edit local roots and not library
|
||||||
// roots. This probably would require some changes to fixtures, since currently everything
|
// roots. This probably would require some changes to fixtures, since currently everything
|
||||||
// seems to get put into a single source root.
|
// seems to get put into a single source root.
|
||||||
use ra_db::SourceDatabaseExt;
|
|
||||||
use ra_ide_db::symbol_index::SymbolsDatabase;
|
|
||||||
let mut files = Vec::new();
|
let mut files = Vec::new();
|
||||||
for &root in self.sema.db.local_roots().iter() {
|
self.search_files_do(|file_id| {
|
||||||
let sr = self.sema.db.source_root(root);
|
files.push(file_id);
|
||||||
files.extend(sr.iter());
|
});
|
||||||
}
|
|
||||||
SearchScope::files(&files)
|
SearchScope::files(&files)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
|
fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
|
||||||
|
self.search_files_do(|file_id| {
|
||||||
|
let file = self.sema.parse(file_id);
|
||||||
|
let code = file.syntax();
|
||||||
|
self.slow_scan_node(code, rule, &None, matches_out);
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
|
||||||
|
if self.restrict_ranges.is_empty() {
|
||||||
|
// Unrestricted search.
|
||||||
use ra_db::SourceDatabaseExt;
|
use ra_db::SourceDatabaseExt;
|
||||||
use ra_ide_db::symbol_index::SymbolsDatabase;
|
use ra_ide_db::symbol_index::SymbolsDatabase;
|
||||||
for &root in self.sema.db.local_roots().iter() {
|
for &root in self.sema.db.local_roots().iter() {
|
||||||
let sr = self.sema.db.source_root(root);
|
let sr = self.sema.db.source_root(root);
|
||||||
for file_id in sr.iter() {
|
for file_id in sr.iter() {
|
||||||
let file = self.sema.parse(file_id);
|
callback(file_id);
|
||||||
let code = file.syntax();
|
}
|
||||||
self.slow_scan_node(code, rule, &None, matches_out);
|
}
|
||||||
|
} else {
|
||||||
|
// Search is restricted, deduplicate file IDs (generally only one).
|
||||||
|
let mut files = FxHashSet::default();
|
||||||
|
for range in &self.restrict_ranges {
|
||||||
|
if files.insert(range.file_id) {
|
||||||
|
callback(range.file_id);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -154,9 +165,7 @@ impl<'db> MatchFinder<'db> {
         if !is_search_permitted(code) {
             return;
         }
-        if let Ok(m) = matching::get_match(false, rule, &code, restrict_range, &self.sema) {
-            matches_out.push(m);
-        }
+        self.try_add_match(rule, &code, restrict_range, matches_out);
         // If we've got a macro call, we already tried matching it pre-expansion, which is the only
         // way to match the whole macro, now try expanding it and matching the expansion.
         if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
|
@ -178,6 +187,38 @@ impl<'db> MatchFinder<'db> {
|
||||||
self.slow_scan_node(&child, rule, restrict_range, matches_out);
|
self.slow_scan_node(&child, rule, restrict_range, matches_out);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn try_add_match(
|
||||||
|
&self,
|
||||||
|
rule: &ResolvedRule,
|
||||||
|
code: &SyntaxNode,
|
||||||
|
restrict_range: &Option<FileRange>,
|
||||||
|
matches_out: &mut Vec<Match>,
|
||||||
|
) {
|
||||||
|
if !self.within_range_restrictions(code) {
|
||||||
|
mark::hit!(replace_nonpath_within_selection);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if let Ok(m) = matching::get_match(false, rule, code, restrict_range, &self.sema) {
|
||||||
|
matches_out.push(m);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns whether `code` is within one of our range restrictions if we have any. No range
|
||||||
|
/// restrictions is considered unrestricted and always returns true.
|
||||||
|
fn within_range_restrictions(&self, code: &SyntaxNode) -> bool {
|
||||||
|
if self.restrict_ranges.is_empty() {
|
||||||
|
// There is no range restriction.
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
let node_range = self.sema.original_range(code);
|
||||||
|
for range in &self.restrict_ranges {
|
||||||
|
if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns whether we support matching within `node` and all of its ancestors.
|
/// Returns whether we support matching within `node` and all of its ancestors.
|
||||||
|
|
|
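
The `within_range_restrictions` helper added above keeps a match only when its original range lies entirely inside one of the selection ranges for the same file. A tiny hedged illustration of that containment rule (offsets invented, `ra_syntax::TextRange` assumed as the usual re-export):

    use ra_syntax::TextRange;

    fn main() {
        let selection = TextRange::new(10.into(), 50.into());
        // A match fully inside the selection would be kept...
        assert!(selection.contains_range(TextRange::new(20.into(), 30.into())));
        // ...while one that only straddles the selection boundary would be dropped.
        assert!(!selection.contains_range(TextRange::new(40.into(), 60.into())));
    }
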
@@ -1,9 +1,9 @@
 use crate::{MatchFinder, SsrRule};
 use expect::{expect, Expect};
-use ra_db::{salsa::Durability, FileId, FilePosition, SourceDatabaseExt};
+use ra_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt};
 use rustc_hash::FxHashSet;
 use std::sync::Arc;
-use test_utils::mark;
+use test_utils::{mark, RangeOrOffset};
 
 fn parse_error_text(query: &str) -> String {
     format!("{}", query.parse::<SsrRule>().unwrap_err())
@@ -60,20 +60,32 @@ fn parser_undefined_placeholder_in_replacement() {
 }
 
 /// `code` may optionally contain a cursor marker `<|>`. If it doesn't, then the position will be
-/// the start of the file.
-pub(crate) fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FilePosition) {
+/// the start of the file. If there's a second cursor marker, then we'll return a single range.
+pub(crate) fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FilePosition, Vec<FileRange>) {
     use ra_db::fixture::WithFixture;
     use ra_ide_db::symbol_index::SymbolsDatabase;
-    let (mut db, position) = if code.contains(test_utils::CURSOR_MARKER) {
-        ra_ide_db::RootDatabase::with_position(code)
+    let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) {
+        ra_ide_db::RootDatabase::with_range_or_offset(code)
     } else {
         let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(code);
-        (db, FilePosition { file_id, offset: 0.into() })
+        (db, file_id, RangeOrOffset::Offset(0.into()))
     };
+    let selections;
+    let position;
+    match range_or_offset {
+        RangeOrOffset::Range(range) => {
+            position = FilePosition { file_id, offset: range.start() };
+            selections = vec![FileRange { file_id, range: range }];
+        }
+        RangeOrOffset::Offset(offset) => {
+            position = FilePosition { file_id, offset };
+            selections = vec![];
+        }
+    }
     let mut local_roots = FxHashSet::default();
     local_roots.insert(ra_db::fixture::WORKSPACE);
     db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
-    (db, position)
+    (db, position, selections)
 }
 
 fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
@@ -81,8 +93,8 @@ fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
 }
 
 fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
-    let (db, position) = single_file(input);
-    let mut match_finder = MatchFinder::in_context(&db, position);
+    let (db, position, selections) = single_file(input);
+    let mut match_finder = MatchFinder::in_context(&db, position, selections);
     for rule in rules {
         let rule: SsrRule = rule.parse().unwrap();
         match_finder.add_rule(rule).unwrap();
@@ -112,8 +124,8 @@ fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet:
 }
 
 fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
-    let (db, position) = single_file(code);
-    let mut match_finder = MatchFinder::in_context(&db, position);
+    let (db, position, selections) = single_file(code);
+    let mut match_finder = MatchFinder::in_context(&db, position, selections);
     match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
     let matched_strings: Vec<String> =
         match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
@@ -124,8 +136,8 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
 }
 
 fn assert_no_match(pattern: &str, code: &str) {
-    let (db, position) = single_file(code);
-    let mut match_finder = MatchFinder::in_context(&db, position);
+    let (db, position, selections) = single_file(code);
+    let mut match_finder = MatchFinder::in_context(&db, position, selections);
     match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
     let matches = match_finder.matches().flattened().matches;
     if !matches.is_empty() {
@@ -135,8 +147,8 @@ fn assert_no_match(pattern: &str, code: &str) {
 }
 
 fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
-    let (db, position) = single_file(code);
-    let mut match_finder = MatchFinder::in_context(&db, position);
+    let (db, position, selections) = single_file(code);
+    let mut match_finder = MatchFinder::in_context(&db, position, selections);
     match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
     let mut reasons = Vec::new();
     for d in match_finder.debug_where_text_equal(position.file_id, snippet) {
@@ -490,9 +502,10 @@ fn no_match_split_expression() {
 
 #[test]
 fn replace_function_call() {
+    // This test also makes sure that we ignore empty-ranges.
     assert_ssr_transform(
         "foo() ==>> bar()",
-        "fn foo() {} fn bar() {} fn f1() {foo(); foo();}",
+        "fn foo() {<|><|>} fn bar() {} fn f1() {foo(); foo();}",
         expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]],
     );
 }
@@ -961,3 +974,52 @@ fn replace_local_variable_reference() {
         "#]],
     )
 }
+
+#[test]
+fn replace_path_within_selection() {
+    assert_ssr_transform(
+        "foo ==>> bar",
+        r#"
+        fn main() {
+            let foo = 41;
+            let bar = 42;
+            do_stuff(foo);
+            do_stuff(foo);<|>
+            do_stuff(foo);
+            do_stuff(foo);<|>
+            do_stuff(foo);
+        }"#,
+        expect![[r#"
+            fn main() {
+                let foo = 41;
+                let bar = 42;
+                do_stuff(foo);
+                do_stuff(foo);
+                do_stuff(bar);
+                do_stuff(bar);
+                do_stuff(foo);
+            }"#]],
+    );
+}
+
+#[test]
+fn replace_nonpath_within_selection() {
+    mark::check!(replace_nonpath_within_selection);
+    assert_ssr_transform(
+        "$a + $b ==>> $b * $a",
+        r#"
+        fn main() {
+            let v = 1 + 2;<|>
+            let v2 = 3 + 3;
+            let v3 = 4 + 5;<|>
+            let v4 = 6 + 7;
+        }"#,
+        expect![[r#"
+            fn main() {
+                let v = 1 + 2;
+                let v2 = 3 * 3;
+                let v3 = 5 * 4;
+                let v4 = 6 + 7;
+            }"#]],
+    );
+}
@@ -1026,9 +1026,18 @@ pub(crate) fn handle_ssr(
     params: lsp_ext::SsrParams,
 ) -> Result<lsp_types::WorkspaceEdit> {
     let _p = profile("handle_ssr");
+    let selections = params
+        .selections
+        .iter()
+        .map(|range| from_proto::file_range(&snap, params.position.text_document.clone(), *range))
+        .collect::<Result<Vec<_>, _>>()?;
     let position = from_proto::file_position(&snap, params.position)?;
-    let source_change =
-        snap.analysis.structural_search_replace(&params.query, params.parse_only, position)??;
+    let source_change = snap.analysis.structural_search_replace(
+        &params.query,
+        params.parse_only,
+        position,
+        selections,
+    )??;
     to_proto::workspace_edit(&snap, source_change)
 }
 
@@ -221,6 +221,9 @@ pub struct SsrParams {
     /// position.
     #[serde(flatten)]
     pub position: lsp_types::TextDocumentPositionParams,
+
+    /// Current selections. Search/replace will be restricted to these if non-empty.
+    pub selections: Vec<lsp_types::Range>,
 }
 
 pub enum StatusNotification {}
@@ -190,6 +190,7 @@ export function ssr(ctx: Ctx): Cmd {
         if (!editor || !client) return;
 
         const position = editor.selection.active;
+        const selections = editor.selections;
         const textDocument = { uri: editor.document.uri.toString() };
 
         const options: vscode.InputBoxOptions = {
@@ -198,7 +199,7 @@ export function ssr(ctx: Ctx): Cmd {
             validateInput: async (x: string) => {
                 try {
                     await client.sendRequest(ra.ssr, {
-                        query: x, parseOnly: true, textDocument, position,
+                        query: x, parseOnly: true, textDocument, position, selections,
                     });
                 } catch (e) {
                     return e.toString();
@@ -215,7 +216,7 @@ export function ssr(ctx: Ctx): Cmd {
             cancellable: false,
         }, async (_progress, _token) => {
             const edit = await client.sendRequest(ra.ssr, {
-                query: request, parseOnly: false, textDocument, position
+                query: request, parseOnly: false, textDocument, position, selections,
             });
 
             await vscode.workspace.applyEdit(client.protocol2CodeConverter.asWorkspaceEdit(edit));
@@ -95,6 +95,7 @@ export interface SsrParams {
     parseOnly: boolean;
     textDocument: lc.TextDocumentIdentifier;
     position: lc.Position;
+    selections: lc.Range[];
 }
 export const ssr = new lc.RequestType<SsrParams, lc.WorkspaceEdit, void>('experimental/ssr');
 