5518: Use resolved paths in SSR rules r=matklad a=davidlattimore

The main user-visible changes are:
* SSR now matches paths based on whether they resolve to the same thing instead of whether they're written the same.
  * So `foo()` won't match a call to some other function that happens to be named `foo`, but it will match `bar::foo()` if that path resolves to the same `foo`.
* Paths in the replacement will now be rendered with appropriate qualification for their context.
  * For example `foo::Bar` will render as just `Bar` inside the module `foo`, but might render as `baz::foo::Bar` from elsewhere.
* This means that all paths in the search pattern and replacement template must be able to be resolved.
* It now also matters where you invoke SSR from, since paths are resolved relative to wherever that is.
* Search now uses find-uses on paths to locate places to try matching. This means that when a path is present in the pattern, search will generally be pretty fast.
* Function calls can now match method calls again, but this time only if they resolve to the same function.

Co-authored-by: David Lattimore <dml@google.com>
This commit is contained in:
bors[bot] 2020-07-24 12:46:55 +00:00 committed by GitHub
commit c3defe2532
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
20 changed files with 1437 additions and 399 deletions

1
Cargo.lock generated
View file

@ -1164,6 +1164,7 @@ dependencies = [
name = "ra_ssr"
version = "0.1.0"
dependencies = [
"expect",
"ra_db",
"ra_hir",
"ra_ide_db",

View file

@ -505,9 +505,10 @@ impl Analysis {
&self,
query: &str,
parse_only: bool,
position: FilePosition,
) -> Cancelable<Result<SourceChange, SsrError>> {
self.with_db(|db| {
let edits = ssr::parse_search_replace(query, parse_only, db)?;
let edits = ssr::parse_search_replace(query, parse_only, db, position)?;
Ok(SourceChange::from(edits))
})
}

View file

@ -1,5 +1,5 @@
use ra_db::SourceDatabaseExt;
use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase};
use ra_db::FilePosition;
use ra_ide_db::RootDatabase;
use crate::SourceFileEdit;
use ra_ssr::{MatchFinder, SsrError, SsrRule};
@ -11,6 +11,19 @@ use ra_ssr::{MatchFinder, SsrError, SsrRule};
// A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
// Within a macro call, a placeholder will match up until whatever token follows the placeholder.
//
// All paths in both the search pattern and the replacement template must resolve in the context
// in which this command is invoked. Paths in the search pattern will then match the code if they
// resolve to the same item, even if they're written differently. For example if we invoke the
// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
// to `foo::Bar` will match.
//
// Paths in the replacement template will be rendered appropriately for the context in which the
// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
// code in the `foo` module, we'll insert just `Bar`.
//
// Method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will match
// `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`.
//
// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
//
// Supported constraints:
@ -43,21 +56,13 @@ pub fn parse_search_replace(
rule: &str,
parse_only: bool,
db: &RootDatabase,
position: FilePosition,
) -> Result<Vec<SourceFileEdit>, SsrError> {
let mut edits = vec![];
let rule: SsrRule = rule.parse()?;
let mut match_finder = MatchFinder::in_context(db, position);
match_finder.add_rule(rule)?;
if parse_only {
return Ok(edits);
return Ok(Vec::new());
}
let mut match_finder = MatchFinder::new(db);
match_finder.add_rule(rule);
for &root in db.local_roots().iter() {
let sr = db.source_root(root);
for file_id in sr.iter() {
if let Some(edit) = match_finder.edits_for_file(file_id) {
edits.push(SourceFileEdit { file_id, edit });
}
}
}
Ok(edits)
Ok(match_finder.edits())
}

View file

@ -290,20 +290,25 @@ pub fn classify_name_ref(
let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
let resolved = sema.resolve_path(&path)?;
let res = match resolved {
PathResolution::Def(def) => Definition::ModuleDef(def),
PathResolution::AssocItem(item) => {
let def = match item {
hir::AssocItem::Function(it) => it.into(),
hir::AssocItem::Const(it) => it.into(),
hir::AssocItem::TypeAlias(it) => it.into(),
};
Definition::ModuleDef(def)
}
PathResolution::Local(local) => Definition::Local(local),
PathResolution::TypeParam(par) => Definition::TypeParam(par),
PathResolution::Macro(def) => Definition::Macro(def),
PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def),
};
Some(NameRefClass::Definition(res))
Some(NameRefClass::Definition(resolved.into()))
}
impl From<PathResolution> for Definition {
    /// Converts the result of resolving a path into the corresponding `Definition`.
    fn from(path_resolution: PathResolution) -> Self {
        match path_resolution {
            PathResolution::Def(def) => Definition::ModuleDef(def),
            // Associated items each convert to a module-level definition of the matching kind.
            PathResolution::AssocItem(hir::AssocItem::Function(it)) => {
                Definition::ModuleDef(it.into())
            }
            PathResolution::AssocItem(hir::AssocItem::Const(it)) => {
                Definition::ModuleDef(it.into())
            }
            PathResolution::AssocItem(hir::AssocItem::TypeAlias(it)) => {
                Definition::ModuleDef(it.into())
            }
            PathResolution::Local(local) => Definition::Local(local),
            PathResolution::TypeParam(par) => Definition::TypeParam(par),
            PathResolution::Macro(def) => Definition::Macro(def),
            PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def),
        }
    }
}

View file

@ -60,6 +60,10 @@ impl SearchScope {
SearchScope::new(std::iter::once((file, None)).collect())
}
pub fn files(files: &[FileId]) -> SearchScope {
SearchScope::new(files.iter().map(|f| (*f, None)).collect())
}
pub fn intersection(&self, other: &SearchScope) -> SearchScope {
let (mut small, mut large) = (&self.entries, &other.entries);
if small.len() > large.len() {

View file

@ -18,3 +18,6 @@ ra_ide_db = { path = "../ra_ide_db" }
hir = { path = "../ra_hir", package = "ra_hir" }
rustc-hash = "1.1.0"
test_utils = { path = "../test_utils" }
[dev-dependencies]
expect = { path = "../expect" }

View file

@ -4,44 +4,41 @@
//! based on a template.
mod matching;
mod nester;
mod parsing;
mod replacing;
mod resolving;
mod search;
#[macro_use]
mod errors;
#[cfg(test)]
mod tests;
use crate::errors::bail;
pub use crate::errors::SsrError;
pub use crate::matching::Match;
use crate::matching::{record_match_fails_reasons_scope, MatchFailureReason};
use crate::matching::MatchFailureReason;
use hir::Semantics;
use ra_db::{FileId, FileRange};
use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange};
use ra_text_edit::TextEdit;
use ra_db::{FileId, FilePosition, FileRange};
use ra_ide_db::source_change::SourceFileEdit;
use ra_syntax::{ast, AstNode, SyntaxNode, TextRange};
use resolving::ResolvedRule;
use rustc_hash::FxHashMap;
// A structured search replace rule. Create by calling `parse` on a str.
#[derive(Debug)]
pub struct SsrRule {
/// A structured pattern that we're searching for.
pattern: SsrPattern,
pattern: parsing::RawPattern,
/// What we'll replace it with.
template: parsing::SsrTemplate,
template: parsing::RawPattern,
parsed_rules: Vec<parsing::ParsedRule>,
}
#[derive(Debug)]
pub struct SsrPattern {
raw: parsing::RawSearchPattern,
/// Placeholders keyed by the stand-in ident that we use in Rust source code.
placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
// We store our search pattern, parsed as each different kind of thing we can look for. As we
// traverse the AST, we get the appropriate one of these for the type of node we're on. For many
// search patterns, only some of these will be present.
expr: Option<SyntaxNode>,
type_ref: Option<SyntaxNode>,
item: Option<SyntaxNode>,
path: Option<SyntaxNode>,
pattern: Option<SyntaxNode>,
raw: parsing::RawPattern,
parsed_rules: Vec<parsing::ParsedRule>,
}
#[derive(Debug, Default)]
@ -53,40 +50,112 @@ pub struct SsrMatches {
pub struct MatchFinder<'db> {
/// Our source of information about the user's code.
sema: Semantics<'db, ra_ide_db::RootDatabase>,
rules: Vec<SsrRule>,
rules: Vec<ResolvedRule>,
scope: hir::SemanticsScope<'db>,
hygiene: hir::Hygiene,
}
impl<'db> MatchFinder<'db> {
pub fn new(db: &'db ra_ide_db::RootDatabase) -> MatchFinder<'db> {
MatchFinder { sema: Semantics::new(db), rules: Vec::new() }
    /// Constructs a new instance where names will be looked up as if they appeared at
    /// `lookup_context`.
    pub fn in_context(
        db: &'db ra_ide_db::RootDatabase,
        lookup_context: FilePosition,
    ) -> MatchFinder<'db> {
        let sema = Semantics::new(db);
        let file = sema.parse(lookup_context.file_id);
        // Find a node at the requested position, falling back to the whole file.
        let node = file
            .syntax()
            .token_at_offset(lookup_context.offset)
            .left_biased()
            .map(|token| token.parent())
            .unwrap_or_else(|| file.syntax().clone());
        let scope = sema.scope(&node);
        MatchFinder {
            // NOTE(review): a fresh `Semantics` is stored rather than reusing `sema` above —
            // presumably because `scope` was obtained through `sema`; confirm before changing.
            sema: Semantics::new(db),
            rules: Vec::new(),
            scope,
            hygiene: hir::Hygiene::new(db, lookup_context.file_id.into()),
        }
    }
pub fn add_rule(&mut self, rule: SsrRule) {
self.rules.push(rule);
    /// Constructs an instance using the start of the first file in `db` as the lookup context.
    /// Fails if the database contains no local files.
    pub fn at_first_file(db: &'db ra_ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
        use ra_db::SourceDatabaseExt;
        use ra_ide_db::symbol_index::SymbolsDatabase;
        if let Some(first_file_id) = db
            .local_roots()
            .iter()
            .next()
            .and_then(|root| db.source_root(root.clone()).iter().next())
        {
            Ok(MatchFinder::in_context(
                db,
                // Offset 0: names resolve as if written at the very start of the file.
                FilePosition { file_id: first_file_id, offset: 0.into() },
            ))
        } else {
            bail!("No files to search");
        }
    }
    /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
    /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
    /// match to it.
    ///
    /// Returns an error if the rule cannot be resolved in the current lookup context.
    pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
        // A single `SsrRule` may have parsed as several kinds of node (expression, type, item,
        // path, pattern); resolve and register each. `self.rules.len()` records addition order,
        // which is what gives earlier rules precedence.
        for parsed_rule in rule.parsed_rules {
            self.rules.push(ResolvedRule::new(
                parsed_rule,
                &self.scope,
                &self.hygiene,
                self.rules.len(),
            )?);
        }
        Ok(())
    }
/// Finds matches for all added rules and returns edits for all found matches.
pub fn edits(&self) -> Vec<SourceFileEdit> {
use ra_db::SourceDatabaseExt;
let mut matches_by_file = FxHashMap::default();
for m in self.matches().matches {
matches_by_file
.entry(m.range.file_id)
.or_insert_with(|| SsrMatches::default())
.matches
.push(m);
}
let mut edits = vec![];
for (file_id, matches) in matches_by_file {
let edit =
replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id), &self.rules);
edits.push(SourceFileEdit { file_id, edit });
}
edits
}
/// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
/// intend to do replacement, use `add_rule` instead.
pub fn add_search_pattern(&mut self, pattern: SsrPattern) {
self.add_rule(SsrRule { pattern, template: "()".parse().unwrap() })
}
pub fn edits_for_file(&self, file_id: FileId) -> Option<TextEdit> {
let matches = self.find_matches_in_file(file_id);
if matches.matches.is_empty() {
None
} else {
use ra_db::SourceDatabaseExt;
Some(replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id)))
    pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
        // Register each parsed form of the pattern as a template-less rule; the matching
        // machinery is shared with `add_rule`.
        for parsed_rule in pattern.parsed_rules {
            self.rules.push(ResolvedRule::new(
                parsed_rule,
                &self.scope,
                &self.hygiene,
                self.rules.len(),
            )?);
        }
        Ok(())
    }
}
pub fn find_matches_in_file(&self, file_id: FileId) -> SsrMatches {
let file = self.sema.parse(file_id);
let code = file.syntax();
let mut matches = SsrMatches::default();
self.find_matches(code, &None, &mut matches);
matches
    /// Returns matches for all added rules.
    pub fn matches(&self) -> SsrMatches {
        let mut matches = Vec::new();
        // The usage cache is reused across rules so find-usages work isn't redone per rule.
        let mut usage_cache = search::UsageCache::default();
        for rule in &self.rules {
            self.find_matches_for_rule(rule, &mut usage_cache, &mut matches);
        }
        // Convert the flat list into nested form, dropping matches that collide with
        // higher-precedence ones.
        nester::nest_and_remove_collisions(matches, &self.sema)
    }
/// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
@ -115,53 +184,6 @@ impl<'db> MatchFinder<'db> {
res
}
fn find_matches(
&self,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
matches_out: &mut SsrMatches,
) {
for rule in &self.rules {
if let Ok(mut m) = matching::get_match(false, rule, &code, restrict_range, &self.sema) {
// Continue searching in each of our placeholders.
for placeholder_value in m.placeholder_values.values_mut() {
if let Some(placeholder_node) = &placeholder_value.node {
// Don't search our placeholder if it's the entire matched node, otherwise we'd
// find the same match over and over until we got a stack overflow.
if placeholder_node != code {
self.find_matches(
placeholder_node,
restrict_range,
&mut placeholder_value.inner_matches,
);
}
}
}
matches_out.matches.push(m);
return;
}
}
// If we've got a macro call, we already tried matching it pre-expansion, which is the only
// way to match the whole macro, now try expanding it and matching the expansion.
if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
if let Some(expanded) = self.sema.expand(&macro_call) {
if let Some(tt) = macro_call.token_tree() {
// When matching within a macro expansion, we only want to allow matches of
// nodes that originated entirely from within the token tree of the macro call.
// i.e. we don't want to match something that came from the macro itself.
self.find_matches(
&expanded,
&Some(self.sema.original_range(tt.syntax())),
matches_out,
);
}
}
}
for child in code.children() {
self.find_matches(&child, restrict_range, matches_out);
}
}
fn output_debug_for_nodes_at_range(
&self,
node: &SyntaxNode,
@ -177,8 +199,17 @@ impl<'db> MatchFinder<'db> {
}
if node_range.range == range.range {
for rule in &self.rules {
let pattern =
rule.pattern.tree_for_kind_with_reason(node.kind()).map(|p| p.clone());
// For now we ignore rules that have a different kind than our node, otherwise
// we get lots of noise. If at some point we add support for restricting rules
// to a particular kind of thing (e.g. only match type references), then we can
// relax this. We special-case expressions, since function calls can match
// method calls.
if rule.pattern.node.kind() != node.kind()
&& !(ast::Expr::can_cast(rule.pattern.node.kind())
&& ast::Expr::can_cast(node.kind()))
{
continue;
}
out.push(MatchDebugInfo {
matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
.map_err(|e| MatchFailureReason {
@ -186,7 +217,7 @@ impl<'db> MatchFinder<'db> {
"Match failed, but no reason was given".to_owned()
}),
}),
pattern,
pattern: rule.pattern.node.clone(),
node: node.clone(),
});
}
@ -209,9 +240,8 @@ impl<'db> MatchFinder<'db> {
pub struct MatchDebugInfo {
node: SyntaxNode,
/// Our search pattern parsed as the same kind of syntax node as `node`. e.g. expression, item,
/// etc. Will be absent if the pattern can't be parsed as that kind.
pattern: Result<SyntaxNode, MatchFailureReason>,
/// Our search pattern parsed as an expression or item, etc
pattern: SyntaxNode,
matched: Result<Match, MatchFailureReason>,
}
@ -228,29 +258,12 @@ impl std::fmt::Debug for MatchDebugInfo {
self.node
)?;
writeln!(f, "========= PATTERN ==========")?;
match &self.pattern {
Ok(pattern) => {
writeln!(f, "{:#?}", pattern)?;
}
Err(err) => {
writeln!(f, "{}", err.reason)?;
}
}
writeln!(f, "{:#?}", self.pattern)?;
writeln!(f, "============================")?;
Ok(())
}
}
impl SsrPattern {
fn tree_for_kind_with_reason(
&self,
kind: SyntaxKind,
) -> Result<&SyntaxNode, MatchFailureReason> {
record_match_fails_reasons_scope(true, || self.tree_for_kind(kind))
.map_err(|e| MatchFailureReason { reason: e.reason.unwrap() })
}
}
impl SsrMatches {
/// Returns `self` with any nested matches removed and made into top-level matches.
pub fn flattened(self) -> SsrMatches {

View file

@ -2,8 +2,9 @@
//! process of matching, placeholder values are recorded.
use crate::{
parsing::{Constraint, NodeKind, Placeholder, SsrTemplate},
SsrMatches, SsrPattern, SsrRule,
parsing::{Constraint, NodeKind, Placeholder},
resolving::{ResolvedPattern, ResolvedRule},
SsrMatches,
};
use hir::Semantics;
use ra_db::FileRange;
@ -48,9 +49,11 @@ pub struct Match {
pub(crate) matched_node: SyntaxNode,
pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>,
pub(crate) ignored_comments: Vec<ast::Comment>,
// A copy of the template for the rule that produced this match. We store this on the match for
// if/when we do replacement.
pub(crate) template: SsrTemplate,
pub(crate) rule_index: usize,
/// The depth of matched_node.
pub(crate) depth: usize,
// Each path in the template rendered for the module in which the match was found.
pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>,
}
/// Represents a `$var` in an SSR query.
@ -86,7 +89,7 @@ pub(crate) struct MatchFailed {
/// parent module, we don't populate nested matches.
pub(crate) fn get_match(
debug_active: bool,
rule: &SsrRule,
rule: &ResolvedRule,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
sema: &Semantics<ra_ide_db::RootDatabase>,
@ -102,7 +105,7 @@ struct Matcher<'db, 'sema> {
/// If any placeholders come from anywhere outside of this range, then the match will be
/// rejected.
restrict_range: Option<FileRange>,
rule: &'sema SsrRule,
rule: &'sema ResolvedRule,
}
/// Which phase of matching we're currently performing. We do two phases because most attempted
@ -117,26 +120,35 @@ enum Phase<'a> {
impl<'db, 'sema> Matcher<'db, 'sema> {
fn try_match(
rule: &'sema SsrRule,
rule: &ResolvedRule,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>,
) -> Result<Match, MatchFailed> {
let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule };
let pattern_tree = rule.pattern.tree_for_kind(code.kind())?;
// First pass at matching, where we check that node types and idents match.
match_state.attempt_match_node(&mut Phase::First, &pattern_tree, code)?;
match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
match_state.validate_range(&sema.original_range(code))?;
let mut the_match = Match {
range: sema.original_range(code),
matched_node: code.clone(),
placeholder_values: FxHashMap::default(),
ignored_comments: Vec::new(),
template: rule.template.clone(),
rule_index: rule.index,
depth: 0,
rendered_template_paths: FxHashMap::default(),
};
// Second matching pass, where we record placeholder matches, ignored comments and maybe do
// any other more expensive checks that we didn't want to do on the first pass.
match_state.attempt_match_node(&mut Phase::Second(&mut the_match), &pattern_tree, code)?;
match_state.attempt_match_node(
&mut Phase::Second(&mut the_match),
&rule.pattern.node,
code,
)?;
the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count();
if let Some(template) = &rule.template {
the_match.render_template_paths(template, sema)?;
}
Ok(the_match)
}
@ -177,10 +189,17 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
}
return Ok(());
}
// Non-placeholders.
// We allow a UFCS call to match a method call, provided they resolve to the same function.
if let Some(pattern_function) = self.rule.pattern.ufcs_function_calls.get(pattern) {
if let (Some(pattern), Some(code)) =
(ast::CallExpr::cast(pattern.clone()), ast::MethodCallExpr::cast(code.clone()))
{
return self.attempt_match_ufcs(phase, &pattern, &code, *pattern_function);
}
}
if pattern.kind() != code.kind() {
fail_match!(
"Pattern had a `{}` ({:?}), code had `{}` ({:?})",
"Pattern had `{}` ({:?}), code had `{}` ({:?})",
pattern.text(),
pattern.kind(),
code.text(),
@ -194,6 +213,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
self.attempt_match_record_field_list(phase, pattern, code)
}
SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code),
_ => self.attempt_match_node_children(phase, pattern, code),
}
}
@ -310,6 +330,64 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
Ok(())
}
    /// Paths are matched based on whether they refer to the same thing, even if they're written
    /// differently.
    fn attempt_match_path(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) {
            let pattern_path = ast::Path::cast(pattern.clone()).unwrap();
            let code_path = ast::Path::cast(code.clone()).unwrap();
            if let (Some(pattern_segment), Some(code_segment)) =
                (pattern_path.segment(), code_path.segment())
            {
                // Match everything within the segment except for the name-ref, which is handled
                // separately via comparing what the path resolves to below.
                self.attempt_match_opt(
                    phase,
                    pattern_segment.type_arg_list(),
                    code_segment.type_arg_list(),
                )?;
                self.attempt_match_opt(
                    phase,
                    pattern_segment.param_list(),
                    code_segment.param_list(),
                )?;
            }
            // Resolution of the code path is only attempted in the second pass, after the
            // structural checks above have succeeded.
            if matches!(phase, Phase::Second(_)) {
                let resolution = self
                    .sema
                    .resolve_path(&code_path)
                    .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?;
                if pattern_resolved.resolution != resolution {
                    fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text());
                }
            }
        } else {
            // No recorded resolution for this pattern path; fall back to matching its children
            // structurally.
            return self.attempt_match_node_children(phase, pattern, code);
        }
        Ok(())
    }
fn attempt_match_opt<T: AstNode>(
&self,
phase: &mut Phase,
pattern: Option<T>,
code: Option<T>,
) -> Result<(), MatchFailed> {
match (pattern, code) {
(Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()),
(None, None) => Ok(()),
(Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
(None, Some(c)) => {
fail_match!("Nothing in pattern to match code `{}`", c.syntax().text())
}
}
}
/// We want to allow the records to match in any order, so we have special matching logic for
/// them.
fn attempt_match_record_field_list(
@ -443,9 +521,61 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
Ok(())
}
    /// Matches a UFCS-style call in the pattern (e.g. `foo::Bar::baz($s, $a)`) against a method
    /// call in the code (e.g. `$s.baz($a)`), provided both resolve to the same function.
    fn attempt_match_ufcs(
        &self,
        phase: &mut Phase,
        pattern: &ast::CallExpr,
        code: &ast::MethodCallExpr,
        pattern_function: hir::Function,
    ) -> Result<(), MatchFailed> {
        use ast::ArgListOwner;
        let code_resolved_function = self
            .sema
            .resolve_method_call(code)
            .ok_or_else(|| match_error!("Failed to resolve method call"))?;
        if pattern_function != code_resolved_function {
            fail_match!("Method call resolved to a different function");
        }
        // Check arguments. The first argument of the UFCS call matches the method call's
        // receiver expression.
        let mut pattern_args = pattern
            .arg_list()
            .ok_or_else(|| match_error!("Pattern function call has no args"))?
            .args();
        self.attempt_match_opt(phase, pattern_args.next(), code.expr())?;
        let mut code_args =
            code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args();
        // Match remaining arguments pairwise; a length mismatch fails inside
        // `attempt_match_opt` via its (Some, None)/(None, Some) arms.
        loop {
            match (pattern_args.next(), code_args.next()) {
                (None, None) => return Ok(()),
                (p, c) => self.attempt_match_opt(phase, p, c)?,
            }
        }
    }
fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
only_ident(element.clone())
.and_then(|ident| self.rule.pattern.placeholders_by_stand_in.get(ident.text()))
only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident))
}
}
impl Match {
    /// Resolves each path in `template` to a `ModPath` appropriate for insertion at the location
    /// of this match, so that replacement can render paths with the right qualification for the
    /// destination module.
    fn render_template_paths(
        &mut self,
        template: &ResolvedPattern,
        sema: &Semantics<ra_ide_db::RootDatabase>,
    ) -> Result<(), MatchFailed> {
        let module = sema
            .scope(&self.matched_node)
            .module()
            .ok_or_else(|| match_error!("Matched node isn't in a module"))?;
        for (path, resolved_path) in &template.resolved_paths {
            if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
                // `find_use_path` determines how `module_def` can be referred to from `module`.
                let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {
                    // Fix: the `{}` placeholder was previously never filled in, so the error
                    // message literally contained "`{}`". Pass the path text, matching the style
                    // of the other `match_error!` call sites.
                    match_error!(
                        "Failed to render template path `{}` at match location",
                        path.text()
                    )
                })?;
                self.rendered_template_paths.insert(path.clone(), mod_path);
            }
        }
        Ok(())
    }
}
@ -510,28 +640,6 @@ impl PlaceholderMatch {
}
}
impl SsrPattern {
pub(crate) fn tree_for_kind(&self, kind: SyntaxKind) -> Result<&SyntaxNode, MatchFailed> {
let (tree, kind_name) = if ast::Expr::can_cast(kind) {
(&self.expr, "expression")
} else if ast::TypeRef::can_cast(kind) {
(&self.type_ref, "type reference")
} else if ast::ModuleItem::can_cast(kind) {
(&self.item, "item")
} else if ast::Path::can_cast(kind) {
(&self.path, "path")
} else if ast::Pat::can_cast(kind) {
(&self.pattern, "pattern")
} else {
fail_match!("Matching nodes of kind {:?} is not supported", kind);
};
match tree {
Some(tree) => Ok(tree),
None => fail_match!("Pattern cannot be parsed as a {}", kind_name),
}
}
}
impl NodeKind {
fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
let ok = match self {
@ -596,13 +704,12 @@ mod tests {
#[test]
fn parse_match_replace() {
let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
let input = "fn foo() {} fn main() { foo(1+2); }";
let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
use ra_db::fixture::WithFixture;
let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(input);
let mut match_finder = MatchFinder::new(&db);
match_finder.add_rule(rule);
let matches = match_finder.find_matches_in_file(file_id);
let (db, position) = crate::tests::single_file(input);
let mut match_finder = MatchFinder::in_context(&db, position);
match_finder.add_rule(rule).unwrap();
let matches = match_finder.matches();
assert_eq!(matches.matches.len(), 1);
assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
assert_eq!(matches.matches[0].placeholder_values.len(), 1);
@ -615,9 +722,11 @@ mod tests {
"1+2"
);
let edit = crate::replacing::matches_to_edit(&matches, input);
let edits = match_finder.edits();
assert_eq!(edits.len(), 1);
let edit = &edits[0];
let mut after = input.to_string();
edit.apply(&mut after);
assert_eq!(after, "fn foo() {} fn main() { bar(1+2); }");
edit.edit.apply(&mut after);
assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
}
}

View file

@ -0,0 +1,98 @@
//! Converts a flat collection of matches into a nested form suitable for replacement. When there
//! are multiple matches for a node, or that overlap, priority is given to the earlier rule. Nested
//! matches are only permitted if the inner match is contained entirely within a placeholder of an
//! outer match.
//!
//! For example, if our search pattern is `foo(foo($a))` and the code had `foo(foo(foo(foo(42))))`,
//! then we'll get 3 matches, however only the outermost and innermost matches can be accepted. The
//! middle match would take the second `foo` from the outer match.
use crate::{Match, SsrMatches};
use ra_syntax::SyntaxNode;
use rustc_hash::FxHashMap;
/// Turns a flat list of matches into nested `SsrMatches`, discarding matches that conflict with
/// higher-precedence ones.
pub(crate) fn nest_and_remove_collisions(
    mut matches: Vec<Match>,
    sema: &hir::Semantics<ra_ide_db::RootDatabase>,
) -> SsrMatches {
    // Sort by depth, then by rule index. Processing shallower matches first means that by the
    // time a match is seen, any parent matches or conflicting matches have already been seen.
    // Ties on depth go to the rule that was added first.
    matches.sort_by_key(|m| (m.depth, m.rule_index));
    let mut collector = MatchCollector::default();
    for m in matches {
        collector.add_match(m, sema);
    }
    collector.into()
}
/// Accumulates matches, resolving conflicts and nesting sub-matches as they are added.
#[derive(Default)]
struct MatchCollector {
    // Top-level matches keyed by their matched syntax node; nested matches live inside the
    // placeholder values of these entries.
    matches_by_node: FxHashMap<SyntaxNode, Match>,
}
impl MatchCollector {
    /// Attempts to add `m` to matches. If it conflicts with an existing match, it is discarded. If
    /// it is entirely within a placeholder of an existing match, then it is added as a child
    /// match of the existing match.
    fn add_match(&mut self, m: Match, sema: &hir::Semantics<ra_ide_db::RootDatabase>) {
        let matched_node = m.matched_node.clone();
        // Exact-node conflict: an earlier (higher-precedence) match already claimed this node.
        if let Some(existing) = self.matches_by_node.get_mut(&matched_node) {
            try_add_sub_match(m, existing, sema);
            return;
        }
        // If any ancestor node is already matched, `m` lies inside that match — try to nest it
        // within one of that match's placeholders.
        for ancestor in sema.ancestors_with_macros(m.matched_node.clone()) {
            if let Some(existing) = self.matches_by_node.get_mut(&ancestor) {
                try_add_sub_match(m, existing, sema);
                return;
            }
        }
        // No conflicts: record as a new top-level match.
        self.matches_by_node.insert(matched_node, m);
    }
}
/// Attempts to add `m` as a sub-match of `existing`. If `m` isn't contained entirely within one
/// of `existing`'s placeholders, it is dropped.
fn try_add_sub_match(
    m: Match,
    existing: &mut Match,
    sema: &hir::Semantics<ra_ide_db::RootDatabase>,
) {
    for p in existing.placeholder_values.values_mut() {
        // Note, no need to check if p.range.file is equal to m.range.file, since we
        // already know we're within `existing`.
        if p.range.range.contains_range(m.range.range) {
            // Convert the inner matches in `p` into a temporary MatchCollector. When
            // we're done, we then convert it back into an SsrMatches. If we expected
            // lots of inner matches, it might be worthwhile keeping a MatchCollector
            // around for each placeholder match. However we expect most placeholders
            // will have 0 and a few will have 1. More than that should hopefully be
            // exceptional.
            let mut collector = MatchCollector::default();
            for m in std::mem::replace(&mut p.inner_matches.matches, Vec::new()) {
                collector.matches_by_node.insert(m.matched_node.clone(), m);
            }
            collector.add_match(m, sema);
            p.inner_matches = collector.into();
            // Stop after the first containing placeholder.
            break;
        }
    }
}
impl From<MatchCollector> for SsrMatches {
    /// Converts the collected matches into `SsrMatches`, ordered by file, then position.
    fn from(match_collector: MatchCollector) -> Self {
        let mut result = SsrMatches::default();
        for (_, m) in match_collector.matches_by_node {
            result.matches.push(m);
        }
        // Order matches by file_id then by start range. This should be sufficient since ranges
        // shouldn't be overlapping.
        result.matches.sort_by_key(|m| (m.range.file_id, m.range.range.start()));
        result
    }
}

View file

@ -7,17 +7,19 @@
use crate::errors::bail;
use crate::{SsrError, SsrPattern, SsrRule};
use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, T};
use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T};
use rustc_hash::{FxHashMap, FxHashSet};
use std::str::FromStr;
#[derive(Clone, Debug)]
pub(crate) struct SsrTemplate {
pub(crate) tokens: Vec<PatternElement>,
#[derive(Debug)]
pub(crate) struct ParsedRule {
pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
pub(crate) pattern: SyntaxNode,
pub(crate) template: Option<SyntaxNode>,
}
#[derive(Debug)]
pub(crate) struct RawSearchPattern {
pub(crate) struct RawPattern {
tokens: Vec<PatternElement>,
}
@ -54,6 +56,60 @@ pub(crate) struct Token {
pub(crate) text: SmolStr,
}
impl ParsedRule {
    /// Parses `pattern` (and `template`, when present) as each kind of syntax node we support —
    /// expression, type reference, item, path and pattern — producing one `ParsedRule` per kind
    /// that parsed successfully.
    fn new(
        pattern: &RawPattern,
        template: Option<&RawPattern>,
    ) -> Result<Vec<ParsedRule>, SsrError> {
        let pattern_src = pattern.as_rust_code();
        let template_src = template.map(|t| t.as_rust_code());
        let template_src = template_src.as_deref();
        let mut builder = RuleBuilder {
            placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
            rules: Vec::new(),
        };
        builder.try_add(ast::Expr::parse(&pattern_src), template_src.map(ast::Expr::parse));
        builder.try_add(ast::TypeRef::parse(&pattern_src), template_src.map(ast::TypeRef::parse));
        builder.try_add(
            ast::ModuleItem::parse(&pattern_src),
            template_src.map(ast::ModuleItem::parse),
        );
        builder.try_add(ast::Path::parse(&pattern_src), template_src.map(ast::Path::parse));
        builder.try_add(ast::Pat::parse(&pattern_src), template_src.map(ast::Pat::parse));
        builder.build()
    }
}
/// Accumulates `ParsedRule`s as each parse attempt in `ParsedRule::new` succeeds.
struct RuleBuilder {
    // Shared by every produced rule; keyed by the stand-in ident used in the Rust source.
    placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
    rules: Vec<ParsedRule>,
}
impl RuleBuilder {
    /// Registers a rule if the pattern parsed as this node kind and, when a template was
    /// supplied, the template parsed as the same kind. Parse failures are silently skipped;
    /// `build` reports an error only when nothing parsed at all.
    fn try_add<T: AstNode>(&mut self, pattern: Result<T, ()>, template: Option<Result<T, ()>>) {
        let pattern = match pattern {
            Ok(pattern) => pattern,
            Err(()) => return,
        };
        let template = match template {
            None => None,
            Some(Ok(template)) => Some(template.syntax().clone()),
            // The pattern parsed as this kind but the template didn't: not a usable rule.
            Some(Err(())) => return,
        };
        self.rules.push(ParsedRule {
            placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
            pattern: pattern.syntax().clone(),
            template,
        });
    }

    fn build(self) -> Result<Vec<ParsedRule>, SsrError> {
        if self.rules.is_empty() {
            bail!("Not a valid Rust expression, type, item, path or pattern");
        }
        Ok(self.rules)
    }
}
impl FromStr for SsrRule {
type Err = SsrError;
@ -68,21 +124,24 @@ impl FromStr for SsrRule {
if it.next().is_some() {
return Err(SsrError("More than one delimiter found".into()));
}
let rule = SsrRule { pattern: pattern.parse()?, template: template.parse()? };
let raw_pattern = pattern.parse()?;
let raw_template = template.parse()?;
let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?;
let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules };
validate_rule(&rule)?;
Ok(rule)
}
}
impl FromStr for RawSearchPattern {
impl FromStr for RawPattern {
type Err = SsrError;
fn from_str(pattern_str: &str) -> Result<RawSearchPattern, SsrError> {
Ok(RawSearchPattern { tokens: parse_pattern(pattern_str)? })
fn from_str(pattern_str: &str) -> Result<RawPattern, SsrError> {
Ok(RawPattern { tokens: parse_pattern(pattern_str)? })
}
}
impl RawSearchPattern {
impl RawPattern {
/// Returns this search pattern as Rust source code that we can feed to the Rust parser.
fn as_rust_code(&self) -> String {
let mut res = String::new();
@ -95,7 +154,7 @@ impl RawSearchPattern {
res
}
fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> {
pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> {
let mut res = FxHashMap::default();
for t in &self.tokens {
if let PatternElement::Placeholder(placeholder) = t {
@ -110,41 +169,9 @@ impl FromStr for SsrPattern {
type Err = SsrError;
fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> {
let raw: RawSearchPattern = pattern_str.parse()?;
let raw_str = raw.as_rust_code();
let res = SsrPattern {
expr: ast::Expr::parse(&raw_str).ok().map(|n| n.syntax().clone()),
type_ref: ast::TypeRef::parse(&raw_str).ok().map(|n| n.syntax().clone()),
item: ast::ModuleItem::parse(&raw_str).ok().map(|n| n.syntax().clone()),
path: ast::Path::parse(&raw_str).ok().map(|n| n.syntax().clone()),
pattern: ast::Pat::parse(&raw_str).ok().map(|n| n.syntax().clone()),
placeholders_by_stand_in: raw.placeholders_by_stand_in(),
raw,
};
if res.expr.is_none()
&& res.type_ref.is_none()
&& res.item.is_none()
&& res.path.is_none()
&& res.pattern.is_none()
{
bail!("Pattern is not a valid Rust expression, type, item, path or pattern");
}
Ok(res)
}
}
impl FromStr for SsrTemplate {
type Err = SsrError;
fn from_str(pattern_str: &str) -> Result<SsrTemplate, SsrError> {
let tokens = parse_pattern(pattern_str)?;
// Validate that the template is a valid fragment of Rust code. We reuse the validation
// logic for search patterns since the only thing that differs is the error message.
if SsrPattern::from_str(pattern_str).is_err() {
bail!("Replacement is not a valid Rust expression, type, item, path or pattern");
}
// Our actual template needs to preserve whitespace, so we can't reuse `tokens`.
Ok(SsrTemplate { tokens })
let raw_pattern = pattern_str.parse()?;
let parsed_rules = ParsedRule::new(&raw_pattern, None)?;
Ok(SsrPattern { raw: raw_pattern, parsed_rules })
}
}
@ -173,7 +200,7 @@ fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
/// pattern didn't define.
fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
let mut defined_placeholders = FxHashSet::default();
for p in &rule.pattern.raw.tokens {
for p in &rule.pattern.tokens {
if let PatternElement::Placeholder(placeholder) = p {
defined_placeholders.insert(&placeholder.ident);
}
@ -316,7 +343,7 @@ mod tests {
}
let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
assert_eq!(
result.pattern.raw.tokens,
result.pattern.tokens,
vec![
token(SyntaxKind::IDENT, "foo"),
token(T!['('], "("),

View file

@ -1,66 +1,124 @@
//! Code for applying replacement templates for matches that have previously been found.
use crate::matching::Var;
use crate::parsing::PatternElement;
use crate::{Match, SsrMatches};
use ra_syntax::ast::AstToken;
use ra_syntax::TextSize;
use crate::{resolving::ResolvedRule, Match, SsrMatches};
use ra_syntax::ast::{self, AstToken};
use ra_syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextSize};
use ra_text_edit::TextEdit;
/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
/// template. Placeholders in the template will have been substituted with whatever they matched to
/// in the original code.
pub(crate) fn matches_to_edit(matches: &SsrMatches, file_src: &str) -> TextEdit {
matches_to_edit_at_offset(matches, file_src, 0.into())
pub(crate) fn matches_to_edit(
matches: &SsrMatches,
file_src: &str,
rules: &[ResolvedRule],
) -> TextEdit {
matches_to_edit_at_offset(matches, file_src, 0.into(), rules)
}
fn matches_to_edit_at_offset(
matches: &SsrMatches,
file_src: &str,
relative_start: TextSize,
rules: &[ResolvedRule],
) -> TextEdit {
let mut edit_builder = ra_text_edit::TextEditBuilder::default();
for m in &matches.matches {
edit_builder.replace(
m.range.range.checked_sub(relative_start).unwrap(),
render_replace(m, file_src),
render_replace(m, file_src, rules),
);
}
edit_builder.finish()
}
fn render_replace(match_info: &Match, file_src: &str) -> String {
/// Walks a replacement template's syntax tree, emitting its text with placeholders substituted
/// and resolved paths rewritten for the destination context.
struct ReplacementRenderer<'a> {
    /// The match whose placeholder values and rendered template paths we substitute in.
    match_info: &'a Match,
    /// Full source text of the matched file; placeholder values are sliced out of it.
    file_src: &'a str,
    /// All rules, needed so that nested matches inside placeholder values can be rendered too.
    rules: &'a [ResolvedRule],
    /// The rule that produced `match_info`.
    rule: &'a ResolvedRule,
}
fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String {
let mut out = String::new();
for r in &match_info.template.tokens {
match r {
PatternElement::Token(t) => out.push_str(t.text.as_str()),
PatternElement::Placeholder(p) => {
if let Some(placeholder_value) =
match_info.placeholder_values.get(&Var(p.ident.to_string()))
{
let range = &placeholder_value.range.range;
let mut matched_text =
file_src[usize::from(range.start())..usize::from(range.end())].to_owned();
let edit = matches_to_edit_at_offset(
&placeholder_value.inner_matches,
file_src,
range.start(),
);
edit.apply(&mut matched_text);
out.push_str(&matched_text);
} else {
// We validated that all placeholder references were valid before we
// started, so this shouldn't happen.
panic!(
"Internal error: replacement referenced unknown placeholder {}",
p.ident
);
}
}
}
}
let rule = &rules[match_info.rule_index];
let template = rule
.template
.as_ref()
.expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern");
let renderer = ReplacementRenderer { match_info, file_src, rules, rule };
renderer.render_node(&template.node, &mut out);
for comment in &match_info.ignored_comments {
out.push_str(&comment.syntax().to_string());
}
out
}
impl ReplacementRenderer<'_> {
    /// Renders each child (node or token) of `node` into `out`.
    fn render_node_children(&self, node: &SyntaxNode, out: &mut String) {
        for node_or_token in node.children_with_tokens() {
            self.render_node_or_token(&node_or_token, out);
        }
    }

    fn render_node_or_token(&self, node_or_token: &SyntaxElement, out: &mut String) {
        match node_or_token {
            SyntaxElement::Token(token) => {
                self.render_token(&token, out);
            }
            SyntaxElement::Node(child_node) => {
                self.render_node(&child_node, out);
            }
        }
    }

    /// Renders `node` into `out`. If the node is a template path for which a context-appropriate
    /// rendering was pre-computed, that rendering is emitted instead of the path's literal text.
    fn render_node(&self, node: &SyntaxNode, out: &mut String) {
        use ra_syntax::ast::AstNode;
        if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
            out.push_str(&mod_path.to_string());
            // Emit everything except for the segment's name-ref, since we already effectively
            // emitted that as part of `mod_path`.
            if let Some(path) = ast::Path::cast(node.clone()) {
                if let Some(segment) = path.segment() {
                    for node_or_token in segment.syntax().children_with_tokens() {
                        if node_or_token.kind() != SyntaxKind::NAME_REF {
                            self.render_node_or_token(&node_or_token, out);
                        }
                    }
                }
            }
        } else {
            self.render_node_children(&node, out);
        }
    }

    /// Renders a single token. A placeholder stand-in identifier is replaced by the source text
    /// the placeholder matched (with replacements applied to any matches nested inside it);
    /// every other token is emitted verbatim.
    fn render_token(&self, token: &SyntaxToken, out: &mut String) {
        if let Some(placeholder) = self.rule.get_placeholder(&token) {
            if let Some(placeholder_value) =
                self.match_info.placeholder_values.get(&Var(placeholder.ident.to_string()))
            {
                let range = &placeholder_value.range.range;
                let mut matched_text =
                    self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned();
                // Inner-match edits are relative to the file, so offset them by the
                // placeholder's start before applying them to the extracted slice.
                let edit = matches_to_edit_at_offset(
                    &placeholder_value.inner_matches,
                    self.file_src,
                    range.start(),
                    self.rules,
                );
                edit.apply(&mut matched_text);
                out.push_str(&matched_text);
            } else {
                // We validated that all placeholder references were valid before we
                // started, so this shouldn't happen.
                panic!(
                    "Internal error: replacement referenced unknown placeholder {}",
                    placeholder.ident
                );
            }
        } else {
            out.push_str(token.text().as_str());
        }
    }
}

View file

@ -0,0 +1,173 @@
//! This module is responsible for resolving paths within rules.
use crate::errors::error;
use crate::{parsing, SsrError};
use parsing::Placeholder;
use ra_syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken};
use rustc_hash::{FxHashMap, FxHashSet};
use test_utils::mark;
/// A parsed SSR rule whose paths have been resolved relative to the position at which SSR was
/// invoked.
pub(crate) struct ResolvedRule {
    /// The search pattern, with path resolutions attached.
    pub(crate) pattern: ResolvedPattern,
    /// The replacement, if any; `None` for a search-only pattern.
    pub(crate) template: Option<ResolvedPattern>,
    // Index of this rule; presumably pairs matches back to the rule that produced them
    // (matches carry a `rule_index`) - TODO(review): confirm against MatchFinder.
    pub(crate) index: usize,
}
pub(crate) struct ResolvedPattern {
    /// The pattern's placeholders, keyed by their stand-in identifier.
    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
    /// Root of the pattern's syntax tree.
    pub(crate) node: SyntaxNode,
    // Paths in `node` that we've resolved.
    pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
    /// CALL_EXPR nodes whose callee path resolved to an associated function - i.e. calls in
    /// universal-function-call syntax, keyed by the call node.
    pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, hir::Function>,
}
/// A single resolved path within a pattern.
pub(crate) struct ResolvedPath {
    /// What the path resolved to.
    pub(crate) resolution: hir::PathResolution,
    /// The depth of the ast::Path that was resolved within the pattern.
    pub(crate) depth: u32,
}
impl ResolvedRule {
    /// Resolves every path in `rule`'s pattern and (if present) template relative to `scope`.
    /// Fails if any path that should resolve does not.
    pub(crate) fn new(
        rule: parsing::ParsedRule,
        scope: &hir::SemanticsScope,
        hygiene: &hir::Hygiene,
        index: usize,
    ) -> Result<ResolvedRule, SsrError> {
        let resolver =
            Resolver { scope, hygiene, placeholders_by_stand_in: rule.placeholders_by_stand_in };
        let resolved_template = if let Some(template) = rule.template {
            Some(resolver.resolve_pattern_tree(template)?)
        } else {
            None
        };
        Ok(ResolvedRule {
            pattern: resolver.resolve_pattern_tree(rule.pattern)?,
            template: resolved_template,
            index,
        })
    }

    /// If `token` is an IDENT standing in for one of the pattern's placeholders, returns that
    /// placeholder.
    pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> {
        if token.kind() != SyntaxKind::IDENT {
            return None;
        }
        self.pattern.placeholders_by_stand_in.get(token.text())
    }
}
/// Walks a pattern's syntax tree, resolving the paths within it.
struct Resolver<'a, 'db> {
    /// Scope at the position from which SSR was invoked; paths resolve relative to it.
    scope: &'a hir::SemanticsScope<'db>,
    hygiene: &'a hir::Hygiene,
    /// The pattern's placeholders, keyed by stand-in identifier; paths containing a
    /// placeholder are not resolved as a whole.
    placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
}
impl Resolver<'_, '_> {
    /// Resolves every resolvable path in `pattern`, returning the pattern together with its
    /// resolutions and any UFCS-style calls to associated functions.
    fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
        let mut resolved_paths = FxHashMap::default();
        self.resolve(pattern.clone(), 0, &mut resolved_paths)?;
        // A resolved path whose grandparent is a CALL_EXPR and that resolved to an associated
        // function is a UFCS call (e.g. `Foo::bar(..)`): record the call node so matching can
        // treat it specially.
        let ufcs_function_calls = resolved_paths
            .iter()
            .filter_map(|(path_node, resolved)| {
                if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) {
                    if grandparent.kind() == SyntaxKind::CALL_EXPR {
                        if let hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) =
                            &resolved.resolution
                        {
                            return Some((grandparent, *function));
                        }
                    }
                }
                None
            })
            .collect();
        Ok(ResolvedPattern {
            node: pattern,
            resolved_paths,
            placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
            ufcs_function_calls,
        })
    }

    /// Recursively resolves paths in `node`, recording each resolution together with the tree
    /// depth at which it was found. Fails if a path we decide to resolve doesn't resolve.
    fn resolve(
        &self,
        node: SyntaxNode,
        depth: u32,
        resolved_paths: &mut FxHashMap<SyntaxNode, ResolvedPath>,
    ) -> Result<(), SsrError> {
        use ra_syntax::ast::AstNode;
        if let Some(path) = ast::Path::cast(node.clone()) {
            // Check if this is an appropriate place in the path to resolve. If the path is
            // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains
            // a placeholder, e.g. `a::$b::c`, then we want to resolve `a`.
            if !path_contains_type_arguments(path.qualifier())
                && !self.path_contains_placeholder(&path)
            {
                let resolution = self
                    .resolve_path(&path)
                    .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
                resolved_paths.insert(node, ResolvedPath { resolution, depth });
                return Ok(());
            }
        }
        for node in node.children() {
            self.resolve(node, depth + 1, resolved_paths)?;
        }
        Ok(())
    }

    /// Returns whether `path` contains a placeholder, but ignores any placeholders within type
    /// arguments.
    fn path_contains_placeholder(&self, path: &ast::Path) -> bool {
        if let Some(segment) = path.segment() {
            if let Some(name_ref) = segment.name_ref() {
                if self.placeholders_by_stand_in.contains_key(name_ref.text()) {
                    return true;
                }
            }
        }
        if let Some(qualifier) = path.qualifier() {
            return self.path_contains_placeholder(&qualifier);
        }
        false
    }

    /// Resolves `path`, trying the whole path first and falling back to resolving the
    /// qualifier and searching its associated items.
    fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
        let hir_path = hir::Path::from_src(path.clone(), self.hygiene)?;
        // First try resolving the whole path. This will work for things like
        // `std::collections::HashMap`, but will fail for things like
        // `std::collections::HashMap::new`.
        if let Some(resolution) = self.scope.resolve_hir_path(&hir_path) {
            return Some(resolution);
        }
        // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap`) and if
        // that succeeds, then iterate through the candidates on the resolved type with the provided
        // name.
        let resolved_qualifier = self.scope.resolve_hir_path_qualifier(&hir_path.qualifier()?)?;
        if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
            adt.ty(self.scope.db).iterate_path_candidates(
                self.scope.db,
                self.scope.module()?.krate(),
                &FxHashSet::default(),
                Some(hir_path.segments().last()?.name),
                |_ty, assoc_item| Some(hir::PathResolution::AssocItem(assoc_item)),
            )
        } else {
            None
        }
    }
}
/// Returns whether `path` or any of its qualifiers contains type arguments.
fn path_contains_type_arguments(path: Option<ast::Path>) -> bool {
    // Walk from the outermost path inwards through each qualifier, looking for a `::<...>`
    // type-argument list on any segment.
    let mut current = path;
    while let Some(p) = current {
        let has_type_args = p.segment().map_or(false, |segment| segment.type_arg_list().is_some());
        if has_type_args {
            mark::hit!(type_arguments_within_path);
            return true;
        }
        current = p.qualifier();
    }
    false
}

232
crates/ra_ssr/src/search.rs Normal file
View file

@ -0,0 +1,232 @@
//! Searching for matches.
use crate::{
matching,
resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
Match, MatchFinder,
};
use ra_db::FileRange;
use ra_ide_db::{
defs::Definition,
search::{Reference, SearchScope},
};
use ra_syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
use test_utils::mark;
/// A cache for the results of find_usages. This is for when we have multiple patterns that have the
/// same path. e.g. if the pattern was `foo::Bar` that can parse as a path, an expression, a type
/// and as a pattern. In each, the usages of `foo::Bar` are the same and we'd like to avoid finding
/// them more than once.
#[derive(Default)]
pub(crate) struct UsageCache {
    /// Cached find-usages results, keyed by definition. Stored as a Vec of pairs rather than a
    /// map since we expect very few entries and `Definition` doesn't need to implement `Hash`.
    usages: Vec<(Definition, Vec<Reference>)>,
}
impl<'db> MatchFinder<'db> {
    /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
    /// replacement impossible, so further processing is required in order to properly nest matches
    /// and remove overlapping matches. This is done in the `nesting` module.
    pub(crate) fn find_matches_for_rule(
        &self,
        rule: &ResolvedRule,
        usage_cache: &mut UsageCache,
        matches_out: &mut Vec<Match>,
    ) {
        // Without a resolved path to drive find-usages, fall back to scanning every node of
        // every file.
        if pick_path_for_usages(&rule.pattern).is_none() {
            self.slow_scan(rule, matches_out);
            return;
        }
        self.find_matches_for_pattern_tree(rule, &rule.pattern, usage_cache, matches_out);
    }

    /// Fast path: find references to one of the pattern's resolved paths, then attempt a full
    /// match at each reference site.
    fn find_matches_for_pattern_tree(
        &self,
        rule: &ResolvedRule,
        pattern: &ResolvedPattern,
        usage_cache: &mut UsageCache,
        matches_out: &mut Vec<Match>,
    ) {
        if let Some(resolved_path) = pick_path_for_usages(pattern) {
            let definition: Definition = resolved_path.resolution.clone().into();
            for reference in self.find_usages(usage_cache, definition) {
                if let Some(node_to_match) = self.find_node_to_match(resolved_path, reference) {
                    if !is_search_permitted_ancestors(&node_to_match) {
                        mark::hit!(use_declaration_with_braces);
                        continue;
                    }
                    if let Ok(m) =
                        matching::get_match(false, rule, &node_to_match, &None, &self.sema)
                    {
                        matches_out.push(m);
                    }
                }
            }
        }
    }

    /// Given a reference to the path we searched for, walks up the tree by the depth that path
    /// had within the pattern, yielding the node that should correspond to the pattern's root.
    fn find_node_to_match(
        &self,
        resolved_path: &ResolvedPath,
        reference: &Reference,
    ) -> Option<SyntaxNode> {
        let file = self.sema.parse(reference.file_range.file_id);
        let depth = resolved_path.depth as usize;
        let offset = reference.file_range.range.start();
        if let Some(path) =
            self.sema.find_node_at_offset_with_descend::<ast::Path>(file.syntax(), offset)
        {
            self.sema.ancestors_with_macros(path.syntax().clone()).skip(depth).next()
        } else if let Some(path) =
            self.sema.find_node_at_offset_with_descend::<ast::MethodCallExpr>(file.syntax(), offset)
        {
            // If the pattern contained a path and we found a reference to that path that wasn't
            // itself a path, but was a method call, then we need to adjust how far up to try
            // matching by how deep the path was within a CallExpr. The structure would have been
            // CallExpr, PathExpr, Path - i.e. a depth offset of 2. We don't need to check if the
            // path was part of a CallExpr because if it wasn't then all that will happen is we'll
            // fail to match, which is the desired behavior.
            const PATH_DEPTH_IN_CALL_EXPR: usize = 2;
            if depth < PATH_DEPTH_IN_CALL_EXPR {
                return None;
            }
            self.sema
                .ancestors_with_macros(path.syntax().clone())
                .skip(depth - PATH_DEPTH_IN_CALL_EXPR)
                .next()
        } else {
            None
        }
    }

    /// Returns all references to `definition` within the search scope, computing and caching
    /// them on first request.
    fn find_usages<'a>(
        &self,
        usage_cache: &'a mut UsageCache,
        definition: Definition,
    ) -> &'a [Reference] {
        // Logically if a lookup succeeds we should just return it. Unfortunately returning it would
        // extend the lifetime of the borrow, then we wouldn't be able to do the insertion on a
        // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
        // lookups in the case of a cache hit.
        if usage_cache.find(&definition).is_none() {
            let usages = definition.find_usages(&self.sema, Some(self.search_scope()));
            usage_cache.usages.push((definition, usages));
            return &usage_cache.usages.last().unwrap().1;
        }
        usage_cache.find(&definition).unwrap()
    }

    /// Returns the scope within which we want to search. We don't want an unrestricted search
    /// scope, since we don't want to find references in external dependencies.
    fn search_scope(&self) -> SearchScope {
        // FIXME: We should ideally have a test that checks that we edit local roots and not library
        // roots. This probably would require some changes to fixtures, since currently everything
        // seems to get put into a single source root.
        use ra_db::SourceDatabaseExt;
        use ra_ide_db::symbol_index::SymbolsDatabase;
        let mut files = Vec::new();
        for &root in self.sema.db.local_roots().iter() {
            let sr = self.sema.db.source_root(root);
            files.extend(sr.iter());
        }
        SearchScope::files(&files)
    }

    /// Slow path: walks the syntax tree of every file in every local source root, trying the
    /// rule at each node.
    fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
        use ra_db::SourceDatabaseExt;
        use ra_ide_db::symbol_index::SymbolsDatabase;
        for &root in self.sema.db.local_roots().iter() {
            let sr = self.sema.db.source_root(root);
            for file_id in sr.iter() {
                let file = self.sema.parse(file_id);
                let code = file.syntax();
                self.slow_scan_node(code, rule, &None, matches_out);
            }
        }
    }

    /// Tries the rule against `code` and, recursively, against all of its descendants, also
    /// descending into macro expansions. `restrict_range`, when set, limits matches to nodes
    /// originating from the given range (used inside expansions).
    fn slow_scan_node(
        &self,
        code: &SyntaxNode,
        rule: &ResolvedRule,
        restrict_range: &Option<FileRange>,
        matches_out: &mut Vec<Match>,
    ) {
        if !is_search_permitted(code) {
            return;
        }
        if let Ok(m) = matching::get_match(false, rule, &code, restrict_range, &self.sema) {
            matches_out.push(m);
        }
        // If we've got a macro call, we already tried matching it pre-expansion, which is the only
        // way to match the whole macro, now try expanding it and matching the expansion.
        if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
            if let Some(expanded) = self.sema.expand(&macro_call) {
                if let Some(tt) = macro_call.token_tree() {
                    // When matching within a macro expansion, we only want to allow matches of
                    // nodes that originated entirely from within the token tree of the macro call.
                    // i.e. we don't want to match something that came from the macro itself.
                    self.slow_scan_node(
                        &expanded,
                        rule,
                        &Some(self.sema.original_range(tt.syntax())),
                        matches_out,
                    );
                }
            }
        }
        for child in code.children() {
            self.slow_scan_node(&child, rule, restrict_range, matches_out);
        }
    }
}
/// Returns whether we support matching within `node` and all of its ancestors.
/// Returns whether we support matching within `node` and all of its ancestors.
fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool {
    if !is_search_permitted(node) {
        return false;
    }
    // Walk up the parent chain; every enclosing node must also permit searching.
    let mut current = node.parent();
    while let Some(ancestor) = current {
        if !is_search_permitted(&ancestor) {
            return false;
        }
        current = ancestor.parent();
    }
    true
}
/// Returns whether we support matching within this kind of node.
fn is_search_permitted(node: &SyntaxNode) -> bool {
    // FIXME: Properly handle use declarations. At the moment, if our search pattern is `foo::bar`
    // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`.
    // However we'll then replace just the part we matched `bar`. We probably need to instead remove
    // `bar` and insert a new use declaration.
    !matches!(node.kind(), SyntaxKind::USE_ITEM)
}
impl UsageCache {
    /// Looks up previously-computed references for `definition`, if any. A linear scan is fine:
    /// we expect a very small number of cache entries (generally 1), and it avoids the need to
    /// implement Hash for Definition.
    fn find(&mut self, definition: &Definition) -> Option<&[Reference]> {
        self.usages
            .iter()
            .find(|(cached_definition, _)| cached_definition == definition)
            .map(|(_, refs)| refs.as_slice())
    }
}
/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't
/// something that we can find references to. We then somewhat arbitrarily pick the path that is the
/// longest as this is hopefully more likely to be less common, making it faster to find.
fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> {
    // FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are
    // private to the current module, then we definitely would want to pick them over say a path
    // from std. Possibly we should go further than this and intersect the search scopes for all
    // resolved paths then search only in that scope.
    pattern
        .resolved_paths
        .iter()
        .filter(|(_, p)| {
            !matches!(p.resolution, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)))
        })
        .max_by_key(|(node, _)| node.text().len())
        .map(|(_, resolved)| resolved)
}

View file

@ -1,5 +1,8 @@
use crate::{MatchFinder, SsrRule};
use ra_db::{FileId, SourceDatabaseExt};
use expect::{expect, Expect};
use ra_db::{salsa::Durability, FileId, FilePosition, SourceDatabaseExt};
use rustc_hash::FxHashSet;
use std::sync::Arc;
use test_utils::mark;
fn parse_error_text(query: &str) -> String {
@ -36,7 +39,7 @@ fn parser_repeated_name() {
fn parser_invalid_pattern() {
assert_eq!(
parse_error_text(" ==>> ()"),
"Parse error: Pattern is not a valid Rust expression, type, item, path or pattern"
"Parse error: Not a valid Rust expression, type, item, path or pattern"
);
}
@ -44,7 +47,7 @@ fn parser_invalid_pattern() {
fn parser_invalid_template() {
assert_eq!(
parse_error_text("() ==>> )"),
"Parse error: Replacement is not a valid Rust expression, type, item, path or pattern"
"Parse error: Not a valid Rust expression, type, item, path or pattern"
);
}
@ -56,39 +59,44 @@ fn parser_undefined_placeholder_in_replacement() {
);
}
fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FileId) {
/// `code` may optionally contain a cursor marker `<|>`. If it doesn't, then the position will be
/// the start of the file.
pub(crate) fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FilePosition) {
use ra_db::fixture::WithFixture;
ra_ide_db::RootDatabase::with_single_file(code)
use ra_ide_db::symbol_index::SymbolsDatabase;
let (mut db, position) = if code.contains(test_utils::CURSOR_MARKER) {
ra_ide_db::RootDatabase::with_position(code)
} else {
let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(code);
(db, FilePosition { file_id, offset: 0.into() })
};
let mut local_roots = FxHashSet::default();
local_roots.insert(ra_db::fixture::WORKSPACE);
db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
(db, position)
}
fn assert_ssr_transform(rule: &str, input: &str, result: &str) {
assert_ssr_transforms(&[rule], input, result);
fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
assert_ssr_transforms(&[rule], input, expected);
}
fn normalize_code(code: &str) -> String {
let (db, file_id) = single_file(code);
db.file_text(file_id).to_string()
}
fn assert_ssr_transforms(rules: &[&str], input: &str, result: &str) {
let (db, file_id) = single_file(input);
let mut match_finder = MatchFinder::new(&db);
fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
let (db, position) = single_file(input);
let mut match_finder = MatchFinder::in_context(&db, position);
for rule in rules {
let rule: SsrRule = rule.parse().unwrap();
match_finder.add_rule(rule);
match_finder.add_rule(rule).unwrap();
}
if let Some(edits) = match_finder.edits_for_file(file_id) {
// Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
// stuff.
let mut after = db.file_text(file_id).to_string();
edits.apply(&mut after);
// Likewise, we need to make sure that whatever transformations fixture parsing applies,
// also get applied to our expected result.
let result = normalize_code(result);
assert_eq!(after, result);
} else {
let edits = match_finder.edits();
if edits.is_empty() {
panic!("No edits were made");
}
assert_eq!(edits[0].file_id, position.file_id);
// Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
// stuff.
let mut actual = db.file_text(position.file_id).to_string();
edits[0].edit.apply(&mut actual);
expected.assert_eq(&actual);
}
fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet: &str) {
@ -104,39 +112,34 @@ fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet:
}
fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
let (db, file_id) = single_file(code);
let mut match_finder = MatchFinder::new(&db);
match_finder.add_search_pattern(pattern.parse().unwrap());
let matched_strings: Vec<String> = match_finder
.find_matches_in_file(file_id)
.flattened()
.matches
.iter()
.map(|m| m.matched_text())
.collect();
let (db, position) = single_file(code);
let mut match_finder = MatchFinder::in_context(&db, position);
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let matched_strings: Vec<String> =
match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
if matched_strings != expected && !expected.is_empty() {
print_match_debug_info(&match_finder, file_id, &expected[0]);
print_match_debug_info(&match_finder, position.file_id, &expected[0]);
}
assert_eq!(matched_strings, expected);
}
fn assert_no_match(pattern: &str, code: &str) {
let (db, file_id) = single_file(code);
let mut match_finder = MatchFinder::new(&db);
match_finder.add_search_pattern(pattern.parse().unwrap());
let matches = match_finder.find_matches_in_file(file_id).flattened().matches;
let (db, position) = single_file(code);
let mut match_finder = MatchFinder::in_context(&db, position);
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let matches = match_finder.matches().flattened().matches;
if !matches.is_empty() {
print_match_debug_info(&match_finder, file_id, &matches[0].matched_text());
print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text());
panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches);
}
}
fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
let (db, file_id) = single_file(code);
let mut match_finder = MatchFinder::new(&db);
match_finder.add_search_pattern(pattern.parse().unwrap());
let (db, position) = single_file(code);
let mut match_finder = MatchFinder::in_context(&db, position);
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let mut reasons = Vec::new();
for d in match_finder.debug_where_text_equal(file_id, snippet) {
for d in match_finder.debug_where_text_equal(position.file_id, snippet) {
if let Some(reason) = d.match_failure_reason() {
reasons.push(reason.to_owned());
}
@ -149,7 +152,7 @@ fn ssr_function_to_method() {
assert_ssr_transform(
"my_function($a, $b) ==>> ($a).my_method($b)",
"fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }",
"fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }",
expect![["fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }"]],
)
}
@ -157,8 +160,19 @@ fn ssr_function_to_method() {
fn ssr_nested_function() {
assert_ssr_transform(
"foo($a, $b, $c) ==>> bar($c, baz($a, $b))",
"fn foo() {} fn main { foo (x + value.method(b), x+y-z, true && false) }",
"fn foo() {} fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }",
r#"
//- /lib.rs crate:foo
fn foo() {}
fn bar() {}
fn baz() {}
fn main { foo (x + value.method(b), x+y-z, true && false) }
"#,
expect![[r#"
fn foo() {}
fn bar() {}
fn baz() {}
fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }
"#]],
)
}
@ -167,7 +181,7 @@ fn ssr_expected_spacing() {
assert_ssr_transform(
"foo($x) + bar() ==>> bar($x)",
"fn foo() {} fn bar() {} fn main() { foo(5) + bar() }",
"fn foo() {} fn bar() {} fn main() { bar(5) }",
expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
);
}
@ -176,7 +190,7 @@ fn ssr_with_extra_space() {
assert_ssr_transform(
"foo($x ) + bar() ==>> bar($x)",
"fn foo() {} fn bar() {} fn main() { foo( 5 ) +bar( ) }",
"fn foo() {} fn bar() {} fn main() { bar(5) }",
expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
);
}
@ -184,8 +198,8 @@ fn ssr_with_extra_space() {
fn ssr_keeps_nested_comment() {
assert_ssr_transform(
"foo($x) ==>> bar($x)",
"fn foo() {} fn main() { foo(other(5 /* using 5 */)) }",
"fn foo() {} fn main() { bar(other(5 /* using 5 */)) }",
"fn foo() {} fn bar() {} fn main() { foo(other(5 /* using 5 */)) }",
expect![["fn foo() {} fn bar() {} fn main() { bar(other(5 /* using 5 */)) }"]],
)
}
@ -193,17 +207,25 @@ fn ssr_keeps_nested_comment() {
fn ssr_keeps_comment() {
assert_ssr_transform(
"foo($x) ==>> bar($x)",
"fn foo() {} fn main() { foo(5 /* using 5 */) }",
"fn foo() {} fn main() { bar(5)/* using 5 */ }",
"fn foo() {} fn bar() {} fn main() { foo(5 /* using 5 */) }",
expect![["fn foo() {} fn bar() {} fn main() { bar(5)/* using 5 */ }"]],
)
}
#[test]
fn ssr_struct_lit() {
assert_ssr_transform(
"foo{a: $a, b: $b} ==>> foo::new($a, $b)",
"fn foo() {} fn main() { foo{b:2, a:1} }",
"fn foo() {} fn main() { foo::new(1, 2) }",
"Foo{a: $a, b: $b} ==>> Foo::new($a, $b)",
r#"
struct Foo() {}
impl Foo { fn new() {} }
fn main() { Foo{b:2, a:1} }
"#,
expect![[r#"
struct Foo() {}
impl Foo { fn new() {} }
fn main() { Foo::new(1, 2) }
"#]],
)
}
@ -315,7 +337,7 @@ fn match_struct_instantiation() {
fn match_path() {
let code = r#"
mod foo {
fn bar() {}
pub fn bar() {}
}
fn f() {foo::bar(42)}"#;
assert_matches("foo::bar", code, &["foo::bar"]);
@ -328,6 +350,60 @@ fn match_pattern() {
assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]);
}
// If our pattern has a full path, e.g. a::b::c() and the code has c(), but c resolves to
// a::b::c, then we should match.
#[test]
fn match_fully_qualified_fn_path() {
let code = r#"
mod a {
pub mod b {
pub fn c(_: i32) {}
}
}
use a::b::c;
fn f1() {
c(42);
}
"#;
assert_matches("a::b::c($a)", code, &["c(42)"]);
}
#[test]
fn match_resolved_type_name() {
let code = r#"
mod m1 {
pub mod m2 {
pub trait Foo<T> {}
}
}
mod m3 {
trait Foo<T> {}
fn f1(f: Option<&dyn Foo<bool>>) {}
}
mod m4 {
use crate::m1::m2::Foo;
fn f1(f: Option<&dyn Foo<i32>>) {}
}
"#;
assert_matches("m1::m2::Foo<$t>", code, &["Foo<i32>"]);
}
#[test]
fn type_arguments_within_path() {
mark::check!(type_arguments_within_path);
let code = r#"
mod foo {
pub struct Bar<T> {t: T}
impl<T> Bar<T> {
pub fn baz() {}
}
}
fn f1() {foo::Bar::<i32>::baz();}
"#;
assert_no_match("foo::Bar::<i64>::baz()", code);
assert_matches("foo::Bar::<i32>::baz()", code, &["foo::Bar::<i32>::baz()"]);
}
#[test]
fn literal_constraint() {
mark::check!(literal_constraint);
@ -416,8 +492,8 @@ fn no_match_split_expression() {
fn replace_function_call() {
assert_ssr_transform(
"foo() ==>> bar()",
"fn foo() {} fn f1() {foo(); foo();}",
"fn foo() {} fn f1() {bar(); bar();}",
"fn foo() {} fn bar() {} fn f1() {foo(); foo();}",
expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]],
);
}
@ -425,8 +501,8 @@ fn replace_function_call() {
fn replace_function_call_with_placeholders() {
assert_ssr_transform(
"foo($a, $b) ==>> bar($b, $a)",
"fn foo() {} fn f1() {foo(5, 42)}",
"fn foo() {} fn f1() {bar(42, 5)}",
"fn foo() {} fn bar() {} fn f1() {foo(5, 42)}",
expect![["fn foo() {} fn bar() {} fn f1() {bar(42, 5)}"]],
);
}
@ -434,8 +510,109 @@ fn replace_function_call_with_placeholders() {
fn replace_nested_function_calls() {
assert_ssr_transform(
"foo($a) ==>> bar($a)",
"fn foo() {} fn f1() {foo(foo(42))}",
"fn foo() {} fn f1() {bar(bar(42))}",
"fn foo() {} fn bar() {} fn f1() {foo(foo(42))}",
expect![["fn foo() {} fn bar() {} fn f1() {bar(bar(42))}"]],
);
}
#[test]
fn replace_associated_function_call() {
assert_ssr_transform(
"Foo::new() ==>> Bar::new()",
r#"
struct Foo {}
impl Foo { fn new() {} }
struct Bar {}
impl Bar { fn new() {} }
fn f1() {Foo::new();}
"#,
expect![[r#"
struct Foo {}
impl Foo { fn new() {} }
struct Bar {}
impl Bar { fn new() {} }
fn f1() {Bar::new();}
"#]],
);
}
#[test]
fn replace_path_in_different_contexts() {
// Note the <|> inside module a::b which marks the point where the rule is interpreted. We
// replace foo with bar, but both need different path qualifiers in different contexts. In f4,
// foo is unqualified because of a use statement, however the replacement needs to be fully
// qualified.
assert_ssr_transform(
"c::foo() ==>> c::bar()",
r#"
mod a {
pub mod b {<|>
pub mod c {
pub fn foo() {}
pub fn bar() {}
fn f1() { foo() }
}
fn f2() { c::foo() }
}
fn f3() { b::c::foo() }
}
use a::b::c::foo;
fn f4() { foo() }
"#,
expect![[r#"
mod a {
pub mod b {
pub mod c {
pub fn foo() {}
pub fn bar() {}
fn f1() { bar() }
}
fn f2() { c::bar() }
}
fn f3() { b::c::bar() }
}
use a::b::c::foo;
fn f4() { a::b::c::bar() }
"#]],
);
}
#[test]
fn replace_associated_function_with_generics() {
assert_ssr_transform(
"c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()",
r#"
mod c {
pub struct Foo<T> {v: T}
impl<T> Foo<T> { pub fn new() {} }
fn f1() {
Foo::<i32>::new();
}
}
mod d {
pub struct Bar<T> {v: T}
impl<T> Bar<T> { pub fn default() {} }
fn f1() {
super::c::Foo::<i32>::new();
}
}
"#,
expect![[r#"
mod c {
pub struct Foo<T> {v: T}
impl<T> Foo<T> { pub fn new() {} }
fn f1() {
crate::d::Bar::<i32>::default();
}
}
mod d {
pub struct Bar<T> {v: T}
impl<T> Bar<T> { pub fn default() {} }
fn f1() {
Bar::<i32>::default();
}
}
"#]],
);
}
@ -443,17 +620,10 @@ fn replace_nested_function_calls() {
fn replace_type() {
assert_ssr_transform(
"Result<(), $a> ==>> Option<$a>",
"struct Result<T, E> {} fn f1() -> Result<(), Vec<Error>> {foo()}",
"struct Result<T, E> {} fn f1() -> Option<Vec<Error>> {foo()}",
);
}
#[test]
fn replace_struct_init() {
assert_ssr_transform(
"Foo {a: $a, b: $b} ==>> Foo::new($a, $b)",
"struct Foo {} fn f1() {Foo{b: 1, a: 2}}",
"struct Foo {} fn f1() {Foo::new(2, 1)}",
"struct Result<T, E> {} struct Option<T> {} fn f1() -> Result<(), Vec<Error>> {foo()}",
expect![[
"struct Result<T, E> {} struct Option<T> {} fn f1() -> Option<Vec<Error>> {foo()}"
]],
);
}
@ -462,12 +632,12 @@ fn replace_macro_invocations() {
assert_ssr_transform(
"try!($a) ==>> $a?",
"macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}",
"macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}",
expect![["macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]],
);
assert_ssr_transform(
"foo!($a($b)) ==>> foo($b, $a)",
"macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}",
"macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}",
expect![["macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}"]],
);
}
@ -476,12 +646,12 @@ fn replace_binary_op() {
assert_ssr_transform(
"$a + $b ==>> $b + $a",
"fn f() {2 * 3 + 4 * 5}",
"fn f() {4 * 5 + 2 * 3}",
expect![["fn f() {4 * 5 + 2 * 3}"]],
);
assert_ssr_transform(
"$a + $b ==>> $b + $a",
"fn f() {1 + 2 + 3 + 4}",
"fn f() {4 + 3 + 2 + 1}",
expect![["fn f() {4 + 3 + 2 + 1}"]],
);
}
@ -494,8 +664,23 @@ fn match_binary_op() {
fn multiple_rules() {
assert_ssr_transforms(
&["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"],
"fn f() -> i32 {3 + 2 + 1}",
"fn f() -> i32 {add_one(add(3, 2))}",
"fn add() {} fn add_one() {} fn f() -> i32 {3 + 2 + 1}",
expect![["fn add() {} fn add_one() {} fn f() -> i32 {add_one(add(3, 2))}"]],
)
}
#[test]
fn multiple_rules_with_nested_matches() {
assert_ssr_transforms(
&["foo1($a) ==>> bar1($a)", "foo2($a) ==>> bar2($a)"],
r#"
fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
fn f() {foo1(foo2(foo1(foo2(foo1(42)))))}
"#,
expect![[r#"
fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
fn f() {bar1(bar2(bar1(bar2(bar1(42)))))}
"#]],
)
}
@ -527,12 +712,37 @@ fn replace_within_macro_expansion() {
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {macro1!(5.x().foo().o2())}"#,
r#"
fn bar() {}
fn f() {macro1!(5.x().foo().o2())}
"#,
expect![[r#"
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {macro1!(bar(5.x()).o2())}"#,
fn bar() {}
fn f() {macro1!(bar(5.x()).o2())}
"#]],
)
}
#[test]
fn replace_outside_and_within_macro_expansion() {
assert_ssr_transform(
"foo($a) ==>> bar($a)",
r#"
fn foo() {} fn bar() {}
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {foo(foo(macro1!(foo(foo(42)))))}
"#,
expect![[r#"
fn foo() {} fn bar() {}
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {bar(bar(macro1!(bar(bar(42)))))}
"#]],
)
}
@ -544,12 +754,14 @@ fn preserves_whitespace_within_macro_expansion() {
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {macro1!(1 * 2 + 3 + 4}"#,
r#"
fn f() {macro1!(1 * 2 + 3 + 4}
"#,
expect![[r#"
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {macro1!(4 - 3 - 1 * 2}"#,
fn f() {macro1!(4 - 3 - 1 * 2}
"#]],
)
}
@ -580,3 +792,96 @@ fn match_failure_reasons() {
r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#,
);
}
#[test]
fn overlapping_possible_matches() {
// There are three possible matches here, however the middle one, `foo(foo(foo(42)))` shouldn't
// match because it overlaps with the outer match. The inner match is permitted since it's
// contained entirely within the placeholder of the outer match.
assert_matches(
"foo(foo($a))",
"fn foo() {} fn main() {foo(foo(foo(foo(42))))}",
&["foo(foo(42))", "foo(foo(foo(foo(42))))"],
);
}
#[test]
fn use_declaration_with_braces() {
// It would be OK for a path rule to match and alter a use declaration. We shouldn't mess it up
// though. In particular, we must not change `use foo::{baz, bar}` to `use foo::{baz,
// foo2::bar2}`.
mark::check!(use_declaration_with_braces);
assert_ssr_transform(
"foo::bar ==>> foo2::bar2",
r#"
mod foo { pub fn bar() {} pub fn baz() {} }
mod foo2 { pub fn bar2() {} }
use foo::{baz, bar};
fn main() { bar() }
"#,
expect![["
mod foo { pub fn bar() {} pub fn baz() {} }
mod foo2 { pub fn bar2() {} }
use foo::{baz, bar};
fn main() { foo2::bar2() }
"]],
)
}
#[test]
fn ufcs_matches_method_call() {
let code = r#"
struct Foo {}
impl Foo {
fn new(_: i32) -> Foo { Foo {} }
fn do_stuff(&self, _: i32) {}
}
struct Bar {}
impl Bar {
fn new(_: i32) -> Bar { Bar {} }
fn do_stuff(&self, v: i32) {}
}
fn main() {
let b = Bar {};
let f = Foo {};
b.do_stuff(1);
f.do_stuff(2);
Foo::new(4).do_stuff(3);
// Too many / too few args - should never match
f.do_stuff(2, 10);
f.do_stuff();
}
"#;
assert_matches("Foo::do_stuff($a, $b)", code, &["f.do_stuff(2)", "Foo::new(4).do_stuff(3)"]);
// The arguments need special handling in the case of a function call matching a method call
// and the first argument is different.
assert_matches("Foo::do_stuff($a, 2)", code, &["f.do_stuff(2)"]);
assert_matches("Foo::do_stuff(Foo::new(4), $b)", code, &["Foo::new(4).do_stuff(3)"]);
assert_ssr_transform(
"Foo::do_stuff(Foo::new($a), $b) ==>> Bar::new($b).do_stuff($a)",
code,
expect![[r#"
struct Foo {}
impl Foo {
fn new(_: i32) -> Foo { Foo {} }
fn do_stuff(&self, _: i32) {}
}
struct Bar {}
impl Bar {
fn new(_: i32) -> Bar { Bar {} }
fn do_stuff(&self, v: i32) {}
}
fn main() {
let b = Bar {};
let f = Foo {};
b.do_stuff(1);
f.do_stuff(2);
Bar::new(3).do_stuff(4);
// Too many / too few args - should never match
f.do_stuff(2, 10);
f.do_stuff();
}
"#]],
);
}

View file

@ -1,27 +1,17 @@
//! Applies structured search replace rules from the command line.
use crate::cli::{load_cargo::load_cargo, Result};
use ra_ide::SourceFileEdit;
use ra_ssr::{MatchFinder, SsrPattern, SsrRule};
pub fn apply_ssr_rules(rules: Vec<SsrRule>) -> Result<()> {
use ra_db::SourceDatabaseExt;
use ra_ide_db::symbol_index::SymbolsDatabase;
let (host, vfs) = load_cargo(&std::env::current_dir()?, true, true)?;
let db = host.raw_database();
let mut match_finder = MatchFinder::new(db);
let mut match_finder = MatchFinder::at_first_file(db)?;
for rule in rules {
match_finder.add_rule(rule);
}
let mut edits = Vec::new();
for &root in db.local_roots().iter() {
let sr = db.source_root(root);
for file_id in sr.iter() {
if let Some(edit) = match_finder.edits_for_file(file_id) {
edits.push(SourceFileEdit { file_id, edit });
}
}
match_finder.add_rule(rule)?;
}
let edits = match_finder.edits();
for edit in edits {
if let Some(path) = vfs.file_path(edit.file_id).as_path() {
let mut contents = db.file_text(edit.file_id).to_string();
@ -38,34 +28,27 @@ pub fn apply_ssr_rules(rules: Vec<SsrRule>) -> Result<()> {
pub fn search_for_patterns(patterns: Vec<SsrPattern>, debug_snippet: Option<String>) -> Result<()> {
use ra_db::SourceDatabaseExt;
use ra_ide_db::symbol_index::SymbolsDatabase;
let (host, vfs) = load_cargo(&std::env::current_dir()?, true, true)?;
let (host, _vfs) = load_cargo(&std::env::current_dir()?, true, true)?;
let db = host.raw_database();
let mut match_finder = MatchFinder::new(db);
let mut match_finder = MatchFinder::at_first_file(db)?;
for pattern in patterns {
match_finder.add_search_pattern(pattern);
match_finder.add_search_pattern(pattern)?;
}
for &root in db.local_roots().iter() {
let sr = db.source_root(root);
for file_id in sr.iter() {
if let Some(debug_snippet) = &debug_snippet {
if let Some(debug_snippet) = &debug_snippet {
for &root in db.local_roots().iter() {
let sr = db.source_root(root);
for file_id in sr.iter() {
for debug_info in match_finder.debug_where_text_equal(file_id, debug_snippet) {
println!("{:#?}", debug_info);
}
} else {
let matches = match_finder.find_matches_in_file(file_id);
if !matches.matches.is_empty() {
let matches = matches.flattened().matches;
if let Some(path) = vfs.file_path(file_id).as_path() {
println!("{} matches in '{}'", matches.len(), path.to_string_lossy());
}
// We could possibly at some point do something more useful than just printing
// the matched text. For now though, that's the easiest thing to do.
for m in matches {
println!("{}", m.matched_text());
}
}
}
}
} else {
for m in match_finder.matches().flattened().matches {
// We could possibly at some point do something more useful than just printing
// the matched text. For now though, that's the easiest thing to do.
println!("{}", m.matched_text());
}
}
Ok(())
}

View file

@ -1026,8 +1026,9 @@ pub(crate) fn handle_ssr(
params: lsp_ext::SsrParams,
) -> Result<lsp_types::WorkspaceEdit> {
let _p = profile("handle_ssr");
let position = from_proto::file_position(&snap, params.position)?;
let source_change =
snap.analysis.structural_search_replace(&params.query, params.parse_only)??;
snap.analysis.structural_search_replace(&params.query, params.parse_only, position)??;
to_proto::workspace_edit(&snap, source_change)
}

View file

@ -216,6 +216,11 @@ impl Request for Ssr {
pub struct SsrParams {
pub query: String,
pub parse_only: bool,
/// File position where SSR was invoked. Paths in `query` will be resolved relative to this
/// position.
#[serde(flatten)]
pub position: lsp_types::TextDocumentPositionParams,
}
pub enum StatusNotification {}

View file

@ -274,6 +274,11 @@ interface SsrParams {
query: string,
/// If true, only check the syntax of the query and don't compute the actual edit.
parseOnly: bool,
/// The current text document. This and `position` will be used to determine in what scope
/// paths in `query` should be resolved.
textDocument: lc.TextDocumentIdentifier;
/// Position where SSR was invoked.
position: lc.Position;
}
```
@ -285,7 +290,7 @@ WorkspaceEdit
### Example
SSR with query `foo($a:expr, $b:expr) ==>> ($a).foo($b)` will transform, eg `foo(y + 5, z)` into `(y + 5).foo(z)`.
SSR with query `foo($a, $b) ==>> ($a).foo($b)` will transform, eg `foo(y + 5, z)` into `(y + 5).foo(z)`.
### Unresolved Question

View file

@ -185,15 +185,21 @@ export function parentModule(ctx: Ctx): Cmd {
export function ssr(ctx: Ctx): Cmd {
return async () => {
const editor = vscode.window.activeTextEditor;
const client = ctx.client;
if (!client) return;
if (!editor || !client) return;
const position = editor.selection.active;
const textDocument = { uri: editor.document.uri.toString() };
const options: vscode.InputBoxOptions = {
value: "() ==>> ()",
prompt: "Enter request, for example 'Foo($a) ==> Foo::new($a)' ",
validateInput: async (x: string) => {
try {
await client.sendRequest(ra.ssr, { query: x, parseOnly: true });
await client.sendRequest(ra.ssr, {
query: x, parseOnly: true, textDocument, position,
});
} catch (e) {
return e.toString();
}
@ -208,7 +214,9 @@ export function ssr(ctx: Ctx): Cmd {
title: "Structured search replace in progress...",
cancellable: false,
}, async (_progress, _token) => {
const edit = await client.sendRequest(ra.ssr, { query: request, parseOnly: false });
const edit = await client.sendRequest(ra.ssr, {
query: request, parseOnly: false, textDocument, position
});
await vscode.workspace.applyEdit(client.protocol2CodeConverter.asWorkspaceEdit(edit));
});

View file

@ -93,6 +93,8 @@ export const inlayHints = new lc.RequestType<InlayHintsParams, InlayHint[], void
export interface SsrParams {
query: string;
parseOnly: boolean;
textDocument: lc.TextDocumentIdentifier;
position: lc.Position;
}
export const ssr = new lc.RequestType<SsrParams, lc.WorkspaceEdit, void>('experimental/ssr');