SSR: Match paths based on what they resolve to

Also render template paths appropriately for their context.
This commit is contained in:
David Lattimore 2020-07-22 16:46:29 +10:00
parent 3975952601
commit 757f755c29
9 changed files with 482 additions and 61 deletions

View file

@ -11,6 +11,16 @@ use ra_ssr::{MatchFinder, SsrError, SsrRule};
// A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement. // A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
// Within a macro call, a placeholder will match up until whatever token follows the placeholder. // Within a macro call, a placeholder will match up until whatever token follows the placeholder.
// //
// All paths in both the search pattern and the replacement template must resolve in the context
// in which this command is invoked. Paths in the search pattern will then match the code if they
// resolve to the same item, even if they're written differently. For example if we invoke the
// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
// to `foo::Bar` will match.
//
// Paths in the replacement template will be rendered appropriately for the context in which the
// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
// code in the `foo` module, we'll insert just `Bar`.
//
// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`. // Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
// //
// Supported constraints: // Supported constraints:
@ -47,7 +57,7 @@ pub fn parse_search_replace(
) -> Result<Vec<SourceFileEdit>, SsrError> { ) -> Result<Vec<SourceFileEdit>, SsrError> {
let rule: SsrRule = rule.parse()?; let rule: SsrRule = rule.parse()?;
let mut match_finder = MatchFinder::in_context(db, position); let mut match_finder = MatchFinder::in_context(db, position);
match_finder.add_rule(rule); match_finder.add_rule(rule)?;
if parse_only { if parse_only {
return Ok(Vec::new()); return Ok(Vec::new());
} }

View file

@ -7,6 +7,7 @@ mod matching;
mod nester; mod nester;
mod parsing; mod parsing;
mod replacing; mod replacing;
mod resolving;
mod search; mod search;
#[macro_use] #[macro_use]
mod errors; mod errors;
@ -21,6 +22,7 @@ use hir::Semantics;
use ra_db::{FileId, FilePosition, FileRange}; use ra_db::{FileId, FilePosition, FileRange};
use ra_ide_db::source_change::SourceFileEdit; use ra_ide_db::source_change::SourceFileEdit;
use ra_syntax::{ast, AstNode, SyntaxNode, TextRange}; use ra_syntax::{ast, AstNode, SyntaxNode, TextRange};
use resolving::ResolvedRule;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
// A structured search replace rule. Create by calling `parse` on a str. // A structured search replace rule. Create by calling `parse` on a str.
@ -48,7 +50,9 @@ pub struct SsrMatches {
pub struct MatchFinder<'db> { pub struct MatchFinder<'db> {
/// Our source of information about the user's code. /// Our source of information about the user's code.
sema: Semantics<'db, ra_ide_db::RootDatabase>, sema: Semantics<'db, ra_ide_db::RootDatabase>,
rules: Vec<parsing::ParsedRule>, rules: Vec<ResolvedRule>,
scope: hir::SemanticsScope<'db>,
hygiene: hir::Hygiene,
} }
impl<'db> MatchFinder<'db> { impl<'db> MatchFinder<'db> {
@ -56,10 +60,24 @@ impl<'db> MatchFinder<'db> {
/// `lookup_context`. /// `lookup_context`.
pub fn in_context( pub fn in_context(
db: &'db ra_ide_db::RootDatabase, db: &'db ra_ide_db::RootDatabase,
_lookup_context: FilePosition, lookup_context: FilePosition,
) -> MatchFinder<'db> { ) -> MatchFinder<'db> {
// FIXME: Use lookup_context let sema = Semantics::new(db);
MatchFinder { sema: Semantics::new(db), rules: Vec::new() } let file = sema.parse(lookup_context.file_id);
// Find a node at the requested position, falling back to the whole file.
let node = file
.syntax()
.token_at_offset(lookup_context.offset)
.left_biased()
.map(|token| token.parent())
.unwrap_or_else(|| file.syntax().clone());
let scope = sema.scope(&node);
MatchFinder {
sema: Semantics::new(db),
rules: Vec::new(),
scope,
hygiene: hir::Hygiene::new(db, lookup_context.file_id.into()),
}
} }
/// Constructs an instance using the start of the first file in `db` as the lookup context. /// Constructs an instance using the start of the first file in `db` as the lookup context.
@ -84,8 +102,16 @@ impl<'db> MatchFinder<'db> {
/// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
/// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
/// match to it. /// match to it.
pub fn add_rule(&mut self, rule: SsrRule) { pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
self.add_parsed_rules(rule.parsed_rules); for parsed_rule in rule.parsed_rules {
self.rules.push(ResolvedRule::new(
parsed_rule,
&self.scope,
&self.hygiene,
self.rules.len(),
)?);
}
Ok(())
} }
/// Finds matches for all added rules and returns edits for all found matches. /// Finds matches for all added rules and returns edits for all found matches.
@ -110,8 +136,16 @@ impl<'db> MatchFinder<'db> {
/// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
/// intend to do replacement, use `add_rule` instead. /// intend to do replacement, use `add_rule` instead.
pub fn add_search_pattern(&mut self, pattern: SsrPattern) { pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
self.add_parsed_rules(pattern.parsed_rules); for parsed_rule in pattern.parsed_rules {
self.rules.push(ResolvedRule::new(
parsed_rule,
&self.scope,
&self.hygiene,
self.rules.len(),
)?);
}
Ok(())
} }
/// Returns matches for all added rules. /// Returns matches for all added rules.
@ -149,13 +183,6 @@ impl<'db> MatchFinder<'db> {
res res
} }
fn add_parsed_rules(&mut self, parsed_rules: Vec<parsing::ParsedRule>) {
for mut parsed_rule in parsed_rules {
parsed_rule.index = self.rules.len();
self.rules.push(parsed_rule);
}
}
fn output_debug_for_nodes_at_range( fn output_debug_for_nodes_at_range(
&self, &self,
node: &SyntaxNode, node: &SyntaxNode,
@ -175,7 +202,7 @@ impl<'db> MatchFinder<'db> {
// we get lots of noise. If at some point we add support for restricting rules // we get lots of noise. If at some point we add support for restricting rules
// to a particular kind of thing (e.g. only match type references), then we can // to a particular kind of thing (e.g. only match type references), then we can
// relax this. // relax this.
if rule.pattern.kind() != node.kind() { if rule.pattern.node.kind() != node.kind() {
continue; continue;
} }
out.push(MatchDebugInfo { out.push(MatchDebugInfo {
@ -185,7 +212,7 @@ impl<'db> MatchFinder<'db> {
"Match failed, but no reason was given".to_owned() "Match failed, but no reason was given".to_owned()
}), }),
}), }),
pattern: rule.pattern.clone(), pattern: rule.pattern.node.clone(),
node: node.clone(), node: node.clone(),
}); });
} }

View file

@ -2,7 +2,8 @@
//! process of matching, placeholder values are recorded. //! process of matching, placeholder values are recorded.
use crate::{ use crate::{
parsing::{Constraint, NodeKind, ParsedRule, Placeholder}, parsing::{Constraint, NodeKind, Placeholder},
resolving::{ResolvedPattern, ResolvedRule},
SsrMatches, SsrMatches,
}; };
use hir::Semantics; use hir::Semantics;
@ -51,6 +52,8 @@ pub struct Match {
pub(crate) rule_index: usize, pub(crate) rule_index: usize,
/// The depth of matched_node. /// The depth of matched_node.
pub(crate) depth: usize, pub(crate) depth: usize,
// Each path in the template rendered for the module in which the match was found.
pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>,
} }
/// Represents a `$var` in an SSR query. /// Represents a `$var` in an SSR query.
@ -86,7 +89,7 @@ pub(crate) struct MatchFailed {
/// parent module, we don't populate nested matches. /// parent module, we don't populate nested matches.
pub(crate) fn get_match( pub(crate) fn get_match(
debug_active: bool, debug_active: bool,
rule: &ParsedRule, rule: &ResolvedRule,
code: &SyntaxNode, code: &SyntaxNode,
restrict_range: &Option<FileRange>, restrict_range: &Option<FileRange>,
sema: &Semantics<ra_ide_db::RootDatabase>, sema: &Semantics<ra_ide_db::RootDatabase>,
@ -102,7 +105,7 @@ struct Matcher<'db, 'sema> {
/// If any placeholders come from anywhere outside of this range, then the match will be /// If any placeholders come from anywhere outside of this range, then the match will be
/// rejected. /// rejected.
restrict_range: Option<FileRange>, restrict_range: Option<FileRange>,
rule: &'sema ParsedRule, rule: &'sema ResolvedRule,
} }
/// Which phase of matching we're currently performing. We do two phases because most attempted /// Which phase of matching we're currently performing. We do two phases because most attempted
@ -117,14 +120,14 @@ enum Phase<'a> {
impl<'db, 'sema> Matcher<'db, 'sema> { impl<'db, 'sema> Matcher<'db, 'sema> {
fn try_match( fn try_match(
rule: &ParsedRule, rule: &ResolvedRule,
code: &SyntaxNode, code: &SyntaxNode,
restrict_range: &Option<FileRange>, restrict_range: &Option<FileRange>,
sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>, sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>,
) -> Result<Match, MatchFailed> { ) -> Result<Match, MatchFailed> {
let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule }; let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule };
// First pass at matching, where we check that node types and idents match. // First pass at matching, where we check that node types and idents match.
match_state.attempt_match_node(&mut Phase::First, &rule.pattern, code)?; match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
match_state.validate_range(&sema.original_range(code))?; match_state.validate_range(&sema.original_range(code))?;
let mut the_match = Match { let mut the_match = Match {
range: sema.original_range(code), range: sema.original_range(code),
@ -133,11 +136,19 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
ignored_comments: Vec::new(), ignored_comments: Vec::new(),
rule_index: rule.index, rule_index: rule.index,
depth: 0, depth: 0,
rendered_template_paths: FxHashMap::default(),
}; };
// Second matching pass, where we record placeholder matches, ignored comments and maybe do // Second matching pass, where we record placeholder matches, ignored comments and maybe do
// any other more expensive checks that we didn't want to do on the first pass. // any other more expensive checks that we didn't want to do on the first pass.
match_state.attempt_match_node(&mut Phase::Second(&mut the_match), &rule.pattern, code)?; match_state.attempt_match_node(
&mut Phase::Second(&mut the_match),
&rule.pattern.node,
code,
)?;
the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count(); the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count();
if let Some(template) = &rule.template {
the_match.render_template_paths(template, sema)?;
}
Ok(the_match) Ok(the_match)
} }
@ -195,6 +206,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
self.attempt_match_record_field_list(phase, pattern, code) self.attempt_match_record_field_list(phase, pattern, code)
} }
SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code), SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code),
_ => self.attempt_match_node_children(phase, pattern, code), _ => self.attempt_match_node_children(phase, pattern, code),
} }
} }
@ -311,6 +323,64 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
Ok(()) Ok(())
} }
    /// Paths are matched based on whether they refer to the same thing, even if they're written
    /// differently.
    fn attempt_match_path(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) {
            // Both nodes are PATH nodes (this method is only dispatched to for
            // `SyntaxKind::PATH`), so these casts can't fail.
            let pattern_path = ast::Path::cast(pattern.clone()).unwrap();
            let code_path = ast::Path::cast(code.clone()).unwrap();
            if let (Some(pattern_segment), Some(code_segment)) =
                (pattern_path.segment(), code_path.segment())
            {
                // Match everything within the segment except for the name-ref, which is handled
                // separately via comparing what the path resolves to below.
                self.attempt_match_opt(
                    phase,
                    pattern_segment.type_arg_list(),
                    code_segment.type_arg_list(),
                )?;
                self.attempt_match_opt(
                    phase,
                    pattern_segment.param_list(),
                    code_segment.param_list(),
                )?;
            }
            // Only resolve the code path on the second pass, after the cheaper structural
            // checks above have succeeded.
            if matches!(phase, Phase::Second(_)) {
                let resolution = self
                    .sema
                    .resolve_path(&code_path)
                    .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?;
                if pattern_resolved.resolution != resolution {
                    fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text());
                }
            }
        } else {
            // This pattern path wasn't resolved when the rule was built (e.g. it contains a
            // placeholder), so fall back to ordinary structural matching of its children.
            return self.attempt_match_node_children(phase, pattern, code);
        }
        Ok(())
    }
fn attempt_match_opt<T: AstNode>(
&self,
phase: &mut Phase,
pattern: Option<T>,
code: Option<T>,
) -> Result<(), MatchFailed> {
match (pattern, code) {
(Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()),
(None, None) => Ok(()),
(Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
(None, Some(c)) => {
fail_match!("Nothing in pattern to match code `{}`", c.syntax().text())
}
}
}
/// We want to allow the records to match in any order, so we have special matching logic for /// We want to allow the records to match in any order, so we have special matching logic for
/// them. /// them.
fn attempt_match_record_field_list( fn attempt_match_record_field_list(
@ -449,6 +519,28 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
} }
} }
impl Match {
    /// Renders each resolved path in `template` for the module in which this match was found,
    /// so that the replacement refers to the same item as the template did at the point where
    /// the rule was defined. Results are stored in `self.rendered_template_paths`.
    fn render_template_paths(
        &mut self,
        template: &ResolvedPattern,
        sema: &Semantics<ra_ide_db::RootDatabase>,
    ) -> Result<(), MatchFailed> {
        let module = sema
            .scope(&self.matched_node)
            .module()
            .ok_or_else(|| match_error!("Matched node isn't in a module"))?;
        for (path, resolved_path) in &template.resolved_paths {
            if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
                // Bug fix: the error message contains a `{}` placeholder, so it must be given
                // the path text as an argument — a format string with an unmatched positional
                // placeholder doesn't compile.
                let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {
                    match_error!(
                        "Failed to render template path `{}` at match location",
                        path.text()
                    )
                })?;
                self.rendered_template_paths.insert(path.clone(), mod_path);
            }
        }
        Ok(())
    }
}
impl Phase<'_> { impl Phase<'_> {
fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> { fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> {
loop { loop {
@ -578,7 +670,7 @@ mod tests {
let (db, position) = crate::tests::single_file(input); let (db, position) = crate::tests::single_file(input);
let mut match_finder = MatchFinder::in_context(&db, position); let mut match_finder = MatchFinder::in_context(&db, position);
match_finder.add_rule(rule); match_finder.add_rule(rule).unwrap();
let matches = match_finder.matches(); let matches = match_finder.matches();
assert_eq!(matches.matches.len(), 1); assert_eq!(matches.matches.len(), 1);
assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)"); assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");

View file

@ -7,7 +7,7 @@
use crate::errors::bail; use crate::errors::bail;
use crate::{SsrError, SsrPattern, SsrRule}; use crate::{SsrError, SsrPattern, SsrRule};
use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, T}; use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T};
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use std::str::FromStr; use std::str::FromStr;
@ -16,7 +16,6 @@ pub(crate) struct ParsedRule {
pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>, pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
pub(crate) pattern: SyntaxNode, pub(crate) pattern: SyntaxNode,
pub(crate) template: Option<SyntaxNode>, pub(crate) template: Option<SyntaxNode>,
pub(crate) index: usize,
} }
#[derive(Debug)] #[derive(Debug)]
@ -93,16 +92,11 @@ impl RuleBuilder {
placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
pattern: pattern.syntax().clone(), pattern: pattern.syntax().clone(),
template: Some(template.syntax().clone()), template: Some(template.syntax().clone()),
// For now we give the rule an index of 0. It's given a proper index when the rule
// is added to the SsrMatcher. Using an Option<usize>, instead would be slightly
// more correct, but we delete this field from ParsedRule in a subsequent commit.
index: 0,
}), }),
(Ok(pattern), None) => self.rules.push(ParsedRule { (Ok(pattern), None) => self.rules.push(ParsedRule {
placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
pattern: pattern.syntax().clone(), pattern: pattern.syntax().clone(),
template: None, template: None,
index: 0,
}), }),
_ => {} _ => {}
} }
@ -171,15 +165,6 @@ impl RawPattern {
} }
} }
impl ParsedRule {
pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> {
if token.kind() != SyntaxKind::IDENT {
return None;
}
self.placeholders_by_stand_in.get(token.text())
}
}
impl FromStr for SsrPattern { impl FromStr for SsrPattern {
type Err = SsrError; type Err = SsrError;

View file

@ -1,9 +1,9 @@
//! Code for applying replacement templates for matches that have previously been found. //! Code for applying replacement templates for matches that have previously been found.
use crate::matching::Var; use crate::matching::Var;
use crate::{parsing::ParsedRule, Match, SsrMatches}; use crate::{resolving::ResolvedRule, Match, SsrMatches};
use ra_syntax::ast::AstToken; use ra_syntax::ast::{self, AstToken};
use ra_syntax::{SyntaxElement, SyntaxNode, SyntaxToken, TextSize}; use ra_syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextSize};
use ra_text_edit::TextEdit; use ra_text_edit::TextEdit;
/// Returns a text edit that will replace each match in `matches` with its corresponding replacement /// Returns a text edit that will replace each match in `matches` with its corresponding replacement
@ -12,7 +12,7 @@ use ra_text_edit::TextEdit;
pub(crate) fn matches_to_edit( pub(crate) fn matches_to_edit(
matches: &SsrMatches, matches: &SsrMatches,
file_src: &str, file_src: &str,
rules: &[ParsedRule], rules: &[ResolvedRule],
) -> TextEdit { ) -> TextEdit {
matches_to_edit_at_offset(matches, file_src, 0.into(), rules) matches_to_edit_at_offset(matches, file_src, 0.into(), rules)
} }
@ -21,7 +21,7 @@ fn matches_to_edit_at_offset(
matches: &SsrMatches, matches: &SsrMatches,
file_src: &str, file_src: &str,
relative_start: TextSize, relative_start: TextSize,
rules: &[ParsedRule], rules: &[ResolvedRule],
) -> TextEdit { ) -> TextEdit {
let mut edit_builder = ra_text_edit::TextEditBuilder::default(); let mut edit_builder = ra_text_edit::TextEditBuilder::default();
for m in &matches.matches { for m in &matches.matches {
@ -36,11 +36,11 @@ fn matches_to_edit_at_offset(
struct ReplacementRenderer<'a> { struct ReplacementRenderer<'a> {
match_info: &'a Match, match_info: &'a Match,
file_src: &'a str, file_src: &'a str,
rules: &'a [ParsedRule], rules: &'a [ResolvedRule],
rule: &'a ParsedRule, rule: &'a ResolvedRule,
} }
fn render_replace(match_info: &Match, file_src: &str, rules: &[ParsedRule]) -> String { fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String {
let mut out = String::new(); let mut out = String::new();
let rule = &rules[match_info.rule_index]; let rule = &rules[match_info.rule_index];
let template = rule let template = rule
@ -48,7 +48,7 @@ fn render_replace(match_info: &Match, file_src: &str, rules: &[ParsedRule]) -> S
.as_ref() .as_ref()
.expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern"); .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern");
let renderer = ReplacementRenderer { match_info, file_src, rules, rule }; let renderer = ReplacementRenderer { match_info, file_src, rules, rule };
renderer.render_node_children(&template, &mut out); renderer.render_node(&template.node, &mut out);
for comment in &match_info.ignored_comments { for comment in &match_info.ignored_comments {
out.push_str(&comment.syntax().to_string()); out.push_str(&comment.syntax().to_string());
} }
@ -68,11 +68,31 @@ impl ReplacementRenderer<'_> {
self.render_token(&token, out); self.render_token(&token, out);
} }
SyntaxElement::Node(child_node) => { SyntaxElement::Node(child_node) => {
self.render_node_children(&child_node, out); self.render_node(&child_node, out);
} }
} }
} }
    /// Renders `node` into `out`. If `node` is a template path that was pre-rendered for the
    /// match's module (see `rendered_template_paths`), the rendered `hir::ModPath` is emitted
    /// in place of the original path text; otherwise children are rendered recursively.
    fn render_node(&self, node: &SyntaxNode, out: &mut String) {
        use ra_syntax::ast::AstNode;
        if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
            out.push_str(&mod_path.to_string());
            // Emit everything except for the segment's name-ref, since we already effectively
            // emitted that as part of `mod_path`.
            if let Some(path) = ast::Path::cast(node.clone()) {
                if let Some(segment) = path.segment() {
                    for node_or_token in segment.syntax().children_with_tokens() {
                        // The NAME_REF is covered by `mod_path`; keep type args, etc.
                        if node_or_token.kind() != SyntaxKind::NAME_REF {
                            self.render_node_or_token(&node_or_token, out);
                        }
                    }
                }
            }
        } else {
            self.render_node_children(&node, out);
        }
    }
fn render_token(&self, token: &SyntaxToken, out: &mut String) { fn render_token(&self, token: &SyntaxToken, out: &mut String) {
if let Some(placeholder) = self.rule.get_placeholder(&token) { if let Some(placeholder) = self.rule.get_placeholder(&token) {
if let Some(placeholder_value) = if let Some(placeholder_value) =

View file

@ -0,0 +1,153 @@
//! This module is responsible for resolving paths within rules.
use crate::errors::error;
use crate::{parsing, SsrError};
use parsing::Placeholder;
use ra_syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken};
use rustc_hash::{FxHashMap, FxHashSet};
use test_utils::mark;
/// A parsed SSR rule whose paths have been resolved in the context in which the SSR command
/// was invoked.
pub(crate) struct ResolvedRule {
    /// The search pattern, with its paths resolved.
    pub(crate) pattern: ResolvedPattern,
    /// The replacement template, if any. `None` when the rule is a search-only pattern.
    pub(crate) template: Option<ResolvedPattern>,
    /// This rule's position in `MatchFinder::rules`. Recorded on each `Match` (as
    /// `rule_index`) so the right template can be found when rendering replacements.
    pub(crate) index: usize,
}
/// A pattern (or template) syntax tree together with the paths within it that have been
/// resolved.
pub(crate) struct ResolvedPattern {
    /// Placeholders in this pattern, keyed by the stand-in identifier they were parsed into.
    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
    /// Root node of the pattern's syntax tree.
    pub(crate) node: SyntaxNode,
    // Paths in `node` that we've resolved.
    pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
}
/// What a path in a pattern or template resolved to.
pub(crate) struct ResolvedPath {
    pub(crate) resolution: hir::PathResolution,
}
impl ResolvedRule {
    /// Resolves all paths in `rule`'s pattern and template against `scope`, producing a
    /// `ResolvedRule` with the given `index`. Fails with an `SsrError` if any path that
    /// should be resolvable cannot be resolved.
    pub(crate) fn new(
        rule: parsing::ParsedRule,
        scope: &hir::SemanticsScope,
        hygiene: &hir::Hygiene,
        index: usize,
    ) -> Result<ResolvedRule, SsrError> {
        let resolver =
            Resolver { scope, hygiene, placeholders_by_stand_in: rule.placeholders_by_stand_in };
        // Resolve the template (if present) before the pattern; either resolution can fail.
        let resolved_template = if let Some(template) = rule.template {
            Some(resolver.resolve_pattern_tree(template)?)
        } else {
            None
        };
        Ok(ResolvedRule {
            pattern: resolver.resolve_pattern_tree(rule.pattern)?,
            template: resolved_template,
            index,
        })
    }

    /// Returns the placeholder that `token` stands in for, if any. Only identifier tokens can
    /// be placeholder stand-ins.
    pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> {
        if token.kind() != SyntaxKind::IDENT {
            return None;
        }
        self.pattern.placeholders_by_stand_in.get(token.text())
    }
}
/// Resolves the paths within a single rule's pattern and template trees.
struct Resolver<'a, 'db> {
    /// Scope at the position where the SSR command was invoked; paths are resolved relative
    /// to this scope.
    scope: &'a hir::SemanticsScope<'db>,
    hygiene: &'a hir::Hygiene,
    /// Placeholders by stand-in identifier; used to skip resolution of paths that contain
    /// placeholders.
    placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
}
impl Resolver<'_, '_> {
    /// Resolves all eligible paths within `pattern`, returning the tree together with the map
    /// of resolved paths. Fails if any eligible path fails to resolve.
    fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
        let mut resolved_paths = FxHashMap::default();
        self.resolve(pattern.clone(), &mut resolved_paths)?;
        Ok(ResolvedPattern {
            node: pattern,
            resolved_paths,
            placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
        })
    }

    /// Recursively walks `node`, resolving each path whose qualifier contains neither type
    /// arguments nor placeholders and recording the result in `resolved_paths`.
    fn resolve(
        &self,
        node: SyntaxNode,
        resolved_paths: &mut FxHashMap<SyntaxNode, ResolvedPath>,
    ) -> Result<(), SsrError> {
        use ra_syntax::ast::AstNode;
        if let Some(path) = ast::Path::cast(node.clone()) {
            // Check if this is an appropriate place in the path to resolve. If the path is
            // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains
            // a placeholder. e.g. `a::$b::c` then we want to resolve `a`.
            if !path_contains_type_arguments(path.qualifier())
                && !self.path_contains_placeholder(&path)
            {
                let resolution = self
                    .resolve_path(&path)
                    .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
                resolved_paths.insert(node, ResolvedPath { resolution });
                // Once a path has been resolved as a whole, don't also try to resolve its
                // qualifiers separately.
                return Ok(());
            }
        }
        for node in node.children() {
            self.resolve(node, resolved_paths)?;
        }
        Ok(())
    }

    /// Returns whether `path` contains a placeholder, but ignores any placeholders within type
    /// arguments.
    fn path_contains_placeholder(&self, path: &ast::Path) -> bool {
        if let Some(segment) = path.segment() {
            if let Some(name_ref) = segment.name_ref() {
                if self.placeholders_by_stand_in.contains_key(name_ref.text()) {
                    return true;
                }
            }
        }
        // Placeholders anywhere in the qualifier also count.
        if let Some(qualifier) = path.qualifier() {
            return self.path_contains_placeholder(&qualifier);
        }
        false
    }

    /// Attempts to resolve `path` in `self.scope`, including associated items reached via an
    /// ADT qualifier (e.g. `HashMap::new`).
    fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
        let hir_path = hir::Path::from_src(path.clone(), self.hygiene)?;
        // First try resolving the whole path. This will work for things like
        // `std::collections::HashMap`, but will fail for things like
        // `std::collections::HashMap::new`.
        if let Some(resolution) = self.scope.resolve_hir_path(&hir_path) {
            return Some(resolution);
        }
        // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if
        // that succeeds, then iterate through the candidates on the resolved type with the provided
        // name.
        let resolved_qualifier = self.scope.resolve_hir_path_qualifier(&hir_path.qualifier()?)?;
        if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
            adt.ty(self.scope.db).iterate_path_candidates(
                self.scope.db,
                self.scope.module()?.krate(),
                &FxHashSet::default(),
                Some(hir_path.segments().last()?.name),
                |_ty, assoc_item| Some(hir::PathResolution::AssocItem(assoc_item)),
            )
        } else {
            None
        }
    }
}
/// Returns whether `path` or any of its qualifiers contains type arguments.
fn path_contains_type_arguments(path: Option<ast::Path>) -> bool {
    // Guard: an absent path trivially contains no type arguments.
    let path = match path {
        Some(path) => path,
        None => return false,
    };
    let segment_has_args =
        path.segment().map(|segment| segment.type_arg_list().is_some()).unwrap_or(false);
    if segment_has_args {
        mark::hit!(type_arguments_within_path);
        return true;
    }
    // Otherwise keep walking towards the front of the path.
    path_contains_type_arguments(path.qualifier())
}

View file

@ -1,6 +1,6 @@
//! Searching for matches. //! Searching for matches.
use crate::{matching, parsing::ParsedRule, Match, MatchFinder}; use crate::{matching, resolving::ResolvedRule, Match, MatchFinder};
use ra_db::FileRange; use ra_db::FileRange;
use ra_syntax::{ast, AstNode, SyntaxNode}; use ra_syntax::{ast, AstNode, SyntaxNode};
@ -8,13 +8,13 @@ impl<'db> MatchFinder<'db> {
/// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
/// replacement impossible, so further processing is required in order to properly nest matches /// replacement impossible, so further processing is required in order to properly nest matches
/// and remove overlapping matches. This is done in the `nesting` module. /// and remove overlapping matches. This is done in the `nesting` module.
pub(crate) fn find_matches_for_rule(&self, rule: &ParsedRule, matches_out: &mut Vec<Match>) { pub(crate) fn find_matches_for_rule(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
// FIXME: Use resolved paths in the pattern to find places to search instead of always // FIXME: Use resolved paths in the pattern to find places to search instead of always
// scanning every node. // scanning every node.
self.slow_scan(rule, matches_out); self.slow_scan(rule, matches_out);
} }
fn slow_scan(&self, rule: &ParsedRule, matches_out: &mut Vec<Match>) { fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
use ra_db::SourceDatabaseExt; use ra_db::SourceDatabaseExt;
use ra_ide_db::symbol_index::SymbolsDatabase; use ra_ide_db::symbol_index::SymbolsDatabase;
for &root in self.sema.db.local_roots().iter() { for &root in self.sema.db.local_roots().iter() {
@ -30,7 +30,7 @@ impl<'db> MatchFinder<'db> {
fn slow_scan_node( fn slow_scan_node(
&self, &self,
code: &SyntaxNode, code: &SyntaxNode,
rule: &ParsedRule, rule: &ResolvedRule,
restrict_range: &Option<FileRange>, restrict_range: &Option<FileRange>,
matches_out: &mut Vec<Match>, matches_out: &mut Vec<Match>,
) { ) {

View file

@ -85,7 +85,7 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
let mut match_finder = MatchFinder::in_context(&db, position); let mut match_finder = MatchFinder::in_context(&db, position);
for rule in rules { for rule in rules {
let rule: SsrRule = rule.parse().unwrap(); let rule: SsrRule = rule.parse().unwrap();
match_finder.add_rule(rule); match_finder.add_rule(rule).unwrap();
} }
let edits = match_finder.edits(); let edits = match_finder.edits();
if edits.is_empty() { if edits.is_empty() {
@ -114,7 +114,7 @@ fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet:
fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
let (db, position) = single_file(code); let (db, position) = single_file(code);
let mut match_finder = MatchFinder::in_context(&db, position); let mut match_finder = MatchFinder::in_context(&db, position);
match_finder.add_search_pattern(pattern.parse().unwrap()); match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let matched_strings: Vec<String> = let matched_strings: Vec<String> =
match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect(); match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
if matched_strings != expected && !expected.is_empty() { if matched_strings != expected && !expected.is_empty() {
@ -126,7 +126,7 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
fn assert_no_match(pattern: &str, code: &str) { fn assert_no_match(pattern: &str, code: &str) {
let (db, position) = single_file(code); let (db, position) = single_file(code);
let mut match_finder = MatchFinder::in_context(&db, position); let mut match_finder = MatchFinder::in_context(&db, position);
match_finder.add_search_pattern(pattern.parse().unwrap()); match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let matches = match_finder.matches().flattened().matches; let matches = match_finder.matches().flattened().matches;
if !matches.is_empty() { if !matches.is_empty() {
print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text()); print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text());
@ -137,7 +137,7 @@ fn assert_no_match(pattern: &str, code: &str) {
fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) { fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
let (db, position) = single_file(code); let (db, position) = single_file(code);
let mut match_finder = MatchFinder::in_context(&db, position); let mut match_finder = MatchFinder::in_context(&db, position);
match_finder.add_search_pattern(pattern.parse().unwrap()); match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let mut reasons = Vec::new(); let mut reasons = Vec::new();
for d in match_finder.debug_where_text_equal(position.file_id, snippet) { for d in match_finder.debug_where_text_equal(position.file_id, snippet) {
if let Some(reason) = d.match_failure_reason() { if let Some(reason) = d.match_failure_reason() {
@ -350,6 +350,60 @@ fn match_pattern() {
assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]); assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]);
} }
// If our pattern has a full path, e.g. a::b::c() and the code has c(), but c resolves to
// a::b::c, then we should match.
#[test]
fn match_fully_qualified_fn_path() {
    let code = r#"
mod a {
pub mod b {
pub fn c(_: i32) {}
}
}
use a::b::c;
fn f1() {
c(42);
}
"#;
    // The code writes `c(42)` unqualified (via the `use`), but it resolves to `a::b::c`, so
    // the fully-qualified pattern still matches.
    assert_matches("a::b::c($a)", code, &["c(42)"]);
}
// Matching of type names is also based on resolution, not on how the path is written.
#[test]
fn match_resolved_type_name() {
    let code = r#"
mod m1 {
pub mod m2 {
pub trait Foo<T> {}
}
}
mod m3 {
trait Foo<T> {}
fn f1(f: Option<&dyn Foo<bool>>) {}
}
mod m4 {
use crate::m1::m2::Foo;
fn f1(f: Option<&dyn Foo<i32>>) {}
}
"#;
    // Only m4's `Foo` resolves to `m1::m2::Foo`; m3 declares a different trait that happens
    // to share the name, so its usage must not match.
    assert_matches("m1::m2::Foo<$t>", code, &["Foo<i32>"]);
}
// Type arguments in a path qualifier are compared structurally (see
// `path_contains_type_arguments`), so differing arguments must prevent a match.
#[test]
fn type_arguments_within_path() {
    mark::check!(type_arguments_within_path);
    let code = r#"
mod foo {
pub struct Bar<T> {t: T}
impl<T> Bar<T> {
pub fn baz() {}
}
}
fn f1() {foo::Bar::<i32>::baz();}
"#;
    // `<i64>` in the pattern doesn't match `<i32>` in the code.
    assert_no_match("foo::Bar::<i64>::baz()", code);
    assert_matches("foo::Bar::<i32>::baz()", code, &["foo::Bar::<i32>::baz()"]);
}
#[test] #[test]
fn literal_constraint() { fn literal_constraint() {
mark::check!(literal_constraint); mark::check!(literal_constraint);
@ -482,6 +536,86 @@ fn replace_associated_function_call() {
); );
} }
#[test]
fn replace_path_in_different_contexts() {
    // Note the <|> inside module a::b which marks the point where the rule is interpreted. We
    // replace foo with bar, but both need different path qualifiers in different contexts. In f4,
    // foo is unqualified because of a use statement, however the replacement needs to be fully
    // qualified.
    //
    // Expected qualifiers: inside `c` itself -> `bar()`; in `b` -> `c::bar()`; in `a` ->
    // `b::c::bar()`; at crate root (f4) -> `a::b::c::bar()`.
    assert_ssr_transform(
        "c::foo() ==>> c::bar()",
        r#"
mod a {
pub mod b {<|>
pub mod c {
pub fn foo() {}
pub fn bar() {}
fn f1() { foo() }
}
fn f2() { c::foo() }
}
fn f3() { b::c::foo() }
}
use a::b::c::foo;
fn f4() { foo() }
"#,
        expect![[r#"
mod a {
pub mod b {
pub mod c {
pub fn foo() {}
pub fn bar() {}
fn f1() { bar() }
}
fn f2() { c::bar() }
}
fn f3() { b::c::bar() }
}
use a::b::c::foo;
fn f4() { a::b::c::bar() }
"#]],
    );
}
// Replacement templates with type arguments: the path part is re-rendered per context
// (`crate::d::Bar` in module c, plain `Bar` in module d) while the matched `$a` type
// argument is carried through.
#[test]
fn replace_associated_function_with_generics() {
    assert_ssr_transform(
        "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()",
        r#"
mod c {
pub struct Foo<T> {v: T}
impl<T> Foo<T> { pub fn new() {} }
fn f1() {
Foo::<i32>::new();
}
}
mod d {
pub struct Bar<T> {v: T}
impl<T> Bar<T> { pub fn default() {} }
fn f1() {
super::c::Foo::<i32>::new();
}
}
"#,
        expect![[r#"
mod c {
pub struct Foo<T> {v: T}
impl<T> Foo<T> { pub fn new() {} }
fn f1() {
crate::d::Bar::<i32>::default();
}
}
mod d {
pub struct Bar<T> {v: T}
impl<T> Bar<T> { pub fn default() {} }
fn f1() {
Bar::<i32>::default();
}
}
"#]],
    );
}
#[test] #[test]
fn replace_type() { fn replace_type() {
assert_ssr_transform( assert_ssr_transform(

View file

@ -9,7 +9,7 @@ pub fn apply_ssr_rules(rules: Vec<SsrRule>) -> Result<()> {
let db = host.raw_database(); let db = host.raw_database();
let mut match_finder = MatchFinder::at_first_file(db)?; let mut match_finder = MatchFinder::at_first_file(db)?;
for rule in rules { for rule in rules {
match_finder.add_rule(rule); match_finder.add_rule(rule)?;
} }
let edits = match_finder.edits(); let edits = match_finder.edits();
for edit in edits { for edit in edits {
@ -32,7 +32,7 @@ pub fn search_for_patterns(patterns: Vec<SsrPattern>, debug_snippet: Option<Stri
let db = host.raw_database(); let db = host.raw_database();
let mut match_finder = MatchFinder::at_first_file(db)?; let mut match_finder = MatchFinder::at_first_file(db)?;
for pattern in patterns { for pattern in patterns {
match_finder.add_search_pattern(pattern); match_finder.add_search_pattern(pattern)?;
} }
if let Some(debug_snippet) = &debug_snippet { if let Some(debug_snippet) = &debug_snippet {
for &root in db.local_roots().iter() { for &root in db.local_roots().iter() {