author    Dmitry <[email protected]>  2020-08-09 14:35:51 +0100
committer Dmitry <[email protected]>  2020-08-09 14:39:32 +0100
commit    8068302fefc75440b823f4bf1731a5f347d7c767 (patch)
tree      251b967182e79bc82a58c2fb208c688f6152df1f /crates/ra_ssr/src
parent    1a43a0f63e0008787225abb6fb2baef97b6a39e0 (diff)
parent    8a57afe5a4bfab40072a83f7dc4ca560bf860919 (diff)
Merge remote-tracking branch 'origin/master'
Diffstat (limited to 'crates/ra_ssr/src')
-rw-r--r--  crates/ra_ssr/src/lib.rs       |   6
-rw-r--r--  crates/ra_ssr/src/matching.rs  |  12
-rw-r--r--  crates/ra_ssr/src/parsing.rs   |  31
-rw-r--r--  crates/ra_ssr/src/replacing.rs | 106
-rw-r--r--  crates/ra_ssr/src/resolving.rs |  35
-rw-r--r--  crates/ra_ssr/src/search.rs    |  98
-rw-r--r--  crates/ra_ssr/src/tests.rs     | 195
7 files changed, 403 insertions, 80 deletions
diff --git a/crates/ra_ssr/src/lib.rs b/crates/ra_ssr/src/lib.rs
index 7014a6ac6..c780b460a 100644
--- a/crates/ra_ssr/src/lib.rs
+++ b/crates/ra_ssr/src/lib.rs
@@ -52,6 +52,7 @@ pub struct MatchFinder<'db> {
52 sema: Semantics<'db, ra_ide_db::RootDatabase>, 52 sema: Semantics<'db, ra_ide_db::RootDatabase>,
53 rules: Vec<ResolvedRule>, 53 rules: Vec<ResolvedRule>,
54 resolution_scope: resolving::ResolutionScope<'db>, 54 resolution_scope: resolving::ResolutionScope<'db>,
55 restrict_ranges: Vec<FileRange>,
55} 56}
56 57
57impl<'db> MatchFinder<'db> { 58impl<'db> MatchFinder<'db> {
@@ -60,10 +61,12 @@ impl<'db> MatchFinder<'db> {
60 pub fn in_context( 61 pub fn in_context(
61 db: &'db ra_ide_db::RootDatabase, 62 db: &'db ra_ide_db::RootDatabase,
62 lookup_context: FilePosition, 63 lookup_context: FilePosition,
64 mut restrict_ranges: Vec<FileRange>,
63 ) -> MatchFinder<'db> { 65 ) -> MatchFinder<'db> {
66 restrict_ranges.retain(|range| !range.range.is_empty());
64 let sema = Semantics::new(db); 67 let sema = Semantics::new(db);
65 let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context); 68 let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context);
66 MatchFinder { sema: Semantics::new(db), rules: Vec::new(), resolution_scope } 69 MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges }
67 } 70 }
68 71
69 /// Constructs an instance using the start of the first file in `db` as the lookup context. 72 /// Constructs an instance using the start of the first file in `db` as the lookup context.
@@ -79,6 +82,7 @@ impl<'db> MatchFinder<'db> {
79 Ok(MatchFinder::in_context( 82 Ok(MatchFinder::in_context(
80 db, 83 db,
81 FilePosition { file_id: first_file_id, offset: 0.into() }, 84 FilePosition { file_id: first_file_id, offset: 0.into() },
85 vec![],
82 )) 86 ))
83 } else { 87 } else {
84 bail!("No files to search"); 88 bail!("No files to search");
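
Note on the lib.rs change above: MatchFinder::in_context now takes a third argument, restrict_ranges, which limits both searching and replacement to the given ranges; zero-length ranges (a bare cursor) are filtered out, so an empty selection imposes no restriction. The following crate-internal sketch is not part of the commit; it only assumes the API visible in this diff (in_context, add_rule, matches) and mirrors the test code at the bottom of the page.

    use crate::{MatchFinder, SsrRule};
    use ra_db::{FilePosition, FileRange};

    /// Counts matches of `rule`, optionally restricted to `selection`.
    fn count_matches(
        db: &ra_ide_db::RootDatabase,
        position: FilePosition,
        selection: Option<FileRange>,
        rule: SsrRule,
    ) -> usize {
        // `in_context` drops empty ranges, so a collapsed selection (just a cursor)
        // behaves like no restriction at all.
        let restrict_ranges: Vec<FileRange> = selection.into_iter().collect();
        let mut match_finder = MatchFinder::in_context(db, position, restrict_ranges);
        match_finder.add_rule(rule).unwrap();
        match_finder.matches().matches.len()
    }
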
diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs
index 4862622bd..0f72fea69 100644
--- a/crates/ra_ssr/src/matching.rs
+++ b/crates/ra_ssr/src/matching.rs
@@ -209,7 +209,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
209 // Some kinds of nodes have special handling. For everything else, we fall back to default 209 // Some kinds of nodes have special handling. For everything else, we fall back to default
210 // matching. 210 // matching.
211 match code.kind() { 211 match code.kind() {
212 SyntaxKind::RECORD_FIELD_LIST => { 212 SyntaxKind::RECORD_EXPR_FIELD_LIST => {
213 self.attempt_match_record_field_list(phase, pattern, code) 213 self.attempt_match_record_field_list(phase, pattern, code)
214 } 214 }
215 SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code), 215 SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
@@ -348,8 +348,8 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
348 // separately via comparing what the path resolves to below. 348 // separately via comparing what the path resolves to below.
349 self.attempt_match_opt( 349 self.attempt_match_opt(
350 phase, 350 phase,
351 pattern_segment.type_arg_list(), 351 pattern_segment.generic_arg_list(),
352 code_segment.type_arg_list(), 352 code_segment.generic_arg_list(),
353 )?; 353 )?;
354 self.attempt_match_opt( 354 self.attempt_match_opt(
355 phase, 355 phase,
@@ -399,7 +399,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
399 // Build a map keyed by field name. 399 // Build a map keyed by field name.
400 let mut fields_by_name = FxHashMap::default(); 400 let mut fields_by_name = FxHashMap::default();
401 for child in code.children() { 401 for child in code.children() {
402 if let Some(record) = ast::RecordField::cast(child.clone()) { 402 if let Some(record) = ast::RecordExprField::cast(child.clone()) {
403 if let Some(name) = record.field_name() { 403 if let Some(name) = record.field_name() {
404 fields_by_name.insert(name.text().clone(), child.clone()); 404 fields_by_name.insert(name.text().clone(), child.clone());
405 } 405 }
@@ -706,8 +706,8 @@ mod tests {
706 let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap(); 706 let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
707 let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }"; 707 let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
708 708
709 let (db, position) = crate::tests::single_file(input); 709 let (db, position, selections) = crate::tests::single_file(input);
710 let mut match_finder = MatchFinder::in_context(&db, position); 710 let mut match_finder = MatchFinder::in_context(&db, position, selections);
711 match_finder.add_rule(rule).unwrap(); 711 match_finder.add_rule(rule).unwrap();
712 let matches = match_finder.matches(); 712 let matches = match_finder.matches();
713 assert_eq!(matches.matches.len(), 1); 713 assert_eq!(matches.matches.len(), 1);
diff --git a/crates/ra_ssr/src/parsing.rs b/crates/ra_ssr/src/parsing.rs
index 2d6f4e514..f455eb5b7 100644
--- a/crates/ra_ssr/src/parsing.rs
+++ b/crates/ra_ssr/src/parsing.rs
@@ -10,6 +10,7 @@ use crate::{SsrError, SsrPattern, SsrRule};
10use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T}; 10use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T};
11use rustc_hash::{FxHashMap, FxHashSet}; 11use rustc_hash::{FxHashMap, FxHashSet};
12use std::str::FromStr; 12use std::str::FromStr;
13use test_utils::mark;
13 14
14#[derive(Debug)] 15#[derive(Debug)]
15pub(crate) struct ParsedRule { 16pub(crate) struct ParsedRule {
@@ -69,11 +70,8 @@ impl ParsedRule {
69 rules: Vec::new(), 70 rules: Vec::new(),
70 }; 71 };
71 builder.try_add(ast::Expr::parse(&raw_pattern), raw_template.map(ast::Expr::parse)); 72 builder.try_add(ast::Expr::parse(&raw_pattern), raw_template.map(ast::Expr::parse));
72 builder.try_add(ast::TypeRef::parse(&raw_pattern), raw_template.map(ast::TypeRef::parse)); 73 builder.try_add(ast::Type::parse(&raw_pattern), raw_template.map(ast::Type::parse));
73 builder.try_add( 74 builder.try_add(ast::Item::parse(&raw_pattern), raw_template.map(ast::Item::parse));
74 ast::ModuleItem::parse(&raw_pattern),
75 raw_template.map(ast::ModuleItem::parse),
76 );
77 builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse)); 75 builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse));
78 builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse)); 76 builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse));
79 builder.build() 77 builder.build()
@@ -102,14 +100,35 @@ impl RuleBuilder {
102 } 100 }
103 } 101 }
104 102
105 fn build(self) -> Result<Vec<ParsedRule>, SsrError> { 103 fn build(mut self) -> Result<Vec<ParsedRule>, SsrError> {
106 if self.rules.is_empty() { 104 if self.rules.is_empty() {
107 bail!("Not a valid Rust expression, type, item, path or pattern"); 105 bail!("Not a valid Rust expression, type, item, path or pattern");
108 } 106 }
107 // If any rules contain paths, then we reject any rules that don't contain paths. Allowing a
108 // mix leads to strange semantics, since the path-based rules only match things where the
109 // path refers to semantically the same thing, whereas the non-path-based rules could match
110 // anything. Specifically, if we have a rule like `foo ==>> bar` we only want to match the
111 // `foo` that is in the current scope, not any `foo`. However "foo" can be parsed as a
112 // pattern (IDENT_PAT -> NAME -> IDENT). Allowing such a rule through would result in
113 // renaming everything called `foo` to `bar`. It'd also be slow, since without a path, we'd
114 // have to use the slow-scan search mechanism.
115 if self.rules.iter().any(|rule| contains_path(&rule.pattern)) {
116 let old_len = self.rules.len();
117 self.rules.retain(|rule| contains_path(&rule.pattern));
118 if self.rules.len() < old_len {
119 mark::hit!(pattern_is_a_single_segment_path);
120 }
121 }
109 Ok(self.rules) 122 Ok(self.rules)
110 } 123 }
111} 124}
112 125
126/// Returns whether there are any paths in `node`.
127fn contains_path(node: &SyntaxNode) -> bool {
128 node.kind() == SyntaxKind::PATH
129 || node.descendants().any(|node| node.kind() == SyntaxKind::PATH)
130}
131
113impl FromStr for SsrRule { 132impl FromStr for SsrRule {
114 type Err = SsrError; 133 type Err = SsrError;
115 134
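
To make the filtering in RuleBuilder::build above concrete: a bare identifier such as `foo` parses both as a single-segment path and as an identifier pattern, and only the path-based rule is kept. The sketch below is not part of the commit; it reuses contains_path and the ast::Path::parse / ast::Pat::parse entry points shown in this hunk, whose exact signatures are assumed.

    use ra_syntax::{ast, AstNode, SyntaxKind, SyntaxNode};

    /// Same helper as in the hunk above: does `node` contain any path?
    fn contains_path(node: &SyntaxNode) -> bool {
        node.kind() == SyntaxKind::PATH
            || node.descendants().any(|node| node.kind() == SyntaxKind::PATH)
    }

    #[test]
    fn bare_identifier_parses_both_ways() {
        // As a path, `foo` is a single-segment PATH node...
        let as_path = ast::Path::parse("foo").unwrap();
        assert!(contains_path(as_path.syntax()));
        // ...but as a pattern it is IDENT_PAT -> NAME -> IDENT, with no PATH inside.
        // Keeping that parse would turn `foo ==>> bar` into a rename of every `foo`.
        let as_pat = ast::Pat::parse("foo").unwrap();
        assert!(!contains_path(as_pat.syntax()));
    }
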
diff --git a/crates/ra_ssr/src/replacing.rs b/crates/ra_ssr/src/replacing.rs
index 4b3f5509c..0943244ff 100644
--- a/crates/ra_ssr/src/replacing.rs
+++ b/crates/ra_ssr/src/replacing.rs
@@ -3,8 +3,9 @@
3use crate::matching::Var; 3use crate::matching::Var;
4use crate::{resolving::ResolvedRule, Match, SsrMatches}; 4use crate::{resolving::ResolvedRule, Match, SsrMatches};
5use ra_syntax::ast::{self, AstToken}; 5use ra_syntax::ast::{self, AstToken};
6use ra_syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextSize}; 6use ra_syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize};
7use ra_text_edit::TextEdit; 7use ra_text_edit::TextEdit;
8use rustc_hash::{FxHashMap, FxHashSet};
8 9
9/// Returns a text edit that will replace each match in `matches` with its corresponding replacement 10/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
10/// template. Placeholders in the template will have been substituted with whatever they matched to 11/// template. Placeholders in the template will have been substituted with whatever they matched to
@@ -38,62 +39,79 @@ struct ReplacementRenderer<'a> {
38 file_src: &'a str, 39 file_src: &'a str,
39 rules: &'a [ResolvedRule], 40 rules: &'a [ResolvedRule],
40 rule: &'a ResolvedRule, 41 rule: &'a ResolvedRule,
42 out: String,
43 // Map from a range within `out` to a token in `template` that represents a placeholder. This is
44 // used to validate that the generated source code doesn't split any placeholder expansions (see
45 // below).
46 placeholder_tokens_by_range: FxHashMap<TextRange, SyntaxToken>,
47 // Which placeholder tokens need to be wrapped in parenthesis in order to ensure that when `out`
48 // is parsed, placeholders don't get split. e.g. if a template of `$a.to_string()` results in `1
49 // + 2.to_string()` then the placeholder value `1 + 2` was split and needs parenthesis.
50 placeholder_tokens_requiring_parenthesis: FxHashSet<SyntaxToken>,
41} 51}
42 52
43fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String { 53fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String {
44 let mut out = String::new();
45 let rule = &rules[match_info.rule_index]; 54 let rule = &rules[match_info.rule_index];
46 let template = rule 55 let template = rule
47 .template 56 .template
48 .as_ref() 57 .as_ref()
49 .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern"); 58 .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern");
50 let renderer = ReplacementRenderer { match_info, file_src, rules, rule }; 59 let mut renderer = ReplacementRenderer {
51 renderer.render_node(&template.node, &mut out); 60 match_info,
61 file_src,
62 rules,
63 rule,
64 out: String::new(),
65 placeholder_tokens_requiring_parenthesis: FxHashSet::default(),
66 placeholder_tokens_by_range: FxHashMap::default(),
67 };
68 renderer.render_node(&template.node);
69 renderer.maybe_rerender_with_extra_parenthesis(&template.node);
52 for comment in &match_info.ignored_comments { 70 for comment in &match_info.ignored_comments {
53 out.push_str(&comment.syntax().to_string()); 71 renderer.out.push_str(&comment.syntax().to_string());
54 } 72 }
55 out 73 renderer.out
56} 74}
57 75
58impl ReplacementRenderer<'_> { 76impl ReplacementRenderer<'_> {
59 fn render_node_children(&self, node: &SyntaxNode, out: &mut String) { 77 fn render_node_children(&mut self, node: &SyntaxNode) {
60 for node_or_token in node.children_with_tokens() { 78 for node_or_token in node.children_with_tokens() {
61 self.render_node_or_token(&node_or_token, out); 79 self.render_node_or_token(&node_or_token);
62 } 80 }
63 } 81 }
64 82
65 fn render_node_or_token(&self, node_or_token: &SyntaxElement, out: &mut String) { 83 fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) {
66 match node_or_token { 84 match node_or_token {
67 SyntaxElement::Token(token) => { 85 SyntaxElement::Token(token) => {
68 self.render_token(&token, out); 86 self.render_token(&token);
69 } 87 }
70 SyntaxElement::Node(child_node) => { 88 SyntaxElement::Node(child_node) => {
71 self.render_node(&child_node, out); 89 self.render_node(&child_node);
72 } 90 }
73 } 91 }
74 } 92 }
75 93
76 fn render_node(&self, node: &SyntaxNode, out: &mut String) { 94 fn render_node(&mut self, node: &SyntaxNode) {
77 use ra_syntax::ast::AstNode; 95 use ra_syntax::ast::AstNode;
78 if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) { 96 if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
79 out.push_str(&mod_path.to_string()); 97 self.out.push_str(&mod_path.to_string());
80 // Emit everything except for the segment's name-ref, since we already effectively 98 // Emit everything except for the segment's name-ref, since we already effectively
81 // emitted that as part of `mod_path`. 99 // emitted that as part of `mod_path`.
82 if let Some(path) = ast::Path::cast(node.clone()) { 100 if let Some(path) = ast::Path::cast(node.clone()) {
83 if let Some(segment) = path.segment() { 101 if let Some(segment) = path.segment() {
84 for node_or_token in segment.syntax().children_with_tokens() { 102 for node_or_token in segment.syntax().children_with_tokens() {
85 if node_or_token.kind() != SyntaxKind::NAME_REF { 103 if node_or_token.kind() != SyntaxKind::NAME_REF {
86 self.render_node_or_token(&node_or_token, out); 104 self.render_node_or_token(&node_or_token);
87 } 105 }
88 } 106 }
89 } 107 }
90 } 108 }
91 } else { 109 } else {
92 self.render_node_children(&node, out); 110 self.render_node_children(&node);
93 } 111 }
94 } 112 }
95 113
96 fn render_token(&self, token: &SyntaxToken, out: &mut String) { 114 fn render_token(&mut self, token: &SyntaxToken) {
97 if let Some(placeholder) = self.rule.get_placeholder(&token) { 115 if let Some(placeholder) = self.rule.get_placeholder(&token) {
98 if let Some(placeholder_value) = 116 if let Some(placeholder_value) =
99 self.match_info.placeholder_values.get(&Var(placeholder.ident.to_string())) 117 self.match_info.placeholder_values.get(&Var(placeholder.ident.to_string()))
@@ -107,8 +125,23 @@ impl ReplacementRenderer<'_> {
107 range.start(), 125 range.start(),
108 self.rules, 126 self.rules,
109 ); 127 );
128 let needs_parenthesis =
129 self.placeholder_tokens_requiring_parenthesis.contains(token);
110 edit.apply(&mut matched_text); 130 edit.apply(&mut matched_text);
111 out.push_str(&matched_text); 131 if needs_parenthesis {
132 self.out.push('(');
133 }
134 self.placeholder_tokens_by_range.insert(
135 TextRange::new(
136 TextSize::of(&self.out),
137 TextSize::of(&self.out) + TextSize::of(&matched_text),
138 ),
139 token.clone(),
140 );
141 self.out.push_str(&matched_text);
142 if needs_parenthesis {
143 self.out.push(')');
144 }
112 } else { 145 } else {
113 // We validated that all placeholder references were valid before we 146 // We validated that all placeholder references were valid before we
114 // started, so this shouldn't happen. 147 // started, so this shouldn't happen.
@@ -118,7 +151,44 @@ impl ReplacementRenderer<'_> {
118 ); 151 );
119 } 152 }
120 } else { 153 } else {
121 out.push_str(token.text().as_str()); 154 self.out.push_str(token.text().as_str());
155 }
156 }
157
158 // Checks if the resulting code, when parsed doesn't split any placeholders due to different
159 // order of operations between the search pattern and the replacement template. If any do, then
160 // we rerender the template and wrap the problematic placeholders with parenthesis.
161 fn maybe_rerender_with_extra_parenthesis(&mut self, template: &SyntaxNode) {
162 if let Some(node) = parse_as_kind(&self.out, template.kind()) {
163 self.remove_node_ranges(node);
164 if self.placeholder_tokens_by_range.is_empty() {
165 return;
166 }
167 self.placeholder_tokens_requiring_parenthesis =
168 self.placeholder_tokens_by_range.values().cloned().collect();
169 self.out.clear();
170 self.render_node(template);
171 }
172 }
173
174 fn remove_node_ranges(&mut self, node: SyntaxNode) {
175 self.placeholder_tokens_by_range.remove(&node.text_range());
176 for child in node.children() {
177 self.remove_node_ranges(child);
178 }
179 }
180}
181
182fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option<SyntaxNode> {
183 use ra_syntax::ast::AstNode;
184 if ast::Expr::can_cast(kind) {
185 if let Ok(expr) = ast::Expr::parse(code) {
186 return Some(expr.syntax().clone());
187 }
188 } else if ast::Item::can_cast(kind) {
189 if let Ok(item) = ast::Item::parse(code) {
190 return Some(item.syntax().clone());
122 } 191 }
123 } 192 }
193 None
124} 194}
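
The two-pass rendering above can be summarised as: render, reparse the output, and if no single node of the reparsed tree covers the range where a placeholder was emitted, that placeholder was split by operator precedence and gets parentheses on the second pass. Below is a standalone illustration of the detection step; it is not part of the commit, covered_by_single_node is a hypothetical helper standing in for remove_node_ranges, and ast::Expr::parse is assumed to behave as in parse_as_kind above.

    use ra_syntax::{ast, AstNode, SyntaxNode, TextRange, TextSize};

    /// Is `range` exactly the range of some node in the tree rooted at `root`?
    fn covered_by_single_node(root: &SyntaxNode, range: TextRange) -> bool {
        root.descendants().any(|node| node.text_range() == range)
    }

    #[test]
    fn split_placeholder_is_detected() {
        // Rendering the template `$a.to_string()` with `$a` = `1 + 2` produces:
        let out = "1 + 2.to_string()";
        // The placeholder's text was emitted at the start of `out`.
        let placeholder_range = TextRange::new(TextSize::from(0), TextSize::of("1 + 2"));
        let reparsed = ast::Expr::parse(out).unwrap();
        // `.to_string()` binds tighter than `+`, so no node covers exactly `1 + 2`:
        // the placeholder was split and must be re-rendered as `(1 + 2).to_string()`.
        assert!(!covered_by_single_node(reparsed.syntax(), placeholder_range));
    }
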
diff --git a/crates/ra_ssr/src/resolving.rs b/crates/ra_ssr/src/resolving.rs
index 123bd2bb2..df60048eb 100644
--- a/crates/ra_ssr/src/resolving.rs
+++ b/crates/ra_ssr/src/resolving.rs
@@ -11,6 +11,7 @@ use test_utils::mark;
11pub(crate) struct ResolutionScope<'db> { 11pub(crate) struct ResolutionScope<'db> {
12 scope: hir::SemanticsScope<'db>, 12 scope: hir::SemanticsScope<'db>,
13 hygiene: hir::Hygiene, 13 hygiene: hir::Hygiene,
14 node: SyntaxNode,
14} 15}
15 16
16pub(crate) struct ResolvedRule { 17pub(crate) struct ResolvedRule {
@@ -25,6 +26,7 @@ pub(crate) struct ResolvedPattern {
25 // Paths in `node` that we've resolved. 26 // Paths in `node` that we've resolved.
26 pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>, 27 pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
27 pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, hir::Function>, 28 pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, hir::Function>,
29 pub(crate) contains_self: bool,
28} 30}
29 31
30pub(crate) struct ResolvedPath { 32pub(crate) struct ResolvedPath {
@@ -68,6 +70,7 @@ struct Resolver<'a, 'db> {
68 70
69impl Resolver<'_, '_> { 71impl Resolver<'_, '_> {
70 fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> { 72 fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
73 use ra_syntax::{SyntaxElement, T};
71 let mut resolved_paths = FxHashMap::default(); 74 let mut resolved_paths = FxHashMap::default();
72 self.resolve(pattern.clone(), 0, &mut resolved_paths)?; 75 self.resolve(pattern.clone(), 0, &mut resolved_paths)?;
73 let ufcs_function_calls = resolved_paths 76 let ufcs_function_calls = resolved_paths
@@ -85,11 +88,17 @@ impl Resolver<'_, '_> {
85 None 88 None
86 }) 89 })
87 .collect(); 90 .collect();
91 let contains_self =
92 pattern.descendants_with_tokens().any(|node_or_token| match node_or_token {
93 SyntaxElement::Token(t) => t.kind() == T![self],
94 _ => false,
95 });
88 Ok(ResolvedPattern { 96 Ok(ResolvedPattern {
89 node: pattern, 97 node: pattern,
90 resolved_paths, 98 resolved_paths,
91 placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), 99 placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
92 ufcs_function_calls, 100 ufcs_function_calls,
101 contains_self,
93 }) 102 })
94 } 103 }
95 104
@@ -101,6 +110,10 @@ impl Resolver<'_, '_> {
101 ) -> Result<(), SsrError> { 110 ) -> Result<(), SsrError> {
102 use ra_syntax::ast::AstNode; 111 use ra_syntax::ast::AstNode;
103 if let Some(path) = ast::Path::cast(node.clone()) { 112 if let Some(path) = ast::Path::cast(node.clone()) {
113 if is_self(&path) {
114 // Self cannot be resolved like other paths.
115 return Ok(());
116 }
104 // Check if this is an appropriate place in the path to resolve. If the path is 117 // Check if this is an appropriate place in the path to resolve. If the path is
105 // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains 118 // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains
106 // a placeholder. e.g. `a::$b::c` then we want to resolve `a`. 119 // a placeholder. e.g. `a::$b::c` then we want to resolve `a`.
@@ -141,14 +154,14 @@ impl Resolver<'_, '_> {
141impl<'db> ResolutionScope<'db> { 154impl<'db> ResolutionScope<'db> {
142 pub(crate) fn new( 155 pub(crate) fn new(
143 sema: &hir::Semantics<'db, ra_ide_db::RootDatabase>, 156 sema: &hir::Semantics<'db, ra_ide_db::RootDatabase>,
144 lookup_context: FilePosition, 157 resolve_context: FilePosition,
145 ) -> ResolutionScope<'db> { 158 ) -> ResolutionScope<'db> {
146 use ra_syntax::ast::AstNode; 159 use ra_syntax::ast::AstNode;
147 let file = sema.parse(lookup_context.file_id); 160 let file = sema.parse(resolve_context.file_id);
148 // Find a node at the requested position, falling back to the whole file. 161 // Find a node at the requested position, falling back to the whole file.
149 let node = file 162 let node = file
150 .syntax() 163 .syntax()
151 .token_at_offset(lookup_context.offset) 164 .token_at_offset(resolve_context.offset)
152 .left_biased() 165 .left_biased()
153 .map(|token| token.parent()) 166 .map(|token| token.parent())
154 .unwrap_or_else(|| file.syntax().clone()); 167 .unwrap_or_else(|| file.syntax().clone());
@@ -156,10 +169,16 @@ impl<'db> ResolutionScope<'db> {
156 let scope = sema.scope(&node); 169 let scope = sema.scope(&node);
157 ResolutionScope { 170 ResolutionScope {
158 scope, 171 scope,
159 hygiene: hir::Hygiene::new(sema.db, lookup_context.file_id.into()), 172 hygiene: hir::Hygiene::new(sema.db, resolve_context.file_id.into()),
173 node,
160 } 174 }
161 } 175 }
162 176
177 /// Returns the function in which SSR was invoked, if any.
178 pub(crate) fn current_function(&self) -> Option<SyntaxNode> {
179 self.node.ancestors().find(|node| node.kind() == SyntaxKind::FN).map(|node| node.clone())
180 }
181
163 fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> { 182 fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
164 let hir_path = hir::Path::from_src(path.clone(), &self.hygiene)?; 183 let hir_path = hir::Path::from_src(path.clone(), &self.hygiene)?;
165 // First try resolving the whole path. This will work for things like 184 // First try resolving the whole path. This will work for things like
@@ -186,6 +205,10 @@ impl<'db> ResolutionScope<'db> {
186 } 205 }
187} 206}
188 207
208fn is_self(path: &ast::Path) -> bool {
209 path.segment().map(|segment| segment.self_token().is_some()).unwrap_or(false)
210}
211
189/// Returns a suitable node for resolving paths in the current scope. If we create a scope based on 212/// Returns a suitable node for resolving paths in the current scope. If we create a scope based on
190/// a statement node, then we can't resolve local variables that were defined in the current scope 213/// a statement node, then we can't resolve local variables that were defined in the current scope
191/// (only in parent scopes). So we find another node, ideally a child of the statement where local 214/// (only in parent scopes). So we find another node, ideally a child of the statement where local
@@ -198,7 +221,7 @@ fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode {
198 return n; 221 return n;
199 } 222 }
200 } 223 }
201 SyntaxKind::LET_STMT | SyntaxKind::BIND_PAT => { 224 SyntaxKind::LET_STMT | SyntaxKind::IDENT_PAT => {
202 if let Some(next) = node.next_sibling() { 225 if let Some(next) = node.next_sibling() {
203 return pick_node_for_resolution(next); 226 return pick_node_for_resolution(next);
204 } 227 }
@@ -217,7 +240,7 @@ fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode {
217fn path_contains_type_arguments(path: Option<ast::Path>) -> bool { 240fn path_contains_type_arguments(path: Option<ast::Path>) -> bool {
218 if let Some(path) = path { 241 if let Some(path) = path {
219 if let Some(segment) = path.segment() { 242 if let Some(segment) = path.segment() {
220 if segment.type_arg_list().is_some() { 243 if segment.generic_arg_list().is_some() {
221 mark::hit!(type_arguments_within_path); 244 mark::hit!(type_arguments_within_path);
222 return true; 245 return true;
223 } 246 }
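
On the `self` handling above: `self` is syntactically a path, but it cannot be resolved like other paths, so patterns that mention it are treated separately (see search.rs below, where such patterns are only searched within the current function). A small illustration of the is_self check follows; it is not part of the commit, reuses is_self verbatim, and assumes ast::Path::parse accepts a bare `self` path.

    use ra_syntax::ast;

    fn is_self(path: &ast::Path) -> bool {
        path.segment().map(|segment| segment.self_token().is_some()).unwrap_or(false)
    }

    #[test]
    fn bare_self_path_is_detected() {
        let self_path = ast::Path::parse("self").unwrap();
        let plain_path = ast::Path::parse("foo::bar").unwrap();
        assert!(is_self(&self_path));
        // `segment()` yields the last segment (`bar`), which has no `self` token.
        assert!(!is_self(&plain_path));
    }
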
diff --git a/crates/ra_ssr/src/search.rs b/crates/ra_ssr/src/search.rs
index bcf0f0468..85ffa2ac2 100644
--- a/crates/ra_ssr/src/search.rs
+++ b/crates/ra_ssr/src/search.rs
@@ -5,12 +5,13 @@ use crate::{
5 resolving::{ResolvedPath, ResolvedPattern, ResolvedRule}, 5 resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
6 Match, MatchFinder, 6 Match, MatchFinder,
7}; 7};
8use ra_db::FileRange; 8use ra_db::{FileId, FileRange};
9use ra_ide_db::{ 9use ra_ide_db::{
10 defs::Definition, 10 defs::Definition,
11 search::{Reference, SearchScope}, 11 search::{Reference, SearchScope},
12}; 12};
13use ra_syntax::{ast, AstNode, SyntaxKind, SyntaxNode}; 13use ra_syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
14use rustc_hash::FxHashSet;
14use test_utils::mark; 15use test_utils::mark;
15 16
16/// A cache for the results of find_usages. This is for when we have multiple patterns that have the 17/// A cache for the results of find_usages. This is for when we have multiple patterns that have the
@@ -32,6 +33,15 @@ impl<'db> MatchFinder<'db> {
32 usage_cache: &mut UsageCache, 33 usage_cache: &mut UsageCache,
33 matches_out: &mut Vec<Match>, 34 matches_out: &mut Vec<Match>,
34 ) { 35 ) {
36 if rule.pattern.contains_self {
37 // If the pattern contains `self` we restrict the scope of the search to just the
38 // current method. No other method can reference the same `self`. This makes the
39 // behavior of `self` consistent with other variables.
40 if let Some(current_function) = self.resolution_scope.current_function() {
41 self.slow_scan_node(&current_function, rule, &None, matches_out);
42 }
43 return;
44 }
35 if pick_path_for_usages(&rule.pattern).is_none() { 45 if pick_path_for_usages(&rule.pattern).is_none() {
36 self.slow_scan(rule, matches_out); 46 self.slow_scan(rule, matches_out);
37 return; 47 return;
@@ -54,11 +64,7 @@ impl<'db> MatchFinder<'db> {
54 mark::hit!(use_declaration_with_braces); 64 mark::hit!(use_declaration_with_braces);
55 continue; 65 continue;
56 } 66 }
57 if let Ok(m) = 67 self.try_add_match(rule, &node_to_match, &None, matches_out);
58 matching::get_match(false, rule, &node_to_match, &None, &self.sema)
59 {
60 matches_out.push(m);
61 }
62 } 68 }
63 } 69 }
64 } 70 }
@@ -121,25 +127,39 @@ impl<'db> MatchFinder<'db> {
121 // FIXME: We should ideally have a test that checks that we edit local roots and not library 127 // FIXME: We should ideally have a test that checks that we edit local roots and not library
122 // roots. This probably would require some changes to fixtures, since currently everything 128 // roots. This probably would require some changes to fixtures, since currently everything
123 // seems to get put into a single source root. 129 // seems to get put into a single source root.
124 use ra_db::SourceDatabaseExt;
125 use ra_ide_db::symbol_index::SymbolsDatabase;
126 let mut files = Vec::new(); 130 let mut files = Vec::new();
127 for &root in self.sema.db.local_roots().iter() { 131 self.search_files_do(|file_id| {
128 let sr = self.sema.db.source_root(root); 132 files.push(file_id);
129 files.extend(sr.iter()); 133 });
130 }
131 SearchScope::files(&files) 134 SearchScope::files(&files)
132 } 135 }
133 136
134 fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) { 137 fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
135 use ra_db::SourceDatabaseExt; 138 self.search_files_do(|file_id| {
136 use ra_ide_db::symbol_index::SymbolsDatabase; 139 let file = self.sema.parse(file_id);
137 for &root in self.sema.db.local_roots().iter() { 140 let code = file.syntax();
138 let sr = self.sema.db.source_root(root); 141 self.slow_scan_node(code, rule, &None, matches_out);
139 for file_id in sr.iter() { 142 })
140 let file = self.sema.parse(file_id); 143 }
141 let code = file.syntax(); 144
142 self.slow_scan_node(code, rule, &None, matches_out); 145 fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
146 if self.restrict_ranges.is_empty() {
147 // Unrestricted search.
148 use ra_db::SourceDatabaseExt;
149 use ra_ide_db::symbol_index::SymbolsDatabase;
150 for &root in self.sema.db.local_roots().iter() {
151 let sr = self.sema.db.source_root(root);
152 for file_id in sr.iter() {
153 callback(file_id);
154 }
155 }
156 } else {
157 // Search is restricted, deduplicate file IDs (generally only one).
158 let mut files = FxHashSet::default();
159 for range in &self.restrict_ranges {
160 if files.insert(range.file_id) {
161 callback(range.file_id);
162 }
143 } 163 }
144 } 164 }
145 } 165 }
@@ -154,9 +174,7 @@ impl<'db> MatchFinder<'db> {
154 if !is_search_permitted(code) { 174 if !is_search_permitted(code) {
155 return; 175 return;
156 } 176 }
157 if let Ok(m) = matching::get_match(false, rule, &code, restrict_range, &self.sema) { 177 self.try_add_match(rule, &code, restrict_range, matches_out);
158 matches_out.push(m);
159 }
160 // If we've got a macro call, we already tried matching it pre-expansion, which is the only 178 // If we've got a macro call, we already tried matching it pre-expansion, which is the only
161 // way to match the whole macro, now try expanding it and matching the expansion. 179 // way to match the whole macro, now try expanding it and matching the expansion.
162 if let Some(macro_call) = ast::MacroCall::cast(code.clone()) { 180 if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
@@ -178,6 +196,38 @@ impl<'db> MatchFinder<'db> {
178 self.slow_scan_node(&child, rule, restrict_range, matches_out); 196 self.slow_scan_node(&child, rule, restrict_range, matches_out);
179 } 197 }
180 } 198 }
199
200 fn try_add_match(
201 &self,
202 rule: &ResolvedRule,
203 code: &SyntaxNode,
204 restrict_range: &Option<FileRange>,
205 matches_out: &mut Vec<Match>,
206 ) {
207 if !self.within_range_restrictions(code) {
208 mark::hit!(replace_nonpath_within_selection);
209 return;
210 }
211 if let Ok(m) = matching::get_match(false, rule, code, restrict_range, &self.sema) {
212 matches_out.push(m);
213 }
214 }
215
216 /// Returns whether `code` is within one of our range restrictions if we have any. No range
217 /// restrictions is considered unrestricted and always returns true.
218 fn within_range_restrictions(&self, code: &SyntaxNode) -> bool {
219 if self.restrict_ranges.is_empty() {
220 // There is no range restriction.
221 return true;
222 }
223 let node_range = self.sema.original_range(code);
224 for range in &self.restrict_ranges {
225 if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
226 return true;
227 }
228 }
229 false
230 }
181} 231}
182 232
183/// Returns whether we support matching within `node` and all of its ancestors. 233/// Returns whether we support matching within `node` and all of its ancestors.
@@ -196,7 +246,7 @@ fn is_search_permitted(node: &SyntaxNode) -> bool {
196 // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`. 246 // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`.
197 // However we'll then replace just the part we matched `bar`. We probably need to instead remove 247 // However we'll then replace just the part we matched `bar`. We probably need to instead remove
198 // `bar` and insert a new use declaration. 248 // `bar` and insert a new use declaration.
199 node.kind() != SyntaxKind::USE_ITEM 249 node.kind() != SyntaxKind::USE
200} 250}
201 251
202impl UsageCache { 252impl UsageCache {
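
The search.rs changes wire the selection restriction into the search itself: a match is kept only when some restriction range in the same file fully contains the matched node's original range, so matches that merely straddle the selection boundary are rejected. A tiny, self-contained illustration of that containment test (not part of the commit; only TextRange and TextSize from ra_syntax are assumed):

    use ra_syntax::{TextRange, TextSize};

    #[test]
    fn selection_containment() {
        let selection = TextRange::new(TextSize::from(10), TextSize::from(40));
        let fully_inside = TextRange::new(TextSize::from(12), TextSize::from(20));
        let straddling = TextRange::new(TextSize::from(35), TextSize::from(50));
        // Kept: the matched node lies entirely within the selected range.
        assert!(selection.contains_range(fully_inside));
        // Dropped: a match that only partially overlaps the selection is not replaced.
        assert!(!selection.contains_range(straddling));
    }
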
diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs
index 18ef2506a..d483640df 100644
--- a/crates/ra_ssr/src/tests.rs
+++ b/crates/ra_ssr/src/tests.rs
@@ -1,9 +1,9 @@
1use crate::{MatchFinder, SsrRule}; 1use crate::{MatchFinder, SsrRule};
2use expect::{expect, Expect}; 2use expect::{expect, Expect};
3use ra_db::{salsa::Durability, FileId, FilePosition, SourceDatabaseExt}; 3use ra_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt};
4use rustc_hash::FxHashSet; 4use rustc_hash::FxHashSet;
5use std::sync::Arc; 5use std::sync::Arc;
6use test_utils::mark; 6use test_utils::{mark, RangeOrOffset};
7 7
8fn parse_error_text(query: &str) -> String { 8fn parse_error_text(query: &str) -> String {
9 format!("{}", query.parse::<SsrRule>().unwrap_err()) 9 format!("{}", query.parse::<SsrRule>().unwrap_err())
@@ -60,20 +60,32 @@ fn parser_undefined_placeholder_in_replacement() {
60} 60}
61 61
62/// `code` may optionally contain a cursor marker `<|>`. If it doesn't, then the position will be 62/// `code` may optionally contain a cursor marker `<|>`. If it doesn't, then the position will be
63/// the start of the file. 63/// the start of the file. If there's a second cursor marker, then we'll return a single range.
64pub(crate) fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FilePosition) { 64pub(crate) fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FilePosition, Vec<FileRange>) {
65 use ra_db::fixture::WithFixture; 65 use ra_db::fixture::WithFixture;
66 use ra_ide_db::symbol_index::SymbolsDatabase; 66 use ra_ide_db::symbol_index::SymbolsDatabase;
67 let (mut db, position) = if code.contains(test_utils::CURSOR_MARKER) { 67 let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) {
68 ra_ide_db::RootDatabase::with_position(code) 68 ra_ide_db::RootDatabase::with_range_or_offset(code)
69 } else { 69 } else {
70 let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(code); 70 let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(code);
71 (db, FilePosition { file_id, offset: 0.into() }) 71 (db, file_id, RangeOrOffset::Offset(0.into()))
72 }; 72 };
73 let selections;
74 let position;
75 match range_or_offset {
76 RangeOrOffset::Range(range) => {
77 position = FilePosition { file_id, offset: range.start() };
78 selections = vec![FileRange { file_id, range: range }];
79 }
80 RangeOrOffset::Offset(offset) => {
81 position = FilePosition { file_id, offset };
82 selections = vec![];
83 }
84 }
73 let mut local_roots = FxHashSet::default(); 85 let mut local_roots = FxHashSet::default();
74 local_roots.insert(ra_db::fixture::WORKSPACE); 86 local_roots.insert(ra_db::fixture::WORKSPACE);
75 db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); 87 db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
76 (db, position) 88 (db, position, selections)
77} 89}
78 90
79fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) { 91fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
@@ -81,8 +93,8 @@ fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
81} 93}
82 94
83fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) { 95fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
84 let (db, position) = single_file(input); 96 let (db, position, selections) = single_file(input);
85 let mut match_finder = MatchFinder::in_context(&db, position); 97 let mut match_finder = MatchFinder::in_context(&db, position, selections);
86 for rule in rules { 98 for rule in rules {
87 let rule: SsrRule = rule.parse().unwrap(); 99 let rule: SsrRule = rule.parse().unwrap();
88 match_finder.add_rule(rule).unwrap(); 100 match_finder.add_rule(rule).unwrap();
@@ -112,8 +124,8 @@ fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet:
112} 124}
113 125
114fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { 126fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
115 let (db, position) = single_file(code); 127 let (db, position, selections) = single_file(code);
116 let mut match_finder = MatchFinder::in_context(&db, position); 128 let mut match_finder = MatchFinder::in_context(&db, position, selections);
117 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); 129 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
118 let matched_strings: Vec<String> = 130 let matched_strings: Vec<String> =
119 match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect(); 131 match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
@@ -124,8 +136,8 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
124} 136}
125 137
126fn assert_no_match(pattern: &str, code: &str) { 138fn assert_no_match(pattern: &str, code: &str) {
127 let (db, position) = single_file(code); 139 let (db, position, selections) = single_file(code);
128 let mut match_finder = MatchFinder::in_context(&db, position); 140 let mut match_finder = MatchFinder::in_context(&db, position, selections);
129 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); 141 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
130 let matches = match_finder.matches().flattened().matches; 142 let matches = match_finder.matches().flattened().matches;
131 if !matches.is_empty() { 143 if !matches.is_empty() {
@@ -135,8 +147,8 @@ fn assert_no_match(pattern: &str, code: &str) {
135} 147}
136 148
137fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) { 149fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
138 let (db, position) = single_file(code); 150 let (db, position, selections) = single_file(code);
139 let mut match_finder = MatchFinder::in_context(&db, position); 151 let mut match_finder = MatchFinder::in_context(&db, position, selections);
140 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); 152 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
141 let mut reasons = Vec::new(); 153 let mut reasons = Vec::new();
142 for d in match_finder.debug_where_text_equal(position.file_id, snippet) { 154 for d in match_finder.debug_where_text_equal(position.file_id, snippet) {
@@ -490,9 +502,10 @@ fn no_match_split_expression() {
490 502
491#[test] 503#[test]
492fn replace_function_call() { 504fn replace_function_call() {
505 // This test also makes sure that we ignore empty-ranges.
493 assert_ssr_transform( 506 assert_ssr_transform(
494 "foo() ==>> bar()", 507 "foo() ==>> bar()",
495 "fn foo() {} fn bar() {} fn f1() {foo(); foo();}", 508 "fn foo() {<|><|>} fn bar() {} fn f1() {foo(); foo();}",
496 expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]], 509 expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]],
497 ); 510 );
498} 511}
@@ -651,7 +664,7 @@ fn replace_binary_op() {
651 assert_ssr_transform( 664 assert_ssr_transform(
652 "$a + $b ==>> $b + $a", 665 "$a + $b ==>> $b + $a",
653 "fn f() {1 + 2 + 3 + 4}", 666 "fn f() {1 + 2 + 3 + 4}",
654 expect![["fn f() {4 + 3 + 2 + 1}"]], 667 expect![[r#"fn f() {4 + (3 + (2 + 1))}"#]],
655 ); 668 );
656} 669}
657 670
@@ -760,12 +773,33 @@ fn preserves_whitespace_within_macro_expansion() {
760 macro_rules! macro1 { 773 macro_rules! macro1 {
761 ($a:expr) => {$a} 774 ($a:expr) => {$a}
762 } 775 }
763 fn f() {macro1!(4 - 3 - 1 * 2} 776 fn f() {macro1!(4 - (3 - 1 * 2)}
764 "#]], 777 "#]],
765 ) 778 )
766} 779}
767 780
768#[test] 781#[test]
782fn add_parenthesis_when_necessary() {
783 assert_ssr_transform(
784 "foo($a) ==>> $a.to_string()",
785 r#"
786 fn foo(_: i32) {}
787 fn bar3(v: i32) {
788 foo(1 + 2);
789 foo(-v);
790 }
791 "#,
792 expect![[r#"
793 fn foo(_: i32) {}
794 fn bar3(v: i32) {
795 (1 + 2).to_string();
796 (-v).to_string();
797 }
798 "#]],
799 )
800}
801
802#[test]
769fn match_failure_reasons() { 803fn match_failure_reasons() {
770 let code = r#" 804 let code = r#"
771 fn bar() {} 805 fn bar() {}
@@ -887,6 +921,45 @@ fn ufcs_matches_method_call() {
887} 921}
888 922
889#[test] 923#[test]
924fn pattern_is_a_single_segment_path() {
925 mark::check!(pattern_is_a_single_segment_path);
926 // The first function should not be altered because the `foo` in scope at the cursor position is
927 // a different `foo`. This case is special because "foo" can be parsed as a pattern (IDENT_PAT ->
928 // NAME -> IDENT), which contains no path. If we're not careful we'll end up matching the `foo`
929 // in `let foo` from the first function. Whether we should match the `let foo` in the second
930 // function is less clear. At the moment, we don't. Doing so sounds like a rename operation,
931 // which isn't really what SSR is for, especially since the replacement `bar` must be able to be
932 // resolved, which means if we rename `foo` we'll get a name collision.
933 assert_ssr_transform(
934 "foo ==>> bar",
935 r#"
936 fn f1() -> i32 {
937 let foo = 1;
938 let bar = 2;
939 foo
940 }
941 fn f1() -> i32 {
942 let foo = 1;
943 let bar = 2;
944 foo<|>
945 }
946 "#,
947 expect![[r#"
948 fn f1() -> i32 {
949 let foo = 1;
950 let bar = 2;
951 foo
952 }
953 fn f1() -> i32 {
954 let foo = 1;
955 let bar = 2;
956 bar
957 }
958 "#]],
959 );
960}
961
962#[test]
890fn replace_local_variable_reference() { 963fn replace_local_variable_reference() {
891 // The pattern references a local variable `foo` in the block containing the cursor. We should 964 // The pattern references a local variable `foo` in the block containing the cursor. We should
892 // only replace references to this variable `foo`, not other variables that just happen to have 965 // only replace references to this variable `foo`, not other variables that just happen to have
@@ -922,3 +995,87 @@ fn replace_local_variable_reference() {
922 "#]], 995 "#]],
923 ) 996 )
924} 997}
998
999#[test]
1000fn replace_path_within_selection() {
1001 assert_ssr_transform(
1002 "foo ==>> bar",
1003 r#"
1004 fn main() {
1005 let foo = 41;
1006 let bar = 42;
1007 do_stuff(foo);
1008 do_stuff(foo);<|>
1009 do_stuff(foo);
1010 do_stuff(foo);<|>
1011 do_stuff(foo);
1012 }"#,
1013 expect![[r#"
1014 fn main() {
1015 let foo = 41;
1016 let bar = 42;
1017 do_stuff(foo);
1018 do_stuff(foo);
1019 do_stuff(bar);
1020 do_stuff(bar);
1021 do_stuff(foo);
1022 }"#]],
1023 );
1024}
1025
1026#[test]
1027fn replace_nonpath_within_selection() {
1028 mark::check!(replace_nonpath_within_selection);
1029 assert_ssr_transform(
1030 "$a + $b ==>> $b * $a",
1031 r#"
1032 fn main() {
1033 let v = 1 + 2;<|>
1034 let v2 = 3 + 3;
1035 let v3 = 4 + 5;<|>
1036 let v4 = 6 + 7;
1037 }"#,
1038 expect![[r#"
1039 fn main() {
1040 let v = 1 + 2;
1041 let v2 = 3 * 3;
1042 let v3 = 5 * 4;
1043 let v4 = 6 + 7;
1044 }"#]],
1045 );
1046}
1047
1048#[test]
1049fn replace_self() {
1050 // `foo(self)` occurs twice in the code, however only the first occurrence is the `self` that's
1051 // in scope where the rule is invoked.
1052 assert_ssr_transform(
1053 "foo(self) ==>> bar(self)",
1054 r#"
1055 struct S1 {}
1056 fn foo(_: &S1) {}
1057 fn bar(_: &S1) {}
1058 impl S1 {
1059 fn f1(&self) {
1060 foo(self)<|>
1061 }
1062 fn f2(&self) {
1063 foo(self)
1064 }
1065 }
1066 "#,
1067 expect![[r#"
1068 struct S1 {}
1069 fn foo(_: &S1) {}
1070 fn bar(_: &S1) {}
1071 impl S1 {
1072 fn f1(&self) {
1073 bar(self)
1074 }
1075 fn f2(&self) {
1076 foo(self)
1077 }
1078 }
1079 "#]],
1080 );
1081}