-rw-r--r--  crates/ra_ide/src/ssr.rs            |  12
-rw-r--r--  crates/ra_ssr/src/lib.rs            |  61
-rw-r--r--  crates/ra_ssr/src/matching.rs       | 106
-rw-r--r--  crates/ra_ssr/src/parsing.rs        |  17
-rw-r--r--  crates/ra_ssr/src/replacing.rs      |  40
-rw-r--r--  crates/ra_ssr/src/resolving.rs      | 153
-rw-r--r--  crates/ra_ssr/src/search.rs         |   8
-rw-r--r--  crates/ra_ssr/src/tests.rs          | 142
-rw-r--r--  crates/rust-analyzer/src/cli/ssr.rs |   4
9 files changed, 482 insertions(+), 61 deletions(-)
diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs
index 3e2705d62..2f40bac08 100644
--- a/crates/ra_ide/src/ssr.rs
+++ b/crates/ra_ide/src/ssr.rs
@@ -11,6 +11,16 @@ use ra_ssr::{MatchFinder, SsrError, SsrRule}; | |||
11 | // A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement. | 11 | // A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement. |
12 | // Within a macro call, a placeholder will match up until whatever token follows the placeholder. | 12 | // Within a macro call, a placeholder will match up until whatever token follows the placeholder. |
13 | // | 13 | // |
14 | // All paths in both the search pattern and the replacement template must resolve in the context | ||
15 | // in which this command is invoked. Paths in the search pattern will then match the code if they | ||
16 | // resolve to the same item, even if they're written differently. For example if we invoke the | ||
17 | // command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers | ||
18 | // to `foo::Bar` will match. | ||
19 | // | ||
20 | // Paths in the replacement template will be rendered appropriately for the context in which the | ||
21 | // replacement occurs. For example if our replacement template is `foo::Bar` and we match some | ||
22 | // code in the `foo` module, we'll insert just `Bar`. | ||
23 | // | ||
14 | // Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`. | 24 | // Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`. |
15 | // | 25 | // |
16 | // Supported constraints: | 26 | // Supported constraints: |
@@ -47,7 +57,7 @@ pub fn parse_search_replace( | |||
47 | ) -> Result<Vec<SourceFileEdit>, SsrError> { | 57 | ) -> Result<Vec<SourceFileEdit>, SsrError> { |
48 | let rule: SsrRule = rule.parse()?; | 58 | let rule: SsrRule = rule.parse()?; |
49 | let mut match_finder = MatchFinder::in_context(db, position); | 59 | let mut match_finder = MatchFinder::in_context(db, position); |
50 | match_finder.add_rule(rule); | 60 | match_finder.add_rule(rule)?; |
51 | if parse_only { | 61 | if parse_only { |
52 | return Ok(Vec::new()); | 62 | return Ok(Vec::new()); |
53 | } | 63 | } |
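The doc comment added above describes the new path-resolution behaviour of rules. As a hypothetical illustration (not part of the commit), consider invoking SSR with the rule `Bar ==>> Baz` from somewhere inside `foo`:

```rust
// Hypothetical crate layout; SSR is invoked with the rule `Bar ==>> Baz` from inside `foo`.
mod foo {
    pub struct Bar;
    pub struct Baz;
    fn f1(_: Bar) {}
}
fn f2(_: foo::Bar) {}
fn main() {}
```

Both `f1`'s `Bar` and `f2`'s `foo::Bar` resolve to the same item as the pattern, so both match; the replacement is then rendered for each site, giving `Baz` inside `foo` and `foo::Baz` in `f2`.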
diff --git a/crates/ra_ssr/src/lib.rs b/crates/ra_ssr/src/lib.rs
index a0a5c9762..286619f59 100644
--- a/crates/ra_ssr/src/lib.rs
+++ b/crates/ra_ssr/src/lib.rs
@@ -7,6 +7,7 @@ mod matching; | |||
7 | mod nester; | 7 | mod nester; |
8 | mod parsing; | 8 | mod parsing; |
9 | mod replacing; | 9 | mod replacing; |
10 | mod resolving; | ||
10 | mod search; | 11 | mod search; |
11 | #[macro_use] | 12 | #[macro_use] |
12 | mod errors; | 13 | mod errors; |
@@ -21,6 +22,7 @@ use hir::Semantics; | |||
21 | use ra_db::{FileId, FilePosition, FileRange}; | 22 | use ra_db::{FileId, FilePosition, FileRange}; |
22 | use ra_ide_db::source_change::SourceFileEdit; | 23 | use ra_ide_db::source_change::SourceFileEdit; |
23 | use ra_syntax::{ast, AstNode, SyntaxNode, TextRange}; | 24 | use ra_syntax::{ast, AstNode, SyntaxNode, TextRange}; |
25 | use resolving::ResolvedRule; | ||
24 | use rustc_hash::FxHashMap; | 26 | use rustc_hash::FxHashMap; |
25 | 27 | ||
26 | // A structured search replace rule. Create by calling `parse` on a str. | 28 | // A structured search replace rule. Create by calling `parse` on a str. |
@@ -48,7 +50,9 @@ pub struct SsrMatches { | |||
48 | pub struct MatchFinder<'db> { | 50 | pub struct MatchFinder<'db> { |
49 | /// Our source of information about the user's code. | 51 | /// Our source of information about the user's code. |
50 | sema: Semantics<'db, ra_ide_db::RootDatabase>, | 52 | sema: Semantics<'db, ra_ide_db::RootDatabase>, |
51 | rules: Vec<parsing::ParsedRule>, | 53 | rules: Vec<ResolvedRule>, |
54 | scope: hir::SemanticsScope<'db>, | ||
55 | hygiene: hir::Hygiene, | ||
52 | } | 56 | } |
53 | 57 | ||
54 | impl<'db> MatchFinder<'db> { | 58 | impl<'db> MatchFinder<'db> { |
@@ -56,10 +60,24 @@ impl<'db> MatchFinder<'db> { | |||
56 | /// `lookup_context`. | 60 | /// `lookup_context`. |
57 | pub fn in_context( | 61 | pub fn in_context( |
58 | db: &'db ra_ide_db::RootDatabase, | 62 | db: &'db ra_ide_db::RootDatabase, |
59 | _lookup_context: FilePosition, | 63 | lookup_context: FilePosition, |
60 | ) -> MatchFinder<'db> { | 64 | ) -> MatchFinder<'db> { |
61 | // FIXME: Use lookup_context | 65 | let sema = Semantics::new(db); |
62 | MatchFinder { sema: Semantics::new(db), rules: Vec::new() } | 66 | let file = sema.parse(lookup_context.file_id); |
67 | // Find a node at the requested position, falling back to the whole file. | ||
68 | let node = file | ||
69 | .syntax() | ||
70 | .token_at_offset(lookup_context.offset) | ||
71 | .left_biased() | ||
72 | .map(|token| token.parent()) | ||
73 | .unwrap_or_else(|| file.syntax().clone()); | ||
74 | let scope = sema.scope(&node); | ||
75 | MatchFinder { | ||
76 | sema: Semantics::new(db), | ||
77 | rules: Vec::new(), | ||
78 | scope, | ||
79 | hygiene: hir::Hygiene::new(db, lookup_context.file_id.into()), | ||
80 | } | ||
63 | } | 81 | } |
64 | 82 | ||
65 | /// Constructs an instance using the start of the first file in `db` as the lookup context. | 83 | /// Constructs an instance using the start of the first file in `db` as the lookup context. |
@@ -84,8 +102,16 @@ impl<'db> MatchFinder<'db> { | |||
84 | /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take | 102 | /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take |
85 | /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to | 103 | /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to |
86 | /// match to it. | 104 | /// match to it. |
87 | pub fn add_rule(&mut self, rule: SsrRule) { | 105 | pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> { |
88 | self.add_parsed_rules(rule.parsed_rules); | 106 | for parsed_rule in rule.parsed_rules { |
107 | self.rules.push(ResolvedRule::new( | ||
108 | parsed_rule, | ||
109 | &self.scope, | ||
110 | &self.hygiene, | ||
111 | self.rules.len(), | ||
112 | )?); | ||
113 | } | ||
114 | Ok(()) | ||
89 | } | 115 | } |
90 | 116 | ||
91 | /// Finds matches for all added rules and returns edits for all found matches. | 117 | /// Finds matches for all added rules and returns edits for all found matches. |
@@ -110,8 +136,16 @@ impl<'db> MatchFinder<'db> { | |||
110 | 136 | ||
111 | /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you | 137 | /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you |
112 | /// intend to do replacement, use `add_rule` instead. | 138 | /// intend to do replacement, use `add_rule` instead. |
113 | pub fn add_search_pattern(&mut self, pattern: SsrPattern) { | 139 | pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> { |
114 | self.add_parsed_rules(pattern.parsed_rules); | 140 | for parsed_rule in pattern.parsed_rules { |
141 | self.rules.push(ResolvedRule::new( | ||
142 | parsed_rule, | ||
143 | &self.scope, | ||
144 | &self.hygiene, | ||
145 | self.rules.len(), | ||
146 | )?); | ||
147 | } | ||
148 | Ok(()) | ||
115 | } | 149 | } |
116 | 150 | ||
117 | /// Returns matches for all added rules. | 151 | /// Returns matches for all added rules. |
@@ -149,13 +183,6 @@ impl<'db> MatchFinder<'db> { | |||
149 | res | 183 | res |
150 | } | 184 | } |
151 | 185 | ||
152 | fn add_parsed_rules(&mut self, parsed_rules: Vec<parsing::ParsedRule>) { | ||
153 | for mut parsed_rule in parsed_rules { | ||
154 | parsed_rule.index = self.rules.len(); | ||
155 | self.rules.push(parsed_rule); | ||
156 | } | ||
157 | } | ||
158 | |||
159 | fn output_debug_for_nodes_at_range( | 186 | fn output_debug_for_nodes_at_range( |
160 | &self, | 187 | &self, |
161 | node: &SyntaxNode, | 188 | node: &SyntaxNode, |
@@ -175,7 +202,7 @@ impl<'db> MatchFinder<'db> { | |||
175 | // we get lots of noise. If at some point we add support for restricting rules | 202 | // we get lots of noise. If at some point we add support for restricting rules |
176 | // to a particular kind of thing (e.g. only match type references), then we can | 203 | // to a particular kind of thing (e.g. only match type references), then we can |
177 | // relax this. | 204 | // relax this. |
178 | if rule.pattern.kind() != node.kind() { | 205 | if rule.pattern.node.kind() != node.kind() { |
179 | continue; | 206 | continue; |
180 | } | 207 | } |
181 | out.push(MatchDebugInfo { | 208 | out.push(MatchDebugInfo { |
@@ -185,7 +212,7 @@ impl<'db> MatchFinder<'db> { | |||
185 | "Match failed, but no reason was given".to_owned() | 212 | "Match failed, but no reason was given".to_owned() |
186 | }), | 213 | }), |
187 | }), | 214 | }), |
188 | pattern: rule.pattern.clone(), | 215 | pattern: rule.pattern.node.clone(), |
189 | node: node.clone(), | 216 | node: node.clone(), |
190 | }); | 217 | }); |
191 | } | 218 | } |
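Since rules are now resolved eagerly against the lookup context, `add_rule` and `add_search_pattern` return `Result`. A minimal sketch of a caller, closely mirroring `parse_search_replace` in `ra_ide/src/ssr.rs` above (the function name `apply_rule` is made up for illustration):

```rust
use ra_ssr::{MatchFinder, SsrError, SsrRule};

fn apply_rule(
    db: &ra_ide_db::RootDatabase,
    position: ra_db::FilePosition,
    rule_text: &str,
) -> Result<Vec<ra_ide_db::source_change::SourceFileEdit>, SsrError> {
    // Parsing can fail, and so can adding the rule now that its paths are resolved.
    let rule: SsrRule = rule_text.parse()?;
    let mut match_finder = MatchFinder::in_context(db, position);
    match_finder.add_rule(rule)?;
    Ok(match_finder.edits())
}
```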
diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs
index a43d57c34..f3cc60c29 100644
--- a/crates/ra_ssr/src/matching.rs
+++ b/crates/ra_ssr/src/matching.rs
@@ -2,7 +2,8 @@ | |||
2 | //! process of matching, placeholder values are recorded. | 2 | //! process of matching, placeholder values are recorded. |
3 | 3 | ||
4 | use crate::{ | 4 | use crate::{ |
5 | parsing::{Constraint, NodeKind, ParsedRule, Placeholder}, | 5 | parsing::{Constraint, NodeKind, Placeholder}, |
6 | resolving::{ResolvedPattern, ResolvedRule}, | ||
6 | SsrMatches, | 7 | SsrMatches, |
7 | }; | 8 | }; |
8 | use hir::Semantics; | 9 | use hir::Semantics; |
@@ -51,6 +52,8 @@ pub struct Match { | |||
51 | pub(crate) rule_index: usize, | 52 | pub(crate) rule_index: usize, |
52 | /// The depth of matched_node. | 53 | /// The depth of matched_node. |
53 | pub(crate) depth: usize, | 54 | pub(crate) depth: usize, |
55 | // Each path in the template, rendered for the module in which the match was found. | ||
56 | pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>, | ||
54 | } | 57 | } |
55 | 58 | ||
56 | /// Represents a `$var` in an SSR query. | 59 | /// Represents a `$var` in an SSR query. |
@@ -86,7 +89,7 @@ pub(crate) struct MatchFailed { | |||
86 | /// parent module, we don't populate nested matches. | 89 | /// parent module, we don't populate nested matches. |
87 | pub(crate) fn get_match( | 90 | pub(crate) fn get_match( |
88 | debug_active: bool, | 91 | debug_active: bool, |
89 | rule: &ParsedRule, | 92 | rule: &ResolvedRule, |
90 | code: &SyntaxNode, | 93 | code: &SyntaxNode, |
91 | restrict_range: &Option<FileRange>, | 94 | restrict_range: &Option<FileRange>, |
92 | sema: &Semantics<ra_ide_db::RootDatabase>, | 95 | sema: &Semantics<ra_ide_db::RootDatabase>, |
@@ -102,7 +105,7 @@ struct Matcher<'db, 'sema> { | |||
102 | /// If any placeholders come from anywhere outside of this range, then the match will be | 105 | /// If any placeholders come from anywhere outside of this range, then the match will be |
103 | /// rejected. | 106 | /// rejected. |
104 | restrict_range: Option<FileRange>, | 107 | restrict_range: Option<FileRange>, |
105 | rule: &'sema ParsedRule, | 108 | rule: &'sema ResolvedRule, |
106 | } | 109 | } |
107 | 110 | ||
108 | /// Which phase of matching we're currently performing. We do two phases because most attempted | 111 | /// Which phase of matching we're currently performing. We do two phases because most attempted |
@@ -117,14 +120,14 @@ enum Phase<'a> { | |||
117 | 120 | ||
118 | impl<'db, 'sema> Matcher<'db, 'sema> { | 121 | impl<'db, 'sema> Matcher<'db, 'sema> { |
119 | fn try_match( | 122 | fn try_match( |
120 | rule: &ParsedRule, | 123 | rule: &ResolvedRule, |
121 | code: &SyntaxNode, | 124 | code: &SyntaxNode, |
122 | restrict_range: &Option<FileRange>, | 125 | restrict_range: &Option<FileRange>, |
123 | sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>, | 126 | sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>, |
124 | ) -> Result<Match, MatchFailed> { | 127 | ) -> Result<Match, MatchFailed> { |
125 | let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule }; | 128 | let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule }; |
126 | // First pass at matching, where we check that node types and idents match. | 129 | // First pass at matching, where we check that node types and idents match. |
127 | match_state.attempt_match_node(&mut Phase::First, &rule.pattern, code)?; | 130 | match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?; |
128 | match_state.validate_range(&sema.original_range(code))?; | 131 | match_state.validate_range(&sema.original_range(code))?; |
129 | let mut the_match = Match { | 132 | let mut the_match = Match { |
130 | range: sema.original_range(code), | 133 | range: sema.original_range(code), |
@@ -133,11 +136,19 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
133 | ignored_comments: Vec::new(), | 136 | ignored_comments: Vec::new(), |
134 | rule_index: rule.index, | 137 | rule_index: rule.index, |
135 | depth: 0, | 138 | depth: 0, |
139 | rendered_template_paths: FxHashMap::default(), | ||
136 | }; | 140 | }; |
137 | // Second matching pass, where we record placeholder matches, ignored comments and maybe do | 141 | // Second matching pass, where we record placeholder matches, ignored comments and maybe do |
138 | // any other more expensive checks that we didn't want to do on the first pass. | 142 | // any other more expensive checks that we didn't want to do on the first pass. |
139 | match_state.attempt_match_node(&mut Phase::Second(&mut the_match), &rule.pattern, code)?; | 143 | match_state.attempt_match_node( |
144 | &mut Phase::Second(&mut the_match), | ||
145 | &rule.pattern.node, | ||
146 | code, | ||
147 | )?; | ||
140 | the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count(); | 148 | the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count(); |
149 | if let Some(template) = &rule.template { | ||
150 | the_match.render_template_paths(template, sema)?; | ||
151 | } | ||
141 | Ok(the_match) | 152 | Ok(the_match) |
142 | } | 153 | } |
143 | 154 | ||
@@ -195,6 +206,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
195 | self.attempt_match_record_field_list(phase, pattern, code) | 206 | self.attempt_match_record_field_list(phase, pattern, code) |
196 | } | 207 | } |
197 | SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code), | 208 | SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code), |
209 | SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code), | ||
198 | _ => self.attempt_match_node_children(phase, pattern, code), | 210 | _ => self.attempt_match_node_children(phase, pattern, code), |
199 | } | 211 | } |
200 | } | 212 | } |
@@ -311,6 +323,64 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
311 | Ok(()) | 323 | Ok(()) |
312 | } | 324 | } |
313 | 325 | ||
326 | /// Paths are matched based on whether they refer to the same thing, even if they're written | ||
327 | /// differently. | ||
328 | fn attempt_match_path( | ||
329 | &self, | ||
330 | phase: &mut Phase, | ||
331 | pattern: &SyntaxNode, | ||
332 | code: &SyntaxNode, | ||
333 | ) -> Result<(), MatchFailed> { | ||
334 | if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) { | ||
335 | let pattern_path = ast::Path::cast(pattern.clone()).unwrap(); | ||
336 | let code_path = ast::Path::cast(code.clone()).unwrap(); | ||
337 | if let (Some(pattern_segment), Some(code_segment)) = | ||
338 | (pattern_path.segment(), code_path.segment()) | ||
339 | { | ||
340 | // Match everything within the segment except for the name-ref, which is handled | ||
341 | // separately via comparing what the path resolves to below. | ||
342 | self.attempt_match_opt( | ||
343 | phase, | ||
344 | pattern_segment.type_arg_list(), | ||
345 | code_segment.type_arg_list(), | ||
346 | )?; | ||
347 | self.attempt_match_opt( | ||
348 | phase, | ||
349 | pattern_segment.param_list(), | ||
350 | code_segment.param_list(), | ||
351 | )?; | ||
352 | } | ||
353 | if matches!(phase, Phase::Second(_)) { | ||
354 | let resolution = self | ||
355 | .sema | ||
356 | .resolve_path(&code_path) | ||
357 | .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?; | ||
358 | if pattern_resolved.resolution != resolution { | ||
359 | fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text()); | ||
360 | } | ||
361 | } | ||
362 | } else { | ||
363 | return self.attempt_match_node_children(phase, pattern, code); | ||
364 | } | ||
365 | Ok(()) | ||
366 | } | ||
367 | |||
368 | fn attempt_match_opt<T: AstNode>( | ||
369 | &self, | ||
370 | phase: &mut Phase, | ||
371 | pattern: Option<T>, | ||
372 | code: Option<T>, | ||
373 | ) -> Result<(), MatchFailed> { | ||
374 | match (pattern, code) { | ||
375 | (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()), | ||
376 | (None, None) => Ok(()), | ||
377 | (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()), | ||
378 | (None, Some(c)) => { | ||
379 | fail_match!("Nothing in pattern to match code `{}`", c.syntax().text()) | ||
380 | } | ||
381 | } | ||
382 | } | ||
383 | |||
314 | /// We want to allow the records to match in any order, so we have special matching logic for | 384 | /// We want to allow the records to match in any order, so we have special matching logic for |
315 | /// them. | 385 | /// them. |
316 | fn attempt_match_record_field_list( | 386 | fn attempt_match_record_field_list( |
@@ -449,6 +519,28 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
449 | } | 519 | } |
450 | } | 520 | } |
451 | 521 | ||
522 | impl Match { | ||
523 | fn render_template_paths( | ||
524 | &mut self, | ||
525 | template: &ResolvedPattern, | ||
526 | sema: &Semantics<ra_ide_db::RootDatabase>, | ||
527 | ) -> Result<(), MatchFailed> { | ||
528 | let module = sema | ||
529 | .scope(&self.matched_node) | ||
530 | .module() | ||
531 | .ok_or_else(|| match_error!("Matched node isn't in a module"))?; | ||
532 | for (path, resolved_path) in &template.resolved_paths { | ||
533 | if let hir::PathResolution::Def(module_def) = resolved_path.resolution { | ||
534 | let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| { | ||
535 | match_error!("Failed to render template path `{}` at match location") | ||
536 | })?; | ||
537 | self.rendered_template_paths.insert(path.clone(), mod_path); | ||
538 | } | ||
539 | } | ||
540 | Ok(()) | ||
541 | } | ||
542 | } | ||
543 | |||
452 | impl Phase<'_> { | 544 | impl Phase<'_> { |
453 | fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> { | 545 | fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> { |
454 | loop { | 546 | loop { |
@@ -578,7 +670,7 @@ mod tests { | |||
578 | 670 | ||
579 | let (db, position) = crate::tests::single_file(input); | 671 | let (db, position) = crate::tests::single_file(input); |
580 | let mut match_finder = MatchFinder::in_context(&db, position); | 672 | let mut match_finder = MatchFinder::in_context(&db, position); |
581 | match_finder.add_rule(rule); | 673 | match_finder.add_rule(rule).unwrap(); |
582 | let matches = match_finder.matches(); | 674 | let matches = match_finder.matches(); |
583 | assert_eq!(matches.matches.len(), 1); | 675 | assert_eq!(matches.matches.len(), 1); |
584 | assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)"); | 676 | assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)"); |
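The new `attempt_match_path` compares what the pattern path and the code path resolve to, instead of comparing their text. A self-contained toy model of that idea, where plain ids stand in for `hir::PathResolution` and a pre-built map stands in for `resolved_paths` and `Semantics::resolve_path`:

```rust
use std::collections::HashMap;

// Toy stand-in for `hir::PathResolution`: an id for the item a path resolves to.
type Resolution = u32;

/// Two paths match when they resolve to the same item, even if they are written differently.
fn paths_match(
    resolutions: &HashMap<&str, Resolution>, // pre-resolved paths, keyed by their text
    pattern_path: &str,
    code_path: &str,
) -> bool {
    match (resolutions.get(pattern_path), resolutions.get(code_path)) {
        (Some(p), Some(c)) => p == c,
        _ => false, // a path that fails to resolve never matches
    }
}

fn main() {
    let mut resolutions = HashMap::new();
    resolutions.insert("a::b::c", 1);
    resolutions.insert("c", 1); // with `use a::b::c;` in scope, `c` is the same item
    resolutions.insert("d", 2);
    assert!(paths_match(&resolutions, "a::b::c", "c"));
    assert!(!paths_match(&resolutions, "a::b::c", "d"));
}
```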
diff --git a/crates/ra_ssr/src/parsing.rs b/crates/ra_ssr/src/parsing.rs
index cf7fb517f..2d6f4e514 100644
--- a/crates/ra_ssr/src/parsing.rs
+++ b/crates/ra_ssr/src/parsing.rs
@@ -7,7 +7,7 @@ | |||
7 | 7 | ||
8 | use crate::errors::bail; | 8 | use crate::errors::bail; |
9 | use crate::{SsrError, SsrPattern, SsrRule}; | 9 | use crate::{SsrError, SsrPattern, SsrRule}; |
10 | use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, T}; | 10 | use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T}; |
11 | use rustc_hash::{FxHashMap, FxHashSet}; | 11 | use rustc_hash::{FxHashMap, FxHashSet}; |
12 | use std::str::FromStr; | 12 | use std::str::FromStr; |
13 | 13 | ||
@@ -16,7 +16,6 @@ pub(crate) struct ParsedRule { | |||
16 | pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>, | 16 | pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>, |
17 | pub(crate) pattern: SyntaxNode, | 17 | pub(crate) pattern: SyntaxNode, |
18 | pub(crate) template: Option<SyntaxNode>, | 18 | pub(crate) template: Option<SyntaxNode>, |
19 | pub(crate) index: usize, | ||
20 | } | 19 | } |
21 | 20 | ||
22 | #[derive(Debug)] | 21 | #[derive(Debug)] |
@@ -93,16 +92,11 @@ impl RuleBuilder { | |||
93 | placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), | 92 | placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), |
94 | pattern: pattern.syntax().clone(), | 93 | pattern: pattern.syntax().clone(), |
95 | template: Some(template.syntax().clone()), | 94 | template: Some(template.syntax().clone()), |
96 | // For now we give the rule an index of 0. It's given a proper index when the rule | ||
97 | // is added to the SsrMatcher. Using an Option<usize>, instead would be slightly | ||
98 | // more correct, but we delete this field from ParsedRule in a subsequent commit. | ||
99 | index: 0, | ||
100 | }), | 95 | }), |
101 | (Ok(pattern), None) => self.rules.push(ParsedRule { | 96 | (Ok(pattern), None) => self.rules.push(ParsedRule { |
102 | placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), | 97 | placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), |
103 | pattern: pattern.syntax().clone(), | 98 | pattern: pattern.syntax().clone(), |
104 | template: None, | 99 | template: None, |
105 | index: 0, | ||
106 | }), | 100 | }), |
107 | _ => {} | 101 | _ => {} |
108 | } | 102 | } |
@@ -171,15 +165,6 @@ impl RawPattern { | |||
171 | } | 165 | } |
172 | } | 166 | } |
173 | 167 | ||
174 | impl ParsedRule { | ||
175 | pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> { | ||
176 | if token.kind() != SyntaxKind::IDENT { | ||
177 | return None; | ||
178 | } | ||
179 | self.placeholders_by_stand_in.get(token.text()) | ||
180 | } | ||
181 | } | ||
182 | |||
183 | impl FromStr for SsrPattern { | 168 | impl FromStr for SsrPattern { |
184 | type Err = SsrError; | 169 | type Err = SsrError; |
185 | 170 | ||
diff --git a/crates/ra_ssr/src/replacing.rs b/crates/ra_ssr/src/replacing.rs
index f1c5bdf14..4b3f5509c 100644
--- a/crates/ra_ssr/src/replacing.rs
+++ b/crates/ra_ssr/src/replacing.rs
@@ -1,9 +1,9 @@ | |||
1 | //! Code for applying replacement templates for matches that have previously been found. | 1 | //! Code for applying replacement templates for matches that have previously been found. |
2 | 2 | ||
3 | use crate::matching::Var; | 3 | use crate::matching::Var; |
4 | use crate::{parsing::ParsedRule, Match, SsrMatches}; | 4 | use crate::{resolving::ResolvedRule, Match, SsrMatches}; |
5 | use ra_syntax::ast::AstToken; | 5 | use ra_syntax::ast::{self, AstToken}; |
6 | use ra_syntax::{SyntaxElement, SyntaxNode, SyntaxToken, TextSize}; | 6 | use ra_syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextSize}; |
7 | use ra_text_edit::TextEdit; | 7 | use ra_text_edit::TextEdit; |
8 | 8 | ||
9 | /// Returns a text edit that will replace each match in `matches` with its corresponding replacement | 9 | /// Returns a text edit that will replace each match in `matches` with its corresponding replacement |
@@ -12,7 +12,7 @@ use ra_text_edit::TextEdit; | |||
12 | pub(crate) fn matches_to_edit( | 12 | pub(crate) fn matches_to_edit( |
13 | matches: &SsrMatches, | 13 | matches: &SsrMatches, |
14 | file_src: &str, | 14 | file_src: &str, |
15 | rules: &[ParsedRule], | 15 | rules: &[ResolvedRule], |
16 | ) -> TextEdit { | 16 | ) -> TextEdit { |
17 | matches_to_edit_at_offset(matches, file_src, 0.into(), rules) | 17 | matches_to_edit_at_offset(matches, file_src, 0.into(), rules) |
18 | } | 18 | } |
@@ -21,7 +21,7 @@ fn matches_to_edit_at_offset( | |||
21 | matches: &SsrMatches, | 21 | matches: &SsrMatches, |
22 | file_src: &str, | 22 | file_src: &str, |
23 | relative_start: TextSize, | 23 | relative_start: TextSize, |
24 | rules: &[ParsedRule], | 24 | rules: &[ResolvedRule], |
25 | ) -> TextEdit { | 25 | ) -> TextEdit { |
26 | let mut edit_builder = ra_text_edit::TextEditBuilder::default(); | 26 | let mut edit_builder = ra_text_edit::TextEditBuilder::default(); |
27 | for m in &matches.matches { | 27 | for m in &matches.matches { |
@@ -36,11 +36,11 @@ fn matches_to_edit_at_offset( | |||
36 | struct ReplacementRenderer<'a> { | 36 | struct ReplacementRenderer<'a> { |
37 | match_info: &'a Match, | 37 | match_info: &'a Match, |
38 | file_src: &'a str, | 38 | file_src: &'a str, |
39 | rules: &'a [ParsedRule], | 39 | rules: &'a [ResolvedRule], |
40 | rule: &'a ParsedRule, | 40 | rule: &'a ResolvedRule, |
41 | } | 41 | } |
42 | 42 | ||
43 | fn render_replace(match_info: &Match, file_src: &str, rules: &[ParsedRule]) -> String { | 43 | fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String { |
44 | let mut out = String::new(); | 44 | let mut out = String::new(); |
45 | let rule = &rules[match_info.rule_index]; | 45 | let rule = &rules[match_info.rule_index]; |
46 | let template = rule | 46 | let template = rule |
@@ -48,7 +48,7 @@ fn render_replace(match_info: &Match, file_src: &str, rules: &[ParsedRule]) -> S | |||
48 | .as_ref() | 48 | .as_ref() |
49 | .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern"); | 49 | .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern"); |
50 | let renderer = ReplacementRenderer { match_info, file_src, rules, rule }; | 50 | let renderer = ReplacementRenderer { match_info, file_src, rules, rule }; |
51 | renderer.render_node_children(&template, &mut out); | 51 | renderer.render_node(&template.node, &mut out); |
52 | for comment in &match_info.ignored_comments { | 52 | for comment in &match_info.ignored_comments { |
53 | out.push_str(&comment.syntax().to_string()); | 53 | out.push_str(&comment.syntax().to_string()); |
54 | } | 54 | } |
@@ -68,11 +68,31 @@ impl ReplacementRenderer<'_> { | |||
68 | self.render_token(&token, out); | 68 | self.render_token(&token, out); |
69 | } | 69 | } |
70 | SyntaxElement::Node(child_node) => { | 70 | SyntaxElement::Node(child_node) => { |
71 | self.render_node_children(&child_node, out); | 71 | self.render_node(&child_node, out); |
72 | } | 72 | } |
73 | } | 73 | } |
74 | } | 74 | } |
75 | 75 | ||
76 | fn render_node(&self, node: &SyntaxNode, out: &mut String) { | ||
77 | use ra_syntax::ast::AstNode; | ||
78 | if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) { | ||
79 | out.push_str(&mod_path.to_string()); | ||
80 | // Emit everything except for the segment's name-ref, since we already effectively | ||
81 | // emitted that as part of `mod_path`. | ||
82 | if let Some(path) = ast::Path::cast(node.clone()) { | ||
83 | if let Some(segment) = path.segment() { | ||
84 | for node_or_token in segment.syntax().children_with_tokens() { | ||
85 | if node_or_token.kind() != SyntaxKind::NAME_REF { | ||
86 | self.render_node_or_token(&node_or_token, out); | ||
87 | } | ||
88 | } | ||
89 | } | ||
90 | } | ||
91 | } else { | ||
92 | self.render_node_children(&node, out); | ||
93 | } | ||
94 | } | ||
95 | |||
76 | fn render_token(&self, token: &SyntaxToken, out: &mut String) { | 96 | fn render_token(&self, token: &SyntaxToken, out: &mut String) { |
77 | if let Some(placeholder) = self.rule.get_placeholder(&token) { | 97 | if let Some(placeholder) = self.rule.get_placeholder(&token) { |
78 | if let Some(placeholder_value) = | 98 | if let Some(placeholder_value) = |
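When a template path has a pre-rendered `ModPath` for the match site, `render_node` emits that path and then re-emits the remaining children of the final segment (everything except the `NAME_REF`, such as turbofish type arguments). A rough, string-based sketch of that splice, under the simplifying assumption that only the final segment carries type arguments:

```rust
/// Emit the pre-rendered module path, then re-attach whatever followed the segment's name.
fn render_path(rendered_mod_path: Option<&str>, template_path: &str) -> String {
    match rendered_mod_path {
        Some(mod_path) => {
            // Keep a trailing turbofish, standing in for the "skip NAME_REF" loop.
            let suffix = template_path.find("::<").map(|i| &template_path[i..]).unwrap_or("");
            format!("{}{}", mod_path, suffix)
        }
        // Paths that weren't resolved (e.g. they contain placeholders) are emitted as written.
        None => template_path.to_string(),
    }
}

fn main() {
    // Template `d::Bar::<i32>` rendered at a match site in another module.
    assert_eq!(render_path(Some("crate::d::Bar"), "d::Bar::<i32>"), "crate::d::Bar::<i32>");
    assert_eq!(render_path(None, "$a.to_string()"), "$a.to_string()");
}
```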
diff --git a/crates/ra_ssr/src/resolving.rs b/crates/ra_ssr/src/resolving.rs
new file mode 100644
index 000000000..e9d052111
--- /dev/null
+++ b/crates/ra_ssr/src/resolving.rs
@@ -0,0 +1,153 @@ | |||
1 | //! This module is responsible for resolving paths within rules. | ||
2 | |||
3 | use crate::errors::error; | ||
4 | use crate::{parsing, SsrError}; | ||
5 | use parsing::Placeholder; | ||
6 | use ra_syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken}; | ||
7 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
8 | use test_utils::mark; | ||
9 | |||
10 | pub(crate) struct ResolvedRule { | ||
11 | pub(crate) pattern: ResolvedPattern, | ||
12 | pub(crate) template: Option<ResolvedPattern>, | ||
13 | pub(crate) index: usize, | ||
14 | } | ||
15 | |||
16 | pub(crate) struct ResolvedPattern { | ||
17 | pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>, | ||
18 | pub(crate) node: SyntaxNode, | ||
19 | // Paths in `node` that we've resolved. | ||
20 | pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>, | ||
21 | } | ||
22 | |||
23 | pub(crate) struct ResolvedPath { | ||
24 | pub(crate) resolution: hir::PathResolution, | ||
25 | } | ||
26 | |||
27 | impl ResolvedRule { | ||
28 | pub(crate) fn new( | ||
29 | rule: parsing::ParsedRule, | ||
30 | scope: &hir::SemanticsScope, | ||
31 | hygiene: &hir::Hygiene, | ||
32 | index: usize, | ||
33 | ) -> Result<ResolvedRule, SsrError> { | ||
34 | let resolver = | ||
35 | Resolver { scope, hygiene, placeholders_by_stand_in: rule.placeholders_by_stand_in }; | ||
36 | let resolved_template = if let Some(template) = rule.template { | ||
37 | Some(resolver.resolve_pattern_tree(template)?) | ||
38 | } else { | ||
39 | None | ||
40 | }; | ||
41 | Ok(ResolvedRule { | ||
42 | pattern: resolver.resolve_pattern_tree(rule.pattern)?, | ||
43 | template: resolved_template, | ||
44 | index, | ||
45 | }) | ||
46 | } | ||
47 | |||
48 | pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> { | ||
49 | if token.kind() != SyntaxKind::IDENT { | ||
50 | return None; | ||
51 | } | ||
52 | self.pattern.placeholders_by_stand_in.get(token.text()) | ||
53 | } | ||
54 | } | ||
55 | |||
56 | struct Resolver<'a, 'db> { | ||
57 | scope: &'a hir::SemanticsScope<'db>, | ||
58 | hygiene: &'a hir::Hygiene, | ||
59 | placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>, | ||
60 | } | ||
61 | |||
62 | impl Resolver<'_, '_> { | ||
63 | fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> { | ||
64 | let mut resolved_paths = FxHashMap::default(); | ||
65 | self.resolve(pattern.clone(), &mut resolved_paths)?; | ||
66 | Ok(ResolvedPattern { | ||
67 | node: pattern, | ||
68 | resolved_paths, | ||
69 | placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), | ||
70 | }) | ||
71 | } | ||
72 | |||
73 | fn resolve( | ||
74 | &self, | ||
75 | node: SyntaxNode, | ||
76 | resolved_paths: &mut FxHashMap<SyntaxNode, ResolvedPath>, | ||
77 | ) -> Result<(), SsrError> { | ||
78 | use ra_syntax::ast::AstNode; | ||
79 | if let Some(path) = ast::Path::cast(node.clone()) { | ||
80 | // Check if this is an appropriate place in the path to resolve. If the path is | ||
81 | // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains | ||
82 | // a placeholder, e.g. `a::$b::c`, then we want to resolve `a`. | ||
83 | if !path_contains_type_arguments(path.qualifier()) | ||
84 | && !self.path_contains_placeholder(&path) | ||
85 | { | ||
86 | let resolution = self | ||
87 | .resolve_path(&path) | ||
88 | .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?; | ||
89 | resolved_paths.insert(node, ResolvedPath { resolution }); | ||
90 | return Ok(()); | ||
91 | } | ||
92 | } | ||
93 | for node in node.children() { | ||
94 | self.resolve(node, resolved_paths)?; | ||
95 | } | ||
96 | Ok(()) | ||
97 | } | ||
98 | |||
99 | /// Returns whether `path` contains a placeholder, but ignores any placeholders within type | ||
100 | /// arguments. | ||
101 | fn path_contains_placeholder(&self, path: &ast::Path) -> bool { | ||
102 | if let Some(segment) = path.segment() { | ||
103 | if let Some(name_ref) = segment.name_ref() { | ||
104 | if self.placeholders_by_stand_in.contains_key(name_ref.text()) { | ||
105 | return true; | ||
106 | } | ||
107 | } | ||
108 | } | ||
109 | if let Some(qualifier) = path.qualifier() { | ||
110 | return self.path_contains_placeholder(&qualifier); | ||
111 | } | ||
112 | false | ||
113 | } | ||
114 | |||
115 | fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> { | ||
116 | let hir_path = hir::Path::from_src(path.clone(), self.hygiene)?; | ||
117 | // First try resolving the whole path. This will work for things like | ||
118 | // `std::collections::HashMap`, but will fail for things like | ||
119 | // `std::collections::HashMap::new`. | ||
120 | if let Some(resolution) = self.scope.resolve_hir_path(&hir_path) { | ||
121 | return Some(resolution); | ||
122 | } | ||
123 | // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap`) and if | ||
124 | // that succeeds, then iterate through the candidates on the resolved type with the provided | ||
125 | // name. | ||
126 | let resolved_qualifier = self.scope.resolve_hir_path_qualifier(&hir_path.qualifier()?)?; | ||
127 | if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier { | ||
128 | adt.ty(self.scope.db).iterate_path_candidates( | ||
129 | self.scope.db, | ||
130 | self.scope.module()?.krate(), | ||
131 | &FxHashSet::default(), | ||
132 | Some(hir_path.segments().last()?.name), | ||
133 | |_ty, assoc_item| Some(hir::PathResolution::AssocItem(assoc_item)), | ||
134 | ) | ||
135 | } else { | ||
136 | None | ||
137 | } | ||
138 | } | ||
139 | } | ||
140 | |||
141 | /// Returns whether `path` or any of its qualifiers contains type arguments. | ||
142 | fn path_contains_type_arguments(path: Option<ast::Path>) -> bool { | ||
143 | if let Some(path) = path { | ||
144 | if let Some(segment) = path.segment() { | ||
145 | if segment.type_arg_list().is_some() { | ||
146 | mark::hit!(type_arguments_within_path); | ||
147 | return true; | ||
148 | } | ||
149 | } | ||
150 | return path_contains_type_arguments(path.qualifier()); | ||
151 | } | ||
152 | false | ||
153 | } | ||
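`Resolver::resolve_path` first tries to resolve the whole path and, failing that, resolves the qualifier and searches its associated items for the last segment. A toy model of that two-step lookup, with hash maps standing in for the semantic scope and for `iterate_path_candidates`:

```rust
use std::collections::HashMap;

/// Resolve a path: whole path first (e.g. `std::collections::HashMap`), then
/// qualifier plus associated item (e.g. `std::collections::HashMap::new`).
fn resolve(
    scope: &HashMap<&str, u32>,                // paths the scope can resolve -> item id
    assoc_items: &HashMap<(u32, String), u32>, // (type id, name) -> associated item id
    path: &str,
) -> Option<u32> {
    if let Some(&id) = scope.get(path) {
        return Some(id);
    }
    let (qualifier, name) = path.rsplit_once("::")?;
    let &type_id = scope.get(qualifier)?;
    assoc_items.get(&(type_id, name.to_string())).copied()
}

fn main() {
    let scope: HashMap<&str, u32> = HashMap::from([("std::collections::HashMap", 10)]);
    let assoc: HashMap<(u32, String), u32> = HashMap::from([((10, "new".to_string()), 11)]);
    assert_eq!(resolve(&scope, &assoc, "std::collections::HashMap"), Some(10));
    assert_eq!(resolve(&scope, &assoc, "std::collections::HashMap::new"), Some(11));
    assert_eq!(resolve(&scope, &assoc, "does::not::exist"), None);
}
```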
diff --git a/crates/ra_ssr/src/search.rs b/crates/ra_ssr/src/search.rs
index a28e9f341..ccc2d544a 100644
--- a/crates/ra_ssr/src/search.rs
+++ b/crates/ra_ssr/src/search.rs
@@ -1,6 +1,6 @@ | |||
1 | //! Searching for matches. | 1 | //! Searching for matches. |
2 | 2 | ||
3 | use crate::{matching, parsing::ParsedRule, Match, MatchFinder}; | 3 | use crate::{matching, resolving::ResolvedRule, Match, MatchFinder}; |
4 | use ra_db::FileRange; | 4 | use ra_db::FileRange; |
5 | use ra_syntax::{ast, AstNode, SyntaxNode}; | 5 | use ra_syntax::{ast, AstNode, SyntaxNode}; |
6 | 6 | ||
@@ -8,13 +8,13 @@ impl<'db> MatchFinder<'db> { | |||
8 | /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make | 8 | /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make |
9 | /// replacement impossible, so further processing is required in order to properly nest matches | 9 | /// replacement impossible, so further processing is required in order to properly nest matches |
10 | /// and remove overlapping matches. This is done in the `nesting` module. | 10 | /// and remove overlapping matches. This is done in the `nesting` module. |
11 | pub(crate) fn find_matches_for_rule(&self, rule: &ParsedRule, matches_out: &mut Vec<Match>) { | 11 | pub(crate) fn find_matches_for_rule(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) { |
12 | // FIXME: Use resolved paths in the pattern to find places to search instead of always | 12 | // FIXME: Use resolved paths in the pattern to find places to search instead of always |
13 | // scanning every node. | 13 | // scanning every node. |
14 | self.slow_scan(rule, matches_out); | 14 | self.slow_scan(rule, matches_out); |
15 | } | 15 | } |
16 | 16 | ||
17 | fn slow_scan(&self, rule: &ParsedRule, matches_out: &mut Vec<Match>) { | 17 | fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) { |
18 | use ra_db::SourceDatabaseExt; | 18 | use ra_db::SourceDatabaseExt; |
19 | use ra_ide_db::symbol_index::SymbolsDatabase; | 19 | use ra_ide_db::symbol_index::SymbolsDatabase; |
20 | for &root in self.sema.db.local_roots().iter() { | 20 | for &root in self.sema.db.local_roots().iter() { |
@@ -30,7 +30,7 @@ impl<'db> MatchFinder<'db> { | |||
30 | fn slow_scan_node( | 30 | fn slow_scan_node( |
31 | &self, | 31 | &self, |
32 | code: &SyntaxNode, | 32 | code: &SyntaxNode, |
33 | rule: &ParsedRule, | 33 | rule: &ResolvedRule, |
34 | restrict_range: &Option<FileRange>, | 34 | restrict_range: &Option<FileRange>, |
35 | matches_out: &mut Vec<Match>, | 35 | matches_out: &mut Vec<Match>, |
36 | ) { | 36 | ) { |
diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs
index 63d527894..33742dc8e 100644
--- a/crates/ra_ssr/src/tests.rs
+++ b/crates/ra_ssr/src/tests.rs
@@ -85,7 +85,7 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) { | |||
85 | let mut match_finder = MatchFinder::in_context(&db, position); | 85 | let mut match_finder = MatchFinder::in_context(&db, position); |
86 | for rule in rules { | 86 | for rule in rules { |
87 | let rule: SsrRule = rule.parse().unwrap(); | 87 | let rule: SsrRule = rule.parse().unwrap(); |
88 | match_finder.add_rule(rule); | 88 | match_finder.add_rule(rule).unwrap(); |
89 | } | 89 | } |
90 | let edits = match_finder.edits(); | 90 | let edits = match_finder.edits(); |
91 | if edits.is_empty() { | 91 | if edits.is_empty() { |
@@ -114,7 +114,7 @@ fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet: | |||
114 | fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { | 114 | fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { |
115 | let (db, position) = single_file(code); | 115 | let (db, position) = single_file(code); |
116 | let mut match_finder = MatchFinder::in_context(&db, position); | 116 | let mut match_finder = MatchFinder::in_context(&db, position); |
117 | match_finder.add_search_pattern(pattern.parse().unwrap()); | 117 | match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); |
118 | let matched_strings: Vec<String> = | 118 | let matched_strings: Vec<String> = |
119 | match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect(); | 119 | match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect(); |
120 | if matched_strings != expected && !expected.is_empty() { | 120 | if matched_strings != expected && !expected.is_empty() { |
@@ -126,7 +126,7 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { | |||
126 | fn assert_no_match(pattern: &str, code: &str) { | 126 | fn assert_no_match(pattern: &str, code: &str) { |
127 | let (db, position) = single_file(code); | 127 | let (db, position) = single_file(code); |
128 | let mut match_finder = MatchFinder::in_context(&db, position); | 128 | let mut match_finder = MatchFinder::in_context(&db, position); |
129 | match_finder.add_search_pattern(pattern.parse().unwrap()); | 129 | match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); |
130 | let matches = match_finder.matches().flattened().matches; | 130 | let matches = match_finder.matches().flattened().matches; |
131 | if !matches.is_empty() { | 131 | if !matches.is_empty() { |
132 | print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text()); | 132 | print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text()); |
@@ -137,7 +137,7 @@ fn assert_no_match(pattern: &str, code: &str) { | |||
137 | fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) { | 137 | fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) { |
138 | let (db, position) = single_file(code); | 138 | let (db, position) = single_file(code); |
139 | let mut match_finder = MatchFinder::in_context(&db, position); | 139 | let mut match_finder = MatchFinder::in_context(&db, position); |
140 | match_finder.add_search_pattern(pattern.parse().unwrap()); | 140 | match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); |
141 | let mut reasons = Vec::new(); | 141 | let mut reasons = Vec::new(); |
142 | for d in match_finder.debug_where_text_equal(position.file_id, snippet) { | 142 | for d in match_finder.debug_where_text_equal(position.file_id, snippet) { |
143 | if let Some(reason) = d.match_failure_reason() { | 143 | if let Some(reason) = d.match_failure_reason() { |
@@ -350,6 +350,60 @@ fn match_pattern() { | |||
350 | assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]); | 350 | assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]); |
351 | } | 351 | } |
352 | 352 | ||
353 | // If our pattern has a full path, e.g. a::b::c() and the code has c(), but c resolves to | ||
354 | // a::b::c, then we should match. | ||
355 | #[test] | ||
356 | fn match_fully_qualified_fn_path() { | ||
357 | let code = r#" | ||
358 | mod a { | ||
359 | pub mod b { | ||
360 | pub fn c(_: i32) {} | ||
361 | } | ||
362 | } | ||
363 | use a::b::c; | ||
364 | fn f1() { | ||
365 | c(42); | ||
366 | } | ||
367 | "#; | ||
368 | assert_matches("a::b::c($a)", code, &["c(42)"]); | ||
369 | } | ||
370 | |||
371 | #[test] | ||
372 | fn match_resolved_type_name() { | ||
373 | let code = r#" | ||
374 | mod m1 { | ||
375 | pub mod m2 { | ||
376 | pub trait Foo<T> {} | ||
377 | } | ||
378 | } | ||
379 | mod m3 { | ||
380 | trait Foo<T> {} | ||
381 | fn f1(f: Option<&dyn Foo<bool>>) {} | ||
382 | } | ||
383 | mod m4 { | ||
384 | use crate::m1::m2::Foo; | ||
385 | fn f1(f: Option<&dyn Foo<i32>>) {} | ||
386 | } | ||
387 | "#; | ||
388 | assert_matches("m1::m2::Foo<$t>", code, &["Foo<i32>"]); | ||
389 | } | ||
390 | |||
391 | #[test] | ||
392 | fn type_arguments_within_path() { | ||
393 | mark::check!(type_arguments_within_path); | ||
394 | let code = r#" | ||
395 | mod foo { | ||
396 | pub struct Bar<T> {t: T} | ||
397 | impl<T> Bar<T> { | ||
398 | pub fn baz() {} | ||
399 | } | ||
400 | } | ||
401 | fn f1() {foo::Bar::<i32>::baz();} | ||
402 | "#; | ||
403 | assert_no_match("foo::Bar::<i64>::baz()", code); | ||
404 | assert_matches("foo::Bar::<i32>::baz()", code, &["foo::Bar::<i32>::baz()"]); | ||
405 | } | ||
406 | |||
353 | #[test] | 407 | #[test] |
354 | fn literal_constraint() { | 408 | fn literal_constraint() { |
355 | mark::check!(literal_constraint); | 409 | mark::check!(literal_constraint); |
@@ -483,6 +537,86 @@ fn replace_associated_function_call() { | |||
483 | } | 537 | } |
484 | 538 | ||
485 | #[test] | 539 | #[test] |
540 | fn replace_path_in_different_contexts() { | ||
541 | // Note the <|> inside module a::b which marks the point where the rule is interpreted. We | ||
542 | // replace foo with bar, but both need different path qualifiers in different contexts. In f4, | ||
543 | // foo is unqualified because of a use statement; however, the replacement needs to be fully | ||
544 | // qualified. | ||
545 | assert_ssr_transform( | ||
546 | "c::foo() ==>> c::bar()", | ||
547 | r#" | ||
548 | mod a { | ||
549 | pub mod b {<|> | ||
550 | pub mod c { | ||
551 | pub fn foo() {} | ||
552 | pub fn bar() {} | ||
553 | fn f1() { foo() } | ||
554 | } | ||
555 | fn f2() { c::foo() } | ||
556 | } | ||
557 | fn f3() { b::c::foo() } | ||
558 | } | ||
559 | use a::b::c::foo; | ||
560 | fn f4() { foo() } | ||
561 | "#, | ||
562 | expect![[r#" | ||
563 | mod a { | ||
564 | pub mod b { | ||
565 | pub mod c { | ||
566 | pub fn foo() {} | ||
567 | pub fn bar() {} | ||
568 | fn f1() { bar() } | ||
569 | } | ||
570 | fn f2() { c::bar() } | ||
571 | } | ||
572 | fn f3() { b::c::bar() } | ||
573 | } | ||
574 | use a::b::c::foo; | ||
575 | fn f4() { a::b::c::bar() } | ||
576 | "#]], | ||
577 | ); | ||
578 | } | ||
579 | |||
580 | #[test] | ||
581 | fn replace_associated_function_with_generics() { | ||
582 | assert_ssr_transform( | ||
583 | "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()", | ||
584 | r#" | ||
585 | mod c { | ||
586 | pub struct Foo<T> {v: T} | ||
587 | impl<T> Foo<T> { pub fn new() {} } | ||
588 | fn f1() { | ||
589 | Foo::<i32>::new(); | ||
590 | } | ||
591 | } | ||
592 | mod d { | ||
593 | pub struct Bar<T> {v: T} | ||
594 | impl<T> Bar<T> { pub fn default() {} } | ||
595 | fn f1() { | ||
596 | super::c::Foo::<i32>::new(); | ||
597 | } | ||
598 | } | ||
599 | "#, | ||
600 | expect![[r#" | ||
601 | mod c { | ||
602 | pub struct Foo<T> {v: T} | ||
603 | impl<T> Foo<T> { pub fn new() {} } | ||
604 | fn f1() { | ||
605 | crate::d::Bar::<i32>::default(); | ||
606 | } | ||
607 | } | ||
608 | mod d { | ||
609 | pub struct Bar<T> {v: T} | ||
610 | impl<T> Bar<T> { pub fn default() {} } | ||
611 | fn f1() { | ||
612 | Bar::<i32>::default(); | ||
613 | } | ||
614 | } | ||
615 | "#]], | ||
616 | ); | ||
617 | } | ||
618 | |||
619 | #[test] | ||
486 | fn replace_type() { | 620 | fn replace_type() { |
487 | assert_ssr_transform( | 621 | assert_ssr_transform( |
488 | "Result<(), $a> ==>> Option<$a>", | 622 | "Result<(), $a> ==>> Option<$a>", |
diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs
index 22f5b4be0..194bec008 100644
--- a/crates/rust-analyzer/src/cli/ssr.rs
+++ b/crates/rust-analyzer/src/cli/ssr.rs
@@ -9,7 +9,7 @@ pub fn apply_ssr_rules(rules: Vec<SsrRule>) -> Result<()> { | |||
9 | let db = host.raw_database(); | 9 | let db = host.raw_database(); |
10 | let mut match_finder = MatchFinder::at_first_file(db)?; | 10 | let mut match_finder = MatchFinder::at_first_file(db)?; |
11 | for rule in rules { | 11 | for rule in rules { |
12 | match_finder.add_rule(rule); | 12 | match_finder.add_rule(rule)?; |
13 | } | 13 | } |
14 | let edits = match_finder.edits(); | 14 | let edits = match_finder.edits(); |
15 | for edit in edits { | 15 | for edit in edits { |
@@ -32,7 +32,7 @@ pub fn search_for_patterns(patterns: Vec<SsrPattern>, debug_snippet: Option<Stri | |||
32 | let db = host.raw_database(); | 32 | let db = host.raw_database(); |
33 | let mut match_finder = MatchFinder::at_first_file(db)?; | 33 | let mut match_finder = MatchFinder::at_first_file(db)?; |
34 | for pattern in patterns { | 34 | for pattern in patterns { |
35 | match_finder.add_search_pattern(pattern); | 35 | match_finder.add_search_pattern(pattern)?; |
36 | } | 36 | } |
37 | if let Some(debug_snippet) = &debug_snippet { | 37 | if let Some(debug_snippet) = &debug_snippet { |
38 | for &root in db.local_roots().iter() { | 38 | for &root in db.local_roots().iter() { |