diff options
Diffstat (limited to 'crates/ide_ssr')
-rw-r--r-- | crates/ide_ssr/Cargo.toml | 24 | ||||
-rw-r--r-- | crates/ide_ssr/src/errors.rs | 29 | ||||
-rw-r--r-- | crates/ide_ssr/src/lib.rs | 347 | ||||
-rw-r--r-- | crates/ide_ssr/src/matching.rs | 819 | ||||
-rw-r--r-- | crates/ide_ssr/src/nester.rs | 94 | ||||
-rw-r--r-- | crates/ide_ssr/src/parsing.rs | 414 | ||||
-rw-r--r-- | crates/ide_ssr/src/replacing.rs | 237 | ||||
-rw-r--r-- | crates/ide_ssr/src/resolving.rs | 301 | ||||
-rw-r--r-- | crates/ide_ssr/src/search.rs | 282 | ||||
-rw-r--r-- | crates/ide_ssr/src/tests.rs | 1371 |
10 files changed, 3918 insertions, 0 deletions
diff --git a/crates/ide_ssr/Cargo.toml b/crates/ide_ssr/Cargo.toml new file mode 100644 index 000000000..edbc1846b --- /dev/null +++ b/crates/ide_ssr/Cargo.toml | |||
@@ -0,0 +1,24 @@ | |||
1 | [package] | ||
2 | name = "ide_ssr" | ||
3 | version = "0.0.0" | ||
4 | description = "Structural search and replace of Rust code" | ||
5 | license = "MIT OR Apache-2.0" | ||
6 | repository = "https://github.com/rust-analyzer/rust-analyzer" | ||
7 | authors = ["rust-analyzer developers"] | ||
8 | edition = "2018" | ||
9 | |||
10 | [lib] | ||
11 | doctest = false | ||
12 | |||
13 | [dependencies] | ||
14 | rustc-hash = "1.1.0" | ||
15 | itertools = "0.10.0" | ||
16 | |||
17 | text_edit = { path = "../text_edit", version = "0.0.0" } | ||
18 | syntax = { path = "../syntax", version = "0.0.0" } | ||
19 | ide_db = { path = "../ide_db", version = "0.0.0" } | ||
20 | hir = { path = "../hir", version = "0.0.0" } | ||
21 | test_utils = { path = "../test_utils", version = "0.0.0" } | ||
22 | |||
23 | [dev-dependencies] | ||
24 | expect-test = "1.1" | ||
diff --git a/crates/ide_ssr/src/errors.rs b/crates/ide_ssr/src/errors.rs new file mode 100644 index 000000000..c02bacae6 --- /dev/null +++ b/crates/ide_ssr/src/errors.rs | |||
@@ -0,0 +1,29 @@ | |||
1 | //! Code relating to errors produced by SSR. | ||
2 | |||
3 | /// Constructs an SsrError taking arguments like the format macro. | ||
4 | macro_rules! _error { | ||
5 | ($fmt:expr) => {$crate::SsrError::new(format!($fmt))}; | ||
6 | ($fmt:expr, $($arg:tt)+) => {$crate::SsrError::new(format!($fmt, $($arg)+))} | ||
7 | } | ||
8 | pub(crate) use _error as error; | ||
9 | |||
10 | /// Returns from the current function with an error, supplied by arguments as for format! | ||
11 | macro_rules! _bail { | ||
12 | ($($tokens:tt)*) => {return Err(crate::errors::error!($($tokens)*))} | ||
13 | } | ||
14 | pub(crate) use _bail as bail; | ||
15 | |||
16 | #[derive(Debug, PartialEq)] | ||
17 | pub struct SsrError(pub(crate) String); | ||
18 | |||
19 | impl std::fmt::Display for SsrError { | ||
20 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { | ||
21 | write!(f, "Parse error: {}", self.0) | ||
22 | } | ||
23 | } | ||
24 | |||
25 | impl SsrError { | ||
26 | pub(crate) fn new(message: impl Into<String>) -> SsrError { | ||
27 | SsrError(message.into()) | ||
28 | } | ||
29 | } | ||
diff --git a/crates/ide_ssr/src/lib.rs b/crates/ide_ssr/src/lib.rs new file mode 100644 index 000000000..a97fc8bca --- /dev/null +++ b/crates/ide_ssr/src/lib.rs | |||
@@ -0,0 +1,347 @@ | |||
1 | //! Structural Search Replace | ||
2 | //! | ||
3 | //! Allows searching the AST for code that matches one or more patterns and then replacing that code | ||
4 | //! based on a template. | ||
5 | |||
6 | // Feature: Structural Search and Replace | ||
7 | // | ||
8 | // Search and replace with named wildcards that will match any expression, type, path, pattern or item. | ||
9 | // The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`. | ||
10 | // A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement. | ||
11 | // Within a macro call, a placeholder will match up until whatever token follows the placeholder. | ||
12 | // | ||
13 | // All paths in both the search pattern and the replacement template must resolve in the context | ||
14 | // in which this command is invoked. Paths in the search pattern will then match the code if they | ||
15 | // resolve to the same item, even if they're written differently. For example if we invoke the | ||
16 | // command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers | ||
17 | // to `foo::Bar` will match. | ||
18 | // | ||
19 | // Paths in the replacement template will be rendered appropriately for the context in which the | ||
20 | // replacement occurs. For example if our replacement template is `foo::Bar` and we match some | ||
21 | // code in the `foo` module, we'll insert just `Bar`. | ||
22 | // | ||
23 | // Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will | ||
24 | // match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. When a | ||
25 | // placeholder is the receiver of a method call in the search pattern (e.g. `$s.foo()`), but not in | ||
26 | // the replacement template (e.g. `bar($s)`), then *, & and &mut will be added as needed to mirror | ||
27 | // whatever autoderef and autoref was happening implicitly in the matched code. | ||
28 | // | ||
29 | // The scope of the search / replace will be restricted to the current selection if any, otherwise | ||
30 | // it will apply to the whole workspace. | ||
31 | // | ||
32 | // Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`. | ||
33 | // | ||
34 | // Supported constraints: | ||
35 | // | ||
36 | // |=== | ||
37 | // | Constraint | Restricts placeholder | ||
38 | // | ||
39 | // | kind(literal) | Is a literal (e.g. `42` or `"forty two"`) | ||
40 | // | not(a) | Negates the constraint `a` | ||
41 | // |=== | ||
42 | // | ||
43 | // Available via the command `rust-analyzer.ssr`. | ||
44 | // | ||
45 | // ```rust | ||
46 | // // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)] | ||
47 | // | ||
48 | // // BEFORE | ||
49 | // String::from(foo(y + 5, z)) | ||
50 | // | ||
51 | // // AFTER | ||
52 | // String::from((y + 5).foo(z)) | ||
53 | // ``` | ||
54 | // | ||
55 | // |=== | ||
56 | // | Editor | Action Name | ||
57 | // | ||
58 | // | VS Code | **Rust Analyzer: Structural Search Replace** | ||
59 | // |=== | ||
60 | |||
61 | mod matching; | ||
62 | mod nester; | ||
63 | mod parsing; | ||
64 | mod replacing; | ||
65 | mod resolving; | ||
66 | mod search; | ||
67 | #[macro_use] | ||
68 | mod errors; | ||
69 | #[cfg(test)] | ||
70 | mod tests; | ||
71 | |||
72 | use crate::errors::bail; | ||
73 | pub use crate::errors::SsrError; | ||
74 | pub use crate::matching::Match; | ||
75 | use crate::matching::MatchFailureReason; | ||
76 | use hir::Semantics; | ||
77 | use ide_db::base_db::{FileId, FilePosition, FileRange}; | ||
78 | use resolving::ResolvedRule; | ||
79 | use rustc_hash::FxHashMap; | ||
80 | use syntax::{ast, AstNode, SyntaxNode, TextRange}; | ||
81 | use text_edit::TextEdit; | ||
82 | |||
83 | // A structured search replace rule. Create by calling `parse` on a str. | ||
84 | #[derive(Debug)] | ||
85 | pub struct SsrRule { | ||
86 | /// A structured pattern that we're searching for. | ||
87 | pattern: parsing::RawPattern, | ||
88 | /// What we'll replace it with. | ||
89 | template: parsing::RawPattern, | ||
90 | parsed_rules: Vec<parsing::ParsedRule>, | ||
91 | } | ||
92 | |||
93 | #[derive(Debug)] | ||
94 | pub struct SsrPattern { | ||
95 | raw: parsing::RawPattern, | ||
96 | parsed_rules: Vec<parsing::ParsedRule>, | ||
97 | } | ||
98 | |||
99 | #[derive(Debug, Default)] | ||
100 | pub struct SsrMatches { | ||
101 | pub matches: Vec<Match>, | ||
102 | } | ||
103 | |||
104 | /// Searches a crate for pattern matches and possibly replaces them with something else. | ||
105 | pub struct MatchFinder<'db> { | ||
106 | /// Our source of information about the user's code. | ||
107 | sema: Semantics<'db, ide_db::RootDatabase>, | ||
108 | rules: Vec<ResolvedRule>, | ||
109 | resolution_scope: resolving::ResolutionScope<'db>, | ||
110 | restrict_ranges: Vec<FileRange>, | ||
111 | } | ||
112 | |||
113 | impl<'db> MatchFinder<'db> { | ||
114 | /// Constructs a new instance where names will be looked up as if they appeared at | ||
115 | /// `lookup_context`. | ||
116 | pub fn in_context( | ||
117 | db: &'db ide_db::RootDatabase, | ||
118 | lookup_context: FilePosition, | ||
119 | mut restrict_ranges: Vec<FileRange>, | ||
120 | ) -> MatchFinder<'db> { | ||
121 | restrict_ranges.retain(|range| !range.range.is_empty()); | ||
122 | let sema = Semantics::new(db); | ||
123 | let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context); | ||
124 | MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges } | ||
125 | } | ||
126 | |||
127 | /// Constructs an instance using the start of the first file in `db` as the lookup context. | ||
128 | pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> { | ||
129 | use ide_db::base_db::SourceDatabaseExt; | ||
130 | use ide_db::symbol_index::SymbolsDatabase; | ||
131 | if let Some(first_file_id) = db | ||
132 | .local_roots() | ||
133 | .iter() | ||
134 | .next() | ||
135 | .and_then(|root| db.source_root(root.clone()).iter().next()) | ||
136 | { | ||
137 | Ok(MatchFinder::in_context( | ||
138 | db, | ||
139 | FilePosition { file_id: first_file_id, offset: 0.into() }, | ||
140 | vec![], | ||
141 | )) | ||
142 | } else { | ||
143 | bail!("No files to search"); | ||
144 | } | ||
145 | } | ||
146 | |||
147 | /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take | ||
148 | /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to | ||
149 | /// match to it. | ||
150 | pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> { | ||
151 | for parsed_rule in rule.parsed_rules { | ||
152 | self.rules.push(ResolvedRule::new( | ||
153 | parsed_rule, | ||
154 | &self.resolution_scope, | ||
155 | self.rules.len(), | ||
156 | )?); | ||
157 | } | ||
158 | Ok(()) | ||
159 | } | ||
160 | |||
161 | /// Finds matches for all added rules and returns edits for all found matches. | ||
162 | pub fn edits(&self) -> FxHashMap<FileId, TextEdit> { | ||
163 | use ide_db::base_db::SourceDatabaseExt; | ||
164 | let mut matches_by_file = FxHashMap::default(); | ||
165 | for m in self.matches().matches { | ||
166 | matches_by_file | ||
167 | .entry(m.range.file_id) | ||
168 | .or_insert_with(|| SsrMatches::default()) | ||
169 | .matches | ||
170 | .push(m); | ||
171 | } | ||
172 | matches_by_file | ||
173 | .into_iter() | ||
174 | .map(|(file_id, matches)| { | ||
175 | ( | ||
176 | file_id, | ||
177 | replacing::matches_to_edit( | ||
178 | &matches, | ||
179 | &self.sema.db.file_text(file_id), | ||
180 | &self.rules, | ||
181 | ), | ||
182 | ) | ||
183 | }) | ||
184 | .collect() | ||
185 | } | ||
186 | |||
187 | /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you | ||
188 | /// intend to do replacement, use `add_rule` instead. | ||
189 | pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> { | ||
190 | for parsed_rule in pattern.parsed_rules { | ||
191 | self.rules.push(ResolvedRule::new( | ||
192 | parsed_rule, | ||
193 | &self.resolution_scope, | ||
194 | self.rules.len(), | ||
195 | )?); | ||
196 | } | ||
197 | Ok(()) | ||
198 | } | ||
199 | |||
200 | /// Returns matches for all added rules. | ||
201 | pub fn matches(&self) -> SsrMatches { | ||
202 | let mut matches = Vec::new(); | ||
203 | let mut usage_cache = search::UsageCache::default(); | ||
204 | for rule in &self.rules { | ||
205 | self.find_matches_for_rule(rule, &mut usage_cache, &mut matches); | ||
206 | } | ||
207 | nester::nest_and_remove_collisions(matches, &self.sema) | ||
208 | } | ||
209 | |||
210 | /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match | ||
211 | /// them, while recording reasons why they don't match. This API is useful for command | ||
212 | /// line-based debugging where providing a range is difficult. | ||
213 | pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> { | ||
214 | use ide_db::base_db::SourceDatabaseExt; | ||
215 | let file = self.sema.parse(file_id); | ||
216 | let mut res = Vec::new(); | ||
217 | let file_text = self.sema.db.file_text(file_id); | ||
218 | let mut remaining_text = file_text.as_str(); | ||
219 | let mut base = 0; | ||
220 | let len = snippet.len() as u32; | ||
221 | while let Some(offset) = remaining_text.find(snippet) { | ||
222 | let start = base + offset as u32; | ||
223 | let end = start + len; | ||
224 | self.output_debug_for_nodes_at_range( | ||
225 | file.syntax(), | ||
226 | FileRange { file_id, range: TextRange::new(start.into(), end.into()) }, | ||
227 | &None, | ||
228 | &mut res, | ||
229 | ); | ||
230 | remaining_text = &remaining_text[offset + snippet.len()..]; | ||
231 | base = end; | ||
232 | } | ||
233 | res | ||
234 | } | ||
235 | |||
236 | fn output_debug_for_nodes_at_range( | ||
237 | &self, | ||
238 | node: &SyntaxNode, | ||
239 | range: FileRange, | ||
240 | restrict_range: &Option<FileRange>, | ||
241 | out: &mut Vec<MatchDebugInfo>, | ||
242 | ) { | ||
243 | for node in node.children() { | ||
244 | let node_range = self.sema.original_range(&node); | ||
245 | if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range) | ||
246 | { | ||
247 | continue; | ||
248 | } | ||
249 | if node_range.range == range.range { | ||
250 | for rule in &self.rules { | ||
251 | // For now we ignore rules that have a different kind than our node, otherwise | ||
252 | // we get lots of noise. If at some point we add support for restricting rules | ||
253 | // to a particular kind of thing (e.g. only match type references), then we can | ||
254 | // relax this. We special-case expressions, since function calls can match | ||
255 | // method calls. | ||
256 | if rule.pattern.node.kind() != node.kind() | ||
257 | && !(ast::Expr::can_cast(rule.pattern.node.kind()) | ||
258 | && ast::Expr::can_cast(node.kind())) | ||
259 | { | ||
260 | continue; | ||
261 | } | ||
262 | out.push(MatchDebugInfo { | ||
263 | matched: matching::get_match(true, rule, &node, restrict_range, &self.sema) | ||
264 | .map_err(|e| MatchFailureReason { | ||
265 | reason: e.reason.unwrap_or_else(|| { | ||
266 | "Match failed, but no reason was given".to_owned() | ||
267 | }), | ||
268 | }), | ||
269 | pattern: rule.pattern.node.clone(), | ||
270 | node: node.clone(), | ||
271 | }); | ||
272 | } | ||
273 | } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) { | ||
274 | if let Some(expanded) = self.sema.expand(&macro_call) { | ||
275 | if let Some(tt) = macro_call.token_tree() { | ||
276 | self.output_debug_for_nodes_at_range( | ||
277 | &expanded, | ||
278 | range, | ||
279 | &Some(self.sema.original_range(tt.syntax())), | ||
280 | out, | ||
281 | ); | ||
282 | } | ||
283 | } | ||
284 | } | ||
285 | self.output_debug_for_nodes_at_range(&node, range, restrict_range, out); | ||
286 | } | ||
287 | } | ||
288 | } | ||
289 | |||
290 | pub struct MatchDebugInfo { | ||
291 | node: SyntaxNode, | ||
292 | /// Our search pattern parsed as an expression or item, etc | ||
293 | pattern: SyntaxNode, | ||
294 | matched: Result<Match, MatchFailureReason>, | ||
295 | } | ||
296 | |||
297 | impl std::fmt::Debug for MatchDebugInfo { | ||
298 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { | ||
299 | match &self.matched { | ||
300 | Ok(_) => writeln!(f, "Node matched")?, | ||
301 | Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?, | ||
302 | } | ||
303 | writeln!( | ||
304 | f, | ||
305 | "============ AST ===========\n\ | ||
306 | {:#?}", | ||
307 | self.node | ||
308 | )?; | ||
309 | writeln!(f, "========= PATTERN ==========")?; | ||
310 | writeln!(f, "{:#?}", self.pattern)?; | ||
311 | writeln!(f, "============================")?; | ||
312 | Ok(()) | ||
313 | } | ||
314 | } | ||
315 | |||
316 | impl SsrMatches { | ||
317 | /// Returns `self` with any nested matches removed and made into top-level matches. | ||
318 | pub fn flattened(self) -> SsrMatches { | ||
319 | let mut out = SsrMatches::default(); | ||
320 | self.flatten_into(&mut out); | ||
321 | out | ||
322 | } | ||
323 | |||
324 | fn flatten_into(self, out: &mut SsrMatches) { | ||
325 | for mut m in self.matches { | ||
326 | for p in m.placeholder_values.values_mut() { | ||
327 | std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out); | ||
328 | } | ||
329 | out.matches.push(m); | ||
330 | } | ||
331 | } | ||
332 | } | ||
333 | |||
334 | impl Match { | ||
335 | pub fn matched_text(&self) -> String { | ||
336 | self.matched_node.text().to_string() | ||
337 | } | ||
338 | } | ||
339 | |||
340 | impl std::error::Error for SsrError {} | ||
341 | |||
342 | #[cfg(test)] | ||
343 | impl MatchDebugInfo { | ||
344 | pub(crate) fn match_failure_reason(&self) -> Option<&str> { | ||
345 | self.matched.as_ref().err().map(|r| r.reason.as_str()) | ||
346 | } | ||
347 | } | ||
diff --git a/crates/ide_ssr/src/matching.rs b/crates/ide_ssr/src/matching.rs new file mode 100644 index 000000000..df013bae9 --- /dev/null +++ b/crates/ide_ssr/src/matching.rs | |||
@@ -0,0 +1,819 @@ | |||
1 | //! This module is responsible for matching a search pattern against a node in the AST. In the | ||
2 | //! process of matching, placeholder values are recorded. | ||
3 | |||
4 | use crate::{ | ||
5 | parsing::{Constraint, NodeKind, Placeholder, Var}, | ||
6 | resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo}, | ||
7 | SsrMatches, | ||
8 | }; | ||
9 | use hir::Semantics; | ||
10 | use ide_db::base_db::FileRange; | ||
11 | use rustc_hash::FxHashMap; | ||
12 | use std::{cell::Cell, iter::Peekable}; | ||
13 | use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken}; | ||
14 | use syntax::{ | ||
15 | ast::{AstNode, AstToken}, | ||
16 | SmolStr, | ||
17 | }; | ||
18 | use test_utils::mark; | ||
19 | |||
20 | // Creates a match error. If we're currently attempting to match some code that we thought we were | ||
21 | // going to match, as indicated by the --debug-snippet flag, then populate the reason field. | ||
22 | macro_rules! match_error { | ||
23 | ($e:expr) => {{ | ||
24 | MatchFailed { | ||
25 | reason: if recording_match_fail_reasons() { | ||
26 | Some(format!("{}", $e)) | ||
27 | } else { | ||
28 | None | ||
29 | } | ||
30 | } | ||
31 | }}; | ||
32 | ($fmt:expr, $($arg:tt)+) => {{ | ||
33 | MatchFailed { | ||
34 | reason: if recording_match_fail_reasons() { | ||
35 | Some(format!($fmt, $($arg)+)) | ||
36 | } else { | ||
37 | None | ||
38 | } | ||
39 | } | ||
40 | }}; | ||
41 | } | ||
42 | |||
43 | // Fails the current match attempt, recording the supplied reason if we're recording match fail reasons. | ||
44 | macro_rules! fail_match { | ||
45 | ($($args:tt)*) => {return Err(match_error!($($args)*))}; | ||
46 | } | ||
47 | |||
48 | /// Information about a match that was found. | ||
49 | #[derive(Debug)] | ||
50 | pub struct Match { | ||
51 | pub(crate) range: FileRange, | ||
52 | pub(crate) matched_node: SyntaxNode, | ||
53 | pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>, | ||
54 | pub(crate) ignored_comments: Vec<ast::Comment>, | ||
55 | pub(crate) rule_index: usize, | ||
56 | /// The depth of matched_node. | ||
57 | pub(crate) depth: usize, | ||
58 | // Each path in the template rendered for the module in which the match was found. | ||
59 | pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>, | ||
60 | } | ||
61 | |||
62 | /// Information about a placeholder bound in a match. | ||
63 | #[derive(Debug)] | ||
64 | pub(crate) struct PlaceholderMatch { | ||
65 | /// The node that the placeholder matched to. If set, then we'll search for further matches | ||
66 | /// within this node. It isn't set when we match tokens within a macro call's token tree. | ||
67 | pub(crate) node: Option<SyntaxNode>, | ||
68 | pub(crate) range: FileRange, | ||
69 | /// More matches, found within `node`. | ||
70 | pub(crate) inner_matches: SsrMatches, | ||
71 | /// How many times the code that the placeholder matched needed to be dereferenced. Will only be | ||
72 | /// non-zero if the placeholder matched to the receiver of a method call. | ||
73 | pub(crate) autoderef_count: usize, | ||
74 | pub(crate) autoref_kind: ast::SelfParamKind, | ||
75 | } | ||
76 | |||
77 | #[derive(Debug)] | ||
78 | pub(crate) struct MatchFailureReason { | ||
79 | pub(crate) reason: String, | ||
80 | } | ||
81 | |||
82 | /// An "error" indicating that matching failed. Use the fail_match! macro to create and return this. | ||
83 | #[derive(Clone)] | ||
84 | pub(crate) struct MatchFailed { | ||
85 | /// The reason why we failed to match. Only present when debug_active true in call to | ||
86 | /// `get_match`. | ||
87 | pub(crate) reason: Option<String>, | ||
88 | } | ||
89 | |||
90 | /// Checks if `code` matches the search pattern found in `search_scope`, returning information about | ||
91 | /// the match, if it does. Since we only do matching in this module and searching is done by the | ||
92 | /// parent module, we don't populate nested matches. | ||
93 | pub(crate) fn get_match( | ||
94 | debug_active: bool, | ||
95 | rule: &ResolvedRule, | ||
96 | code: &SyntaxNode, | ||
97 | restrict_range: &Option<FileRange>, | ||
98 | sema: &Semantics<ide_db::RootDatabase>, | ||
99 | ) -> Result<Match, MatchFailed> { | ||
100 | record_match_fails_reasons_scope(debug_active, || { | ||
101 | Matcher::try_match(rule, code, restrict_range, sema) | ||
102 | }) | ||
103 | } | ||
104 | |||
105 | /// Checks if our search pattern matches a particular node of the AST. | ||
106 | struct Matcher<'db, 'sema> { | ||
107 | sema: &'sema Semantics<'db, ide_db::RootDatabase>, | ||
108 | /// If any placeholders come from anywhere outside of this range, then the match will be | ||
109 | /// rejected. | ||
110 | restrict_range: Option<FileRange>, | ||
111 | rule: &'sema ResolvedRule, | ||
112 | } | ||
113 | |||
114 | /// Which phase of matching we're currently performing. We do two phases because most attempted | ||
115 | /// matches will fail and it means we can defer more expensive checks to the second phase. | ||
116 | enum Phase<'a> { | ||
117 | /// On the first phase, we perform cheap checks. No state is mutated and nothing is recorded. | ||
118 | First, | ||
119 | /// On the second phase, we construct the `Match`. Things like what placeholders bind to is | ||
120 | /// recorded. | ||
121 | Second(&'a mut Match), | ||
122 | } | ||
123 | |||
124 | impl<'db, 'sema> Matcher<'db, 'sema> { | ||
125 | fn try_match( | ||
126 | rule: &ResolvedRule, | ||
127 | code: &SyntaxNode, | ||
128 | restrict_range: &Option<FileRange>, | ||
129 | sema: &'sema Semantics<'db, ide_db::RootDatabase>, | ||
130 | ) -> Result<Match, MatchFailed> { | ||
131 | let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule }; | ||
132 | // First pass at matching, where we check that node types and idents match. | ||
133 | match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?; | ||
134 | match_state.validate_range(&sema.original_range(code))?; | ||
135 | let mut the_match = Match { | ||
136 | range: sema.original_range(code), | ||
137 | matched_node: code.clone(), | ||
138 | placeholder_values: FxHashMap::default(), | ||
139 | ignored_comments: Vec::new(), | ||
140 | rule_index: rule.index, | ||
141 | depth: 0, | ||
142 | rendered_template_paths: FxHashMap::default(), | ||
143 | }; | ||
144 | // Second matching pass, where we record placeholder matches, ignored comments and maybe do | ||
145 | // any other more expensive checks that we didn't want to do on the first pass. | ||
146 | match_state.attempt_match_node( | ||
147 | &mut Phase::Second(&mut the_match), | ||
148 | &rule.pattern.node, | ||
149 | code, | ||
150 | )?; | ||
151 | the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count(); | ||
152 | if let Some(template) = &rule.template { | ||
153 | the_match.render_template_paths(template, sema)?; | ||
154 | } | ||
155 | Ok(the_match) | ||
156 | } | ||
157 | |||
158 | /// Checks that `range` is within the permitted range if any. This is applicable when we're | ||
159 | /// processing a macro expansion and we want to fail the match if we're working with a node that | ||
160 | /// didn't originate from the token tree of the macro call. | ||
161 | fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> { | ||
162 | if let Some(restrict_range) = &self.restrict_range { | ||
163 | if restrict_range.file_id != range.file_id | ||
164 | || !restrict_range.range.contains_range(range.range) | ||
165 | { | ||
166 | fail_match!("Node originated from a macro"); | ||
167 | } | ||
168 | } | ||
169 | Ok(()) | ||
170 | } | ||
171 | |||
172 | fn attempt_match_node( | ||
173 | &self, | ||
174 | phase: &mut Phase, | ||
175 | pattern: &SyntaxNode, | ||
176 | code: &SyntaxNode, | ||
177 | ) -> Result<(), MatchFailed> { | ||
178 | // Handle placeholders. | ||
179 | if let Some(placeholder) = self.get_placeholder_for_node(pattern) { | ||
180 | for constraint in &placeholder.constraints { | ||
181 | self.check_constraint(constraint, code)?; | ||
182 | } | ||
183 | if let Phase::Second(matches_out) = phase { | ||
184 | let original_range = self.sema.original_range(code); | ||
185 | // We validated the range for the node when we started the match, so the placeholder | ||
186 | // probably can't fail range validation, but just to be safe... | ||
187 | self.validate_range(&original_range)?; | ||
188 | matches_out.placeholder_values.insert( | ||
189 | placeholder.ident.clone(), | ||
190 | PlaceholderMatch::new(Some(code), original_range), | ||
191 | ); | ||
192 | } | ||
193 | return Ok(()); | ||
194 | } | ||
195 | // We allow a UFCS call to match a method call, provided they resolve to the same function. | ||
196 | if let Some(pattern_ufcs) = self.rule.pattern.ufcs_function_calls.get(pattern) { | ||
197 | if let Some(code) = ast::MethodCallExpr::cast(code.clone()) { | ||
198 | return self.attempt_match_ufcs_to_method_call(phase, pattern_ufcs, &code); | ||
199 | } | ||
200 | if let Some(code) = ast::CallExpr::cast(code.clone()) { | ||
201 | return self.attempt_match_ufcs_to_ufcs(phase, pattern_ufcs, &code); | ||
202 | } | ||
203 | } | ||
204 | if pattern.kind() != code.kind() { | ||
205 | fail_match!( | ||
206 | "Pattern had `{}` ({:?}), code had `{}` ({:?})", | ||
207 | pattern.text(), | ||
208 | pattern.kind(), | ||
209 | code.text(), | ||
210 | code.kind() | ||
211 | ); | ||
212 | } | ||
213 | // Some kinds of nodes have special handling. For everything else, we fall back to default | ||
214 | // matching. | ||
215 | match code.kind() { | ||
216 | SyntaxKind::RECORD_EXPR_FIELD_LIST => { | ||
217 | self.attempt_match_record_field_list(phase, pattern, code) | ||
218 | } | ||
219 | SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code), | ||
220 | SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code), | ||
221 | _ => self.attempt_match_node_children(phase, pattern, code), | ||
222 | } | ||
223 | } | ||
224 | |||
225 | fn attempt_match_node_children( | ||
226 | &self, | ||
227 | phase: &mut Phase, | ||
228 | pattern: &SyntaxNode, | ||
229 | code: &SyntaxNode, | ||
230 | ) -> Result<(), MatchFailed> { | ||
231 | self.attempt_match_sequences( | ||
232 | phase, | ||
233 | PatternIterator::new(pattern), | ||
234 | code.children_with_tokens(), | ||
235 | ) | ||
236 | } | ||
237 | |||
238 | fn attempt_match_sequences( | ||
239 | &self, | ||
240 | phase: &mut Phase, | ||
241 | pattern_it: PatternIterator, | ||
242 | mut code_it: SyntaxElementChildren, | ||
243 | ) -> Result<(), MatchFailed> { | ||
244 | let mut pattern_it = pattern_it.peekable(); | ||
245 | loop { | ||
246 | match phase.next_non_trivial(&mut code_it) { | ||
247 | None => { | ||
248 | if let Some(p) = pattern_it.next() { | ||
249 | fail_match!("Part of the pattern was unmatched: {:?}", p); | ||
250 | } | ||
251 | return Ok(()); | ||
252 | } | ||
253 | Some(SyntaxElement::Token(c)) => { | ||
254 | self.attempt_match_token(phase, &mut pattern_it, &c)?; | ||
255 | } | ||
256 | Some(SyntaxElement::Node(c)) => match pattern_it.next() { | ||
257 | Some(SyntaxElement::Node(p)) => { | ||
258 | self.attempt_match_node(phase, &p, &c)?; | ||
259 | } | ||
260 | Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()), | ||
261 | None => fail_match!("Pattern reached end, code has {}", c.text()), | ||
262 | }, | ||
263 | } | ||
264 | } | ||
265 | } | ||
266 | |||
267 | fn attempt_match_token( | ||
268 | &self, | ||
269 | phase: &mut Phase, | ||
270 | pattern: &mut Peekable<PatternIterator>, | ||
271 | code: &syntax::SyntaxToken, | ||
272 | ) -> Result<(), MatchFailed> { | ||
273 | phase.record_ignored_comments(code); | ||
274 | // Ignore whitespace and comments. | ||
275 | if code.kind().is_trivia() { | ||
276 | return Ok(()); | ||
277 | } | ||
278 | if let Some(SyntaxElement::Token(p)) = pattern.peek() { | ||
279 | // If the code has a comma and the pattern is about to close something, then accept the | ||
280 | // comma without advancing the pattern. i.e. ignore trailing commas. | ||
281 | if code.kind() == SyntaxKind::COMMA && is_closing_token(p.kind()) { | ||
282 | return Ok(()); | ||
283 | } | ||
284 | // Conversely, if the pattern has a comma and the code doesn't, skip that part of the | ||
285 | // pattern and continue to match the code. | ||
286 | if p.kind() == SyntaxKind::COMMA && is_closing_token(code.kind()) { | ||
287 | pattern.next(); | ||
288 | } | ||
289 | } | ||
290 | // Consume an element from the pattern and make sure it matches. | ||
291 | match pattern.next() { | ||
292 | Some(SyntaxElement::Token(p)) => { | ||
293 | if p.kind() != code.kind() || p.text() != code.text() { | ||
294 | fail_match!( | ||
295 | "Pattern wanted token '{}' ({:?}), but code had token '{}' ({:?})", | ||
296 | p.text(), | ||
297 | p.kind(), | ||
298 | code.text(), | ||
299 | code.kind() | ||
300 | ) | ||
301 | } | ||
302 | } | ||
303 | Some(SyntaxElement::Node(p)) => { | ||
304 | // Not sure if this is actually reachable. | ||
305 | fail_match!( | ||
306 | "Pattern wanted {:?}, but code had token '{}' ({:?})", | ||
307 | p, | ||
308 | code.text(), | ||
309 | code.kind() | ||
310 | ); | ||
311 | } | ||
312 | None => { | ||
313 | fail_match!("Pattern exhausted, while code remains: `{}`", code.text()); | ||
314 | } | ||
315 | } | ||
316 | Ok(()) | ||
317 | } | ||
318 | |||
319 | fn check_constraint( | ||
320 | &self, | ||
321 | constraint: &Constraint, | ||
322 | code: &SyntaxNode, | ||
323 | ) -> Result<(), MatchFailed> { | ||
324 | match constraint { | ||
325 | Constraint::Kind(kind) => { | ||
326 | kind.matches(code)?; | ||
327 | } | ||
328 | Constraint::Not(sub) => { | ||
329 | if self.check_constraint(&*sub, code).is_ok() { | ||
330 | fail_match!("Constraint {:?} failed for '{}'", constraint, code.text()); | ||
331 | } | ||
332 | } | ||
333 | } | ||
334 | Ok(()) | ||
335 | } | ||
336 | |||
    /// Paths are matched based on whether they refer to the same thing, even if they're written
    /// differently.
    fn attempt_match_path(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) {
            // These casts assume the caller only invokes this for PATH nodes — TODO confirm.
            let pattern_path = ast::Path::cast(pattern.clone()).unwrap();
            let code_path = ast::Path::cast(code.clone()).unwrap();
            if let (Some(pattern_segment), Some(code_segment)) =
                (pattern_path.segment(), code_path.segment())
            {
                // Match everything within the segment except for the name-ref, which is handled
                // separately via comparing what the path resolves to below.
                self.attempt_match_opt(
                    phase,
                    pattern_segment.generic_arg_list(),
                    code_segment.generic_arg_list(),
                )?;
                self.attempt_match_opt(
                    phase,
                    pattern_segment.param_list(),
                    code_segment.param_list(),
                )?;
            }
            // NOTE(review): the resolution comparison only runs in the second phase; the first
            // phase appears to be structure-only.
            if matches!(phase, Phase::Second(_)) {
                let resolution = self
                    .sema
                    .resolve_path(&code_path)
                    .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?;
                if pattern_resolved.resolution != resolution {
                    fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text());
                }
            }
        } else {
            // The pattern path wasn't resolved when the rule was built; fall back to purely
            // structural matching of the children.
            return self.attempt_match_node_children(phase, pattern, code);
        }
        Ok(())
    }
378 | |||
379 | fn attempt_match_opt<T: AstNode>( | ||
380 | &self, | ||
381 | phase: &mut Phase, | ||
382 | pattern: Option<T>, | ||
383 | code: Option<T>, | ||
384 | ) -> Result<(), MatchFailed> { | ||
385 | match (pattern, code) { | ||
386 | (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()), | ||
387 | (None, None) => Ok(()), | ||
388 | (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()), | ||
389 | (None, Some(c)) => { | ||
390 | fail_match!("Nothing in pattern to match code `{}`", c.syntax().text()) | ||
391 | } | ||
392 | } | ||
393 | } | ||
394 | |||
    /// We want to allow the records to match in any order, so we have special matching logic for
    /// them.
    fn attempt_match_record_field_list(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        // Build a map keyed by field name.
        let mut fields_by_name: FxHashMap<SmolStr, SyntaxNode> = FxHashMap::default();
        for child in code.children() {
            if let Some(record) = ast::RecordExprField::cast(child.clone()) {
                if let Some(name) = record.field_name() {
                    fields_by_name.insert(name.text().into(), child.clone());
                }
            }
        }
        // Match each pattern field against the code field with the same name, removing matched
        // entries from the map as we go.
        for p in pattern.children_with_tokens() {
            if let SyntaxElement::Node(p) = p {
                if let Some(name_element) = p.first_child_or_token() {
                    if self.get_placeholder(&name_element).is_some() {
                        // If the pattern is using placeholders for field names then order
                        // independence doesn't make sense. Fall back to regular ordered
                        // matching.
                        return self.attempt_match_node_children(phase, pattern, code);
                    }
                    if let Some(ident) = only_ident(name_element) {
                        let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| {
                            match_error!(
                                "Placeholder has record field '{}', but code doesn't",
                                ident
                            )
                        })?;
                        self.attempt_match_node(phase, &p, &code_record)?;
                    }
                }
            }
        }
        // Anything left in the map was present in the code but not in the pattern.
        if let Some(unmatched_fields) = fields_by_name.keys().next() {
            fail_match!(
                "{} field(s) of a record literal failed to match, starting with {}",
                fields_by_name.len(),
                unmatched_fields
            );
        }
        Ok(())
    }
442 | |||
    /// Outside of token trees, a placeholder can only match a single AST node, whereas in a token
    /// tree it can match a sequence of tokens. Note, that this code will only be used when the
    /// pattern matches the macro invocation. For matches within the macro call, we'll already have
    /// expanded the macro.
    fn attempt_match_token_tree(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &syntax::SyntaxNode,
    ) -> Result<(), MatchFailed> {
        let mut pattern = PatternIterator::new(pattern).peekable();
        let mut children = code.children_with_tokens();
        while let Some(child) = children.next() {
            if let Some(placeholder) = pattern.peek().and_then(|p| self.get_placeholder(p)) {
                pattern.next();
                // Text of the token that follows the placeholder in the pattern; it marks where
                // the placeholder's run of code tokens ends.
                let next_pattern_token = pattern
                    .peek()
                    .and_then(|p| match p {
                        SyntaxElement::Token(t) => Some(t.clone()),
                        SyntaxElement::Node(n) => n.first_token(),
                    })
                    .map(|p| p.text().to_string());
                let first_matched_token = child.clone();
                let mut last_matched_token = child;
                // Read code tokens until we reach one equal to the next token from our pattern
                // or we reach the end of the token tree.
                while let Some(next) = children.next() {
                    match &next {
                        SyntaxElement::Token(t) => {
                            if Some(t.to_string()) == next_pattern_token {
                                pattern.next();
                                break;
                            }
                        }
                        SyntaxElement::Node(n) => {
                            if let Some(first_token) = n.first_token() {
                                if Some(first_token.text()) == next_pattern_token.as_deref() {
                                    if let Some(SyntaxElement::Node(p)) = pattern.next() {
                                        // We have a subtree that starts with the next token in our pattern.
                                        self.attempt_match_token_tree(phase, &p, &n)?;
                                        break;
                                    }
                                }
                            }
                        }
                    };
                    last_matched_token = next;
                }
                // Record the full span of code tokens the placeholder consumed.
                if let Phase::Second(match_out) = phase {
                    match_out.placeholder_values.insert(
                        placeholder.ident.clone(),
                        PlaceholderMatch::from_range(FileRange {
                            file_id: self.sema.original_range(code).file_id,
                            range: first_matched_token
                                .text_range()
                                .cover(last_matched_token.text_range()),
                        }),
                    );
                }
                continue;
            }
            // Match literal (non-placeholder) tokens.
            match child {
                SyntaxElement::Token(token) => {
                    self.attempt_match_token(phase, &mut pattern, &token)?;
                }
                SyntaxElement::Node(node) => match pattern.next() {
                    Some(SyntaxElement::Node(p)) => {
                        self.attempt_match_token_tree(phase, &p, &node)?;
                    }
                    Some(SyntaxElement::Token(p)) => fail_match!(
                        "Pattern has token '{}', code has subtree '{}'",
                        p.text(),
                        node.text()
                    ),
                    None => fail_match!("Pattern has nothing, code has '{}'", node.text()),
                },
            }
        }
        // The code ran out; anything remaining in the pattern is unmatched.
        if let Some(p) = pattern.next() {
            fail_match!("Reached end of token tree in code, but pattern still has {:?}", p);
        }
        Ok(())
    }
527 | |||
    /// Matches a UFCS-style pattern call (`Type::func(receiver, args...)`) against a method call
    /// in the code (`receiver.func(args...)`); both must resolve to the same function.
    fn attempt_match_ufcs_to_method_call(
        &self,
        phase: &mut Phase,
        pattern_ufcs: &UfcsCallInfo,
        code: &ast::MethodCallExpr,
    ) -> Result<(), MatchFailed> {
        use ast::ArgListOwner;
        let code_resolved_function = self
            .sema
            .resolve_method_call(code)
            .ok_or_else(|| match_error!("Failed to resolve method call"))?;
        if pattern_ufcs.function != code_resolved_function {
            fail_match!("Method call resolved to a different function");
        }
        // Check arguments.
        let mut pattern_args = pattern_ufcs
            .call_expr
            .arg_list()
            .ok_or_else(|| match_error!("Pattern function call has no args"))?
            .args();
        // If the function we're calling takes a self parameter, then we store additional
        // information on the placeholder match about autoderef and autoref. This allows us to use
        // the placeholder in a context where autoderef and autoref don't apply.
        if code_resolved_function.self_param(self.sema.db).is_some() {
            if let (Some(pattern_type), Some(expr)) =
                (&pattern_ufcs.qualifier_type, &code.receiver())
            {
                let deref_count = self.check_expr_type(pattern_type, expr)?;
                // The first UFCS argument in the pattern corresponds to the method receiver.
                let pattern_receiver = pattern_args.next();
                self.attempt_match_opt(phase, pattern_receiver.clone(), code.receiver())?;
                if let Phase::Second(match_out) = phase {
                    if let Some(placeholder_value) = pattern_receiver
                        .and_then(|n| self.get_placeholder_for_node(n.syntax()))
                        .and_then(|placeholder| {
                            match_out.placeholder_values.get_mut(&placeholder.ident)
                        })
                    {
                        placeholder_value.autoderef_count = deref_count;
                        placeholder_value.autoref_kind = self
                            .sema
                            .resolve_method_call_as_callable(code)
                            .and_then(|callable| callable.receiver_param(self.sema.db))
                            .map(|self_param| self_param.kind())
                            .unwrap_or(ast::SelfParamKind::Owned);
                    }
                }
            }
        } else {
            self.attempt_match_opt(phase, pattern_args.next(), code.receiver())?;
        }
        // Match the remaining arguments pairwise; both iterators must be exhausted together.
        let mut code_args =
            code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args();
        loop {
            match (pattern_args.next(), code_args.next()) {
                (None, None) => return Ok(()),
                (p, c) => self.attempt_match_opt(phase, p, c)?,
            }
        }
    }
587 | |||
    /// Matches a UFCS-style pattern call against a UFCS-style call in the code.
    fn attempt_match_ufcs_to_ufcs(
        &self,
        phase: &mut Phase,
        pattern_ufcs: &UfcsCallInfo,
        code: &ast::CallExpr,
    ) -> Result<(), MatchFailed> {
        use ast::ArgListOwner;
        // Check that the first argument is the expected type.
        if let (Some(pattern_type), Some(expr)) = (
            &pattern_ufcs.qualifier_type,
            &code.arg_list().and_then(|code_args| code_args.args().next()),
        ) {
            self.check_expr_type(pattern_type, expr)?;
        }
        // Everything else is matched structurally, child by child.
        self.attempt_match_node_children(phase, pattern_ufcs.call_expr.syntax(), code.syntax())
    }
604 | |||
    /// Verifies that `expr` matches `pattern_type`, possibly after dereferencing some number of
    /// times. Returns the number of times it needed to be dereferenced.
    fn check_expr_type(
        &self,
        pattern_type: &hir::Type,
        expr: &ast::Expr,
    ) -> Result<usize, MatchFailed> {
        use hir::HirDisplay;
        let code_type = self.sema.type_of_expr(&expr).ok_or_else(|| {
            match_error!("Failed to get receiver type for `{}`", expr.syntax().text())
        })?;
        // Walk the autoderef chain looking for the first type equal to the pattern's.
        // Temporary needed to make the borrow checker happy.
        let res = code_type
            .autoderef(self.sema.db)
            .enumerate()
            .find(|(_, deref_code_type)| pattern_type == deref_code_type)
            .map(|(count, _)| count)
            .ok_or_else(|| {
                match_error!(
                    "Pattern type `{}` didn't match code type `{}`",
                    pattern_type.display(self.sema.db),
                    code_type.display(self.sema.db)
                )
            });
        res
    }
631 | |||
    /// Returns the placeholder that `node` stands for, if it is one.
    fn get_placeholder_for_node(&self, node: &SyntaxNode) -> Option<&Placeholder> {
        self.get_placeholder(&SyntaxElement::Node(node.clone()))
    }

    /// Returns the placeholder that `element` stands for: an element qualifies when it contains
    /// nothing but an identifier that is registered as a placeholder stand-in on the rule.
    fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
        only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident))
    }
639 | } | ||
640 | |||
641 | impl Match { | ||
642 | fn render_template_paths( | ||
643 | &mut self, | ||
644 | template: &ResolvedPattern, | ||
645 | sema: &Semantics<ide_db::RootDatabase>, | ||
646 | ) -> Result<(), MatchFailed> { | ||
647 | let module = sema | ||
648 | .scope(&self.matched_node) | ||
649 | .module() | ||
650 | .ok_or_else(|| match_error!("Matched node isn't in a module"))?; | ||
651 | for (path, resolved_path) in &template.resolved_paths { | ||
652 | if let hir::PathResolution::Def(module_def) = resolved_path.resolution { | ||
653 | let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| { | ||
654 | match_error!("Failed to render template path `{}` at match location") | ||
655 | })?; | ||
656 | self.rendered_template_paths.insert(path.clone(), mod_path); | ||
657 | } | ||
658 | } | ||
659 | Ok(()) | ||
660 | } | ||
661 | } | ||
662 | |||
663 | impl Phase<'_> { | ||
664 | fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> { | ||
665 | loop { | ||
666 | let c = code_it.next(); | ||
667 | if let Some(SyntaxElement::Token(t)) = &c { | ||
668 | self.record_ignored_comments(t); | ||
669 | if t.kind().is_trivia() { | ||
670 | continue; | ||
671 | } | ||
672 | } | ||
673 | return c; | ||
674 | } | ||
675 | } | ||
676 | |||
677 | fn record_ignored_comments(&mut self, token: &SyntaxToken) { | ||
678 | if token.kind() == SyntaxKind::COMMENT { | ||
679 | if let Phase::Second(match_out) = self { | ||
680 | if let Some(comment) = ast::Comment::cast(token.clone()) { | ||
681 | match_out.ignored_comments.push(comment); | ||
682 | } | ||
683 | } | ||
684 | } | ||
685 | } | ||
686 | } | ||
687 | |||
688 | fn is_closing_token(kind: SyntaxKind) -> bool { | ||
689 | kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK | ||
690 | } | ||
691 | |||
692 | pub(crate) fn record_match_fails_reasons_scope<F, T>(debug_active: bool, f: F) -> T | ||
693 | where | ||
694 | F: Fn() -> T, | ||
695 | { | ||
696 | RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(debug_active)); | ||
697 | let res = f(); | ||
698 | RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(false)); | ||
699 | res | ||
700 | } | ||
701 | |||
// For performance reasons, we don't want to record the reason why every match fails, only the bit
// of code that the user indicated they thought would match. We use a thread local to indicate when
// we are trying to match that bit of code. This saves us having to pass a boolean into all the bits
// of code that can make the decision to not match.
thread_local! {
    // Off by default; toggled on only for the duration of `record_match_fails_reasons_scope`.
    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = Cell::new(false);
}
709 | |||
/// Returns whether match-failure reasons are currently being recorded (see
/// `record_match_fails_reasons_scope`).
fn recording_match_fail_reasons() -> bool {
    RECORDING_MATCH_FAIL_REASONS.with(|c| c.get())
}
713 | |||
714 | impl PlaceholderMatch { | ||
715 | fn new(node: Option<&SyntaxNode>, range: FileRange) -> Self { | ||
716 | Self { | ||
717 | node: node.cloned(), | ||
718 | range, | ||
719 | inner_matches: SsrMatches::default(), | ||
720 | autoderef_count: 0, | ||
721 | autoref_kind: ast::SelfParamKind::Owned, | ||
722 | } | ||
723 | } | ||
724 | |||
725 | fn from_range(range: FileRange) -> Self { | ||
726 | Self::new(None, range) | ||
727 | } | ||
728 | } | ||
729 | |||
730 | impl NodeKind { | ||
731 | fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> { | ||
732 | let ok = match self { | ||
733 | Self::Literal => { | ||
734 | mark::hit!(literal_constraint); | ||
735 | ast::Literal::can_cast(node.kind()) | ||
736 | } | ||
737 | }; | ||
738 | if !ok { | ||
739 | fail_match!("Code '{}' isn't of kind {:?}", node.text(), self); | ||
740 | } | ||
741 | Ok(()) | ||
742 | } | ||
743 | } | ||
744 | |||
745 | // If `node` contains nothing but an ident then return it, otherwise return None. | ||
746 | fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> { | ||
747 | match element { | ||
748 | SyntaxElement::Token(t) => { | ||
749 | if t.kind() == SyntaxKind::IDENT { | ||
750 | return Some(t); | ||
751 | } | ||
752 | } | ||
753 | SyntaxElement::Node(n) => { | ||
754 | let mut children = n.children_with_tokens(); | ||
755 | if let (Some(only_child), None) = (children.next(), children.next()) { | ||
756 | return only_ident(only_child); | ||
757 | } | ||
758 | } | ||
759 | } | ||
760 | None | ||
761 | } | ||
762 | |||
763 | struct PatternIterator { | ||
764 | iter: SyntaxElementChildren, | ||
765 | } | ||
766 | |||
767 | impl Iterator for PatternIterator { | ||
768 | type Item = SyntaxElement; | ||
769 | |||
770 | fn next(&mut self) -> Option<SyntaxElement> { | ||
771 | while let Some(element) = self.iter.next() { | ||
772 | if !element.kind().is_trivia() { | ||
773 | return Some(element); | ||
774 | } | ||
775 | } | ||
776 | None | ||
777 | } | ||
778 | } | ||
779 | |||
780 | impl PatternIterator { | ||
781 | fn new(parent: &SyntaxNode) -> Self { | ||
782 | Self { iter: parent.children_with_tokens() } | ||
783 | } | ||
784 | } | ||
785 | |||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{MatchFinder, SsrRule};

    // End-to-end check: parse a rule, find its single match, inspect the captured
    // placeholder, then apply the replacement edit.
    #[test]
    fn parse_match_replace() {
        let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
        let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";

        let (db, position, selections) = crate::tests::single_file(input);
        let mut match_finder = MatchFinder::in_context(&db, position, selections);
        match_finder.add_rule(rule).unwrap();
        let matches = match_finder.matches();
        // The only call matching the pattern is `foo(1+2)` in `main`.
        assert_eq!(matches.matches.len(), 1);
        assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
        assert_eq!(matches.matches[0].placeholder_values.len(), 1);
        // `$x` should have captured the argument expression.
        assert_eq!(
            matches.matches[0].placeholder_values[&Var("x".to_string())]
                .node
                .as_ref()
                .unwrap()
                .text(),
            "1+2"
        );

        // Applying the edit rewrites the call in place.
        let edits = match_finder.edits();
        assert_eq!(edits.len(), 1);
        let edit = &edits[&position.file_id];
        let mut after = input.to_string();
        edit.apply(&mut after);
        assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
    }
}
diff --git a/crates/ide_ssr/src/nester.rs b/crates/ide_ssr/src/nester.rs new file mode 100644 index 000000000..6ac355dfc --- /dev/null +++ b/crates/ide_ssr/src/nester.rs | |||
@@ -0,0 +1,94 @@ | |||
1 | //! Converts a flat collection of matches into a nested form suitable for replacement. When there | ||
2 | //! are multiple matches for a node, or that overlap, priority is given to the earlier rule. Nested | ||
3 | //! matches are only permitted if the inner match is contained entirely within a placeholder of an | ||
4 | //! outer match. | ||
5 | //! | ||
6 | //! For example, if our search pattern is `foo(foo($a))` and the code had `foo(foo(foo(foo(42))))`, | ||
7 | //! then we'll get 3 matches, however only the outermost and innermost matches can be accepted. The | ||
8 | //! middle match would take the second `foo` from the outer match. | ||
9 | |||
10 | use crate::{Match, SsrMatches}; | ||
11 | use rustc_hash::FxHashMap; | ||
12 | use syntax::SyntaxNode; | ||
13 | |||
14 | pub(crate) fn nest_and_remove_collisions( | ||
15 | mut matches: Vec<Match>, | ||
16 | sema: &hir::Semantics<ide_db::RootDatabase>, | ||
17 | ) -> SsrMatches { | ||
18 | // We sort the matches by depth then by rule index. Sorting by depth means that by the time we | ||
19 | // see a match, any parent matches or conflicting matches will have already been seen. Sorting | ||
20 | // by rule_index means that if there are two matches for the same node, the rule added first | ||
21 | // will take precedence. | ||
22 | matches.sort_by(|a, b| a.depth.cmp(&b.depth).then_with(|| a.rule_index.cmp(&b.rule_index))); | ||
23 | let mut collector = MatchCollector::default(); | ||
24 | for m in matches { | ||
25 | collector.add_match(m, sema); | ||
26 | } | ||
27 | collector.into() | ||
28 | } | ||
29 | |||
#[derive(Default)]
struct MatchCollector {
    // Root-level matches keyed by the syntax node they matched; nested matches live inside the
    // placeholder values of their parent match rather than here.
    matches_by_node: FxHashMap<SyntaxNode, Match>,
}
34 | |||
impl MatchCollector {
    /// Attempts to add `m` to matches. If it conflicts with an existing match, it is discarded. If
    /// it is entirely within a placeholder of an existing match, then it is added as a child
    /// match of the existing match.
    fn add_match(&mut self, m: Match, sema: &hir::Semantics<ide_db::RootDatabase>) {
        let matched_node = m.matched_node.clone();
        // Exact collision: an earlier (higher-priority) match already claimed this node.
        if let Some(existing) = self.matches_by_node.get_mut(&matched_node) {
            try_add_sub_match(m, existing, sema);
            return;
        }
        // If any ancestor node was already matched, `m` can only survive as a sub-match
        // nested inside one of that match's placeholders.
        for ancestor in sema.ancestors_with_macros(m.matched_node.clone()) {
            if let Some(existing) = self.matches_by_node.get_mut(&ancestor) {
                try_add_sub_match(m, existing, sema);
                return;
            }
        }
        self.matches_by_node.insert(matched_node, m);
    }
}
54 | |||
55 | /// Attempts to add `m` as a sub-match of `existing`. | ||
56 | fn try_add_sub_match(m: Match, existing: &mut Match, sema: &hir::Semantics<ide_db::RootDatabase>) { | ||
57 | for p in existing.placeholder_values.values_mut() { | ||
58 | // Note, no need to check if p.range.file is equal to m.range.file, since we | ||
59 | // already know we're within `existing`. | ||
60 | if p.range.range.contains_range(m.range.range) { | ||
61 | // Convert the inner matches in `p` into a temporary MatchCollector. When | ||
62 | // we're done, we then convert it back into an SsrMatches. If we expected | ||
63 | // lots of inner matches, it might be worthwhile keeping a MatchCollector | ||
64 | // around for each placeholder match. However we expect most placeholder | ||
65 | // will have 0 and a few will have 1. More than that should hopefully be | ||
66 | // exceptional. | ||
67 | let mut collector = MatchCollector::default(); | ||
68 | for m in std::mem::replace(&mut p.inner_matches.matches, Vec::new()) { | ||
69 | collector.matches_by_node.insert(m.matched_node.clone(), m); | ||
70 | } | ||
71 | collector.add_match(m, sema); | ||
72 | p.inner_matches = collector.into(); | ||
73 | break; | ||
74 | } | ||
75 | } | ||
76 | } | ||
77 | |||
78 | impl From<MatchCollector> for SsrMatches { | ||
79 | fn from(mut match_collector: MatchCollector) -> Self { | ||
80 | let mut matches = SsrMatches::default(); | ||
81 | for (_, m) in match_collector.matches_by_node.drain() { | ||
82 | matches.matches.push(m); | ||
83 | } | ||
84 | matches.matches.sort_by(|a, b| { | ||
85 | // Order matches by file_id then by start range. This should be sufficient since ranges | ||
86 | // shouldn't be overlapping. | ||
87 | a.range | ||
88 | .file_id | ||
89 | .cmp(&b.range.file_id) | ||
90 | .then_with(|| a.range.range.start().cmp(&b.range.range.start())) | ||
91 | }); | ||
92 | matches | ||
93 | } | ||
94 | } | ||
diff --git a/crates/ide_ssr/src/parsing.rs b/crates/ide_ssr/src/parsing.rs new file mode 100644 index 000000000..3d5e4feb7 --- /dev/null +++ b/crates/ide_ssr/src/parsing.rs | |||
@@ -0,0 +1,414 @@ | |||
1 | //! This file contains code for parsing SSR rules, which look something like `foo($a) ==>> bar($b)`. | ||
2 | //! We first split everything before and after the separator `==>>`. Next, both the search pattern | ||
3 | //! and the replacement template get tokenized by the Rust tokenizer. Tokens are then searched for | ||
4 | //! placeholders, which start with `$`. For replacement templates, this is the final form. For | ||
5 | //! search patterns, we go further and parse the pattern as each kind of thing that we can match. | ||
6 | //! e.g. expressions, type references etc. | ||
7 | |||
8 | use crate::errors::bail; | ||
9 | use crate::{SsrError, SsrPattern, SsrRule}; | ||
10 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
11 | use std::{fmt::Display, str::FromStr}; | ||
12 | use syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T}; | ||
13 | use test_utils::mark; | ||
14 | |||
/// A rule parsed in one particular syntactic form (expression, type, item, path, pattern or
/// statement); see `ParsedRule::new`, which produces one of these per form that parses.
#[derive(Debug)]
pub(crate) struct ParsedRule {
    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
    pub(crate) pattern: SyntaxNode,
    pub(crate) template: Option<SyntaxNode>,
}

/// A search or replace pattern as written by the user: a flat sequence of tokens and
/// placeholders, prior to being parsed as Rust syntax.
#[derive(Debug)]
pub(crate) struct RawPattern {
    tokens: Vec<PatternElement>,
}

// Part of a search or replace pattern.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum PatternElement {
    Token(Token),
    Placeholder(Placeholder),
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Placeholder {
    /// The name of this placeholder. e.g. for "$a", this would be "a"
    pub(crate) ident: Var,
    /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
    stand_in_name: String,
    /// Constraints that a node bound to this placeholder must satisfy.
    pub(crate) constraints: Vec<Constraint>,
}

/// Represents a `$var` in an SSR query.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(crate) struct Var(pub(crate) String);

/// A restriction on what a placeholder may bind to.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum Constraint {
    Kind(NodeKind),
    Not(Box<Constraint>),
}

/// Node kinds that a `Constraint::Kind` can require.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum NodeKind {
    Literal,
}

/// A single token of a raw pattern, as produced by the Rust tokenizer.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Token {
    kind: SyntaxKind,
    pub(crate) text: SmolStr,
}
63 | |||
impl ParsedRule {
    /// Parses `pattern` (and `template`, when present) as every syntactic form they could take,
    /// producing one `ParsedRule` per form in which both sides parse successfully.
    fn new(
        pattern: &RawPattern,
        template: Option<&RawPattern>,
    ) -> Result<Vec<ParsedRule>, SsrError> {
        let raw_pattern = pattern.as_rust_code();
        let raw_template = template.map(|t| t.as_rust_code());
        let raw_template = raw_template.as_ref().map(|s| s.as_str());
        let mut builder = RuleBuilder {
            placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
            rules: Vec::new(),
        };

        // For an expression pattern: prefer pairing it with the template parsed as an
        // expression; only when the template doesn't parse as an expression do we fall back to
        // its statement form.
        let raw_template_stmt = raw_template.map(ast::Stmt::parse);
        if let raw_template_expr @ Some(Ok(_)) = raw_template.map(ast::Expr::parse) {
            builder.try_add(ast::Expr::parse(&raw_pattern), raw_template_expr);
        } else {
            builder.try_add(ast::Expr::parse(&raw_pattern), raw_template_stmt.clone());
        }
        builder.try_add(ast::Type::parse(&raw_pattern), raw_template.map(ast::Type::parse));
        builder.try_add(ast::Item::parse(&raw_pattern), raw_template.map(ast::Item::parse));
        builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse));
        builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse));
        builder.try_add(ast::Stmt::parse(&raw_pattern), raw_template_stmt);
        builder.build()
    }
}
91 | |||
/// Accumulates the successfully-parsed syntactic forms of one rule; see `ParsedRule::new`.
struct RuleBuilder {
    placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
    rules: Vec<ParsedRule>,
}
96 | |||
97 | impl RuleBuilder { | ||
98 | fn try_add<T: AstNode, T2: AstNode>( | ||
99 | &mut self, | ||
100 | pattern: Result<T, ()>, | ||
101 | template: Option<Result<T2, ()>>, | ||
102 | ) { | ||
103 | match (pattern, template) { | ||
104 | (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule { | ||
105 | placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), | ||
106 | pattern: pattern.syntax().clone(), | ||
107 | template: Some(template.syntax().clone()), | ||
108 | }), | ||
109 | (Ok(pattern), None) => self.rules.push(ParsedRule { | ||
110 | placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), | ||
111 | pattern: pattern.syntax().clone(), | ||
112 | template: None, | ||
113 | }), | ||
114 | _ => {} | ||
115 | } | ||
116 | } | ||
117 | |||
118 | fn build(mut self) -> Result<Vec<ParsedRule>, SsrError> { | ||
119 | if self.rules.is_empty() { | ||
120 | bail!("Not a valid Rust expression, type, item, path or pattern"); | ||
121 | } | ||
122 | // If any rules contain paths, then we reject any rules that don't contain paths. Allowing a | ||
123 | // mix leads to strange semantics, since the path-based rules only match things where the | ||
124 | // path refers to semantically the same thing, whereas the non-path-based rules could match | ||
125 | // anything. Specifically, if we have a rule like `foo ==>> bar` we only want to match the | ||
126 | // `foo` that is in the current scope, not any `foo`. However "foo" can be parsed as a | ||
127 | // pattern (IDENT_PAT -> NAME -> IDENT). Allowing such a rule through would result in | ||
128 | // renaming everything called `foo` to `bar`. It'd also be slow, since without a path, we'd | ||
129 | // have to use the slow-scan search mechanism. | ||
130 | if self.rules.iter().any(|rule| contains_path(&rule.pattern)) { | ||
131 | let old_len = self.rules.len(); | ||
132 | self.rules.retain(|rule| contains_path(&rule.pattern)); | ||
133 | if self.rules.len() < old_len { | ||
134 | mark::hit!(pattern_is_a_single_segment_path); | ||
135 | } | ||
136 | } | ||
137 | Ok(self.rules) | ||
138 | } | ||
139 | } | ||
140 | |||
141 | /// Returns whether there are any paths in `node`. | ||
142 | fn contains_path(node: &SyntaxNode) -> bool { | ||
143 | node.kind() == SyntaxKind::PATH | ||
144 | || node.descendants().any(|node| node.kind() == SyntaxKind::PATH) | ||
145 | } | ||
146 | |||
147 | impl FromStr for SsrRule { | ||
148 | type Err = SsrError; | ||
149 | |||
150 | fn from_str(query: &str) -> Result<SsrRule, SsrError> { | ||
151 | let mut it = query.split("==>>"); | ||
152 | let pattern = it.next().expect("at least empty string").trim(); | ||
153 | let template = it | ||
154 | .next() | ||
155 | .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))? | ||
156 | .trim() | ||
157 | .to_string(); | ||
158 | if it.next().is_some() { | ||
159 | return Err(SsrError("More than one delimiter found".into())); | ||
160 | } | ||
161 | let raw_pattern = pattern.parse()?; | ||
162 | let raw_template = template.parse()?; | ||
163 | let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?; | ||
164 | let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules }; | ||
165 | validate_rule(&rule)?; | ||
166 | Ok(rule) | ||
167 | } | ||
168 | } | ||
169 | |||
170 | impl FromStr for RawPattern { | ||
171 | type Err = SsrError; | ||
172 | |||
173 | fn from_str(pattern_str: &str) -> Result<RawPattern, SsrError> { | ||
174 | Ok(RawPattern { tokens: parse_pattern(pattern_str)? }) | ||
175 | } | ||
176 | } | ||
177 | |||
178 | impl RawPattern { | ||
179 | /// Returns this search pattern as Rust source code that we can feed to the Rust parser. | ||
180 | fn as_rust_code(&self) -> String { | ||
181 | let mut res = String::new(); | ||
182 | for t in &self.tokens { | ||
183 | res.push_str(match t { | ||
184 | PatternElement::Token(token) => token.text.as_str(), | ||
185 | PatternElement::Placeholder(placeholder) => placeholder.stand_in_name.as_str(), | ||
186 | }); | ||
187 | } | ||
188 | res | ||
189 | } | ||
190 | |||
191 | pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> { | ||
192 | let mut res = FxHashMap::default(); | ||
193 | for t in &self.tokens { | ||
194 | if let PatternElement::Placeholder(placeholder) = t { | ||
195 | res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone()); | ||
196 | } | ||
197 | } | ||
198 | res | ||
199 | } | ||
200 | } | ||
201 | |||
202 | impl FromStr for SsrPattern { | ||
203 | type Err = SsrError; | ||
204 | |||
205 | fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> { | ||
206 | let raw_pattern = pattern_str.parse()?; | ||
207 | let parsed_rules = ParsedRule::new(&raw_pattern, None)?; | ||
208 | Ok(SsrPattern { raw: raw_pattern, parsed_rules }) | ||
209 | } | ||
210 | } | ||
211 | |||
212 | /// Returns `pattern_str`, parsed as a search or replace pattern. If `remove_whitespace` is true, | ||
213 | /// then any whitespace tokens will be removed, which we do for the search pattern, but not for the | ||
214 | /// replace pattern. | ||
215 | fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> { | ||
216 | let mut res = Vec::new(); | ||
217 | let mut placeholder_names = FxHashSet::default(); | ||
218 | let mut tokens = tokenize(pattern_str)?.into_iter(); | ||
219 | while let Some(token) = tokens.next() { | ||
220 | if token.kind == T![$] { | ||
221 | let placeholder = parse_placeholder(&mut tokens)?; | ||
222 | if !placeholder_names.insert(placeholder.ident.clone()) { | ||
223 | bail!("Placeholder `{}` repeats more than once", placeholder.ident); | ||
224 | } | ||
225 | res.push(PatternElement::Placeholder(placeholder)); | ||
226 | } else { | ||
227 | res.push(PatternElement::Token(token)); | ||
228 | } | ||
229 | } | ||
230 | Ok(res) | ||
231 | } | ||
232 | |||
233 | /// Checks for errors in a rule. e.g. the replace pattern referencing placeholders that the search | ||
234 | /// pattern didn't define. | ||
235 | fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> { | ||
236 | let mut defined_placeholders = FxHashSet::default(); | ||
237 | for p in &rule.pattern.tokens { | ||
238 | if let PatternElement::Placeholder(placeholder) = p { | ||
239 | defined_placeholders.insert(&placeholder.ident); | ||
240 | } | ||
241 | } | ||
242 | let mut undefined = Vec::new(); | ||
243 | for p in &rule.template.tokens { | ||
244 | if let PatternElement::Placeholder(placeholder) = p { | ||
245 | if !defined_placeholders.contains(&placeholder.ident) { | ||
246 | undefined.push(placeholder.ident.to_string()); | ||
247 | } | ||
248 | if !placeholder.constraints.is_empty() { | ||
249 | bail!("Replacement placeholders cannot have constraints"); | ||
250 | } | ||
251 | } | ||
252 | } | ||
253 | if !undefined.is_empty() { | ||
254 | bail!("Replacement contains undefined placeholders: {}", undefined.join(", ")); | ||
255 | } | ||
256 | Ok(()) | ||
257 | } | ||
258 | |||
259 | fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> { | ||
260 | let mut start = 0; | ||
261 | let (raw_tokens, errors) = syntax::tokenize(source); | ||
262 | if let Some(first_error) = errors.first() { | ||
263 | bail!("Failed to parse pattern: {}", first_error); | ||
264 | } | ||
265 | let mut tokens: Vec<Token> = Vec::new(); | ||
266 | for raw_token in raw_tokens { | ||
267 | let token_len = usize::from(raw_token.len); | ||
268 | tokens.push(Token { | ||
269 | kind: raw_token.kind, | ||
270 | text: SmolStr::new(&source[start..start + token_len]), | ||
271 | }); | ||
272 | start += token_len; | ||
273 | } | ||
274 | Ok(tokens) | ||
275 | } | ||
276 | |||
/// Parses the tokens that follow a `$`, producing a placeholder. Accepts either the bare form
/// `$name` or the braced form `${name:constraint1:constraint2}`.
fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
    let mut name = None;
    let mut constraints = Vec::new();
    if let Some(token) = tokens.next() {
        match token.kind {
            SyntaxKind::IDENT => {
                // Bare form: `$name`.
                name = Some(token.text);
            }
            T!['{'] => {
                // Braced form: an identifier first, then zero or more `:`-prefixed constraints,
                // terminated by `}`.
                let token =
                    tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
                if token.kind == SyntaxKind::IDENT {
                    name = Some(token.text);
                }
                loop {
                    let token = tokens
                        .next()
                        .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
                    match token.kind {
                        T![:] => {
                            constraints.push(parse_constraint(tokens)?);
                        }
                        T!['}'] => break,
                        _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
                    }
                }
            }
            _ => {
                bail!("Placeholders should either be $name or ${{name:constraints}}");
            }
        }
    }
    // Covers both `$` at end of input and a braced form whose first token wasn't an identifier.
    let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
    Ok(Placeholder::new(name, constraints))
}
312 | |||
313 | fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> { | ||
314 | let constraint_type = tokens | ||
315 | .next() | ||
316 | .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))? | ||
317 | .text | ||
318 | .to_string(); | ||
319 | match constraint_type.as_str() { | ||
320 | "kind" => { | ||
321 | expect_token(tokens, "(")?; | ||
322 | let t = tokens.next().ok_or_else(|| { | ||
323 | SsrError::new("Unexpected end of constraint while looking for kind") | ||
324 | })?; | ||
325 | if t.kind != SyntaxKind::IDENT { | ||
326 | bail!("Expected ident, found {:?} while parsing kind constraint", t.kind); | ||
327 | } | ||
328 | expect_token(tokens, ")")?; | ||
329 | Ok(Constraint::Kind(NodeKind::from(&t.text)?)) | ||
330 | } | ||
331 | "not" => { | ||
332 | expect_token(tokens, "(")?; | ||
333 | let sub = parse_constraint(tokens)?; | ||
334 | expect_token(tokens, ")")?; | ||
335 | Ok(Constraint::Not(Box::new(sub))) | ||
336 | } | ||
337 | x => bail!("Unsupported constraint type '{}'", x), | ||
338 | } | ||
339 | } | ||
340 | |||
341 | fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> { | ||
342 | if let Some(t) = tokens.next() { | ||
343 | if t.text == expected { | ||
344 | return Ok(()); | ||
345 | } | ||
346 | bail!("Expected {} found {}", expected, t.text); | ||
347 | } | ||
348 | bail!("Expected {} found end of stream", expected); | ||
349 | } | ||
350 | |||
351 | impl NodeKind { | ||
352 | fn from(name: &SmolStr) -> Result<NodeKind, SsrError> { | ||
353 | Ok(match name.as_str() { | ||
354 | "literal" => NodeKind::Literal, | ||
355 | _ => bail!("Unknown node kind '{}'", name), | ||
356 | }) | ||
357 | } | ||
358 | } | ||
359 | |||
360 | impl Placeholder { | ||
361 | fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self { | ||
362 | Self { | ||
363 | stand_in_name: format!("__placeholder_{}", name), | ||
364 | constraints, | ||
365 | ident: Var(name.to_string()), | ||
366 | } | ||
367 | } | ||
368 | } | ||
369 | |||
370 | impl Display for Var { | ||
371 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { | ||
372 | write!(f, "${}", self.0) | ||
373 | } | ||
374 | } | ||
375 | |||
#[cfg(test)]
mod tests {
    use super::*;

    // Checks that a full rule parses into the expected token/placeholder sequence on both sides
    // of the `==>>` delimiter.
    #[test]
    fn parser_happy_case() {
        // Helper: an expected literal token in the parse output.
        fn token(kind: SyntaxKind, text: &str) -> PatternElement {
            PatternElement::Token(Token { kind, text: SmolStr::new(text) })
        }
        // Helper: an expected placeholder (no constraints) in the parse output.
        fn placeholder(name: &str) -> PatternElement {
            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
        }
        let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
        // Search side: `foo($a, $b)`. Whitespace is preserved by parsing.
        assert_eq!(
            result.pattern.tokens,
            vec![
                token(SyntaxKind::IDENT, "foo"),
                token(T!['('], "("),
                placeholder("a"),
                token(T![,], ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("b"),
                token(T![')'], ")"),
            ]
        );
        // Replacement side: `bar($b, $a)` — placeholders swapped.
        assert_eq!(
            result.template.tokens,
            vec![
                token(SyntaxKind::IDENT, "bar"),
                token(T!['('], "("),
                placeholder("b"),
                token(T![,], ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("a"),
                token(T![')'], ")"),
            ]
        );
    }
}
diff --git a/crates/ide_ssr/src/replacing.rs b/crates/ide_ssr/src/replacing.rs new file mode 100644 index 000000000..06a94a46c --- /dev/null +++ b/crates/ide_ssr/src/replacing.rs | |||
@@ -0,0 +1,237 @@ | |||
1 | //! Code for applying replacement templates for matches that have previously been found. | ||
2 | |||
3 | use crate::{resolving::ResolvedRule, Match, SsrMatches}; | ||
4 | use itertools::Itertools; | ||
5 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
6 | use syntax::ast::{self, AstNode, AstToken}; | ||
7 | use syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize}; | ||
8 | use test_utils::mark; | ||
9 | use text_edit::TextEdit; | ||
10 | |||
11 | /// Returns a text edit that will replace each match in `matches` with its corresponding replacement | ||
12 | /// template. Placeholders in the template will have been substituted with whatever they matched to | ||
13 | /// in the original code. | ||
14 | pub(crate) fn matches_to_edit( | ||
15 | matches: &SsrMatches, | ||
16 | file_src: &str, | ||
17 | rules: &[ResolvedRule], | ||
18 | ) -> TextEdit { | ||
19 | matches_to_edit_at_offset(matches, file_src, 0.into(), rules) | ||
20 | } | ||
21 | |||
22 | fn matches_to_edit_at_offset( | ||
23 | matches: &SsrMatches, | ||
24 | file_src: &str, | ||
25 | relative_start: TextSize, | ||
26 | rules: &[ResolvedRule], | ||
27 | ) -> TextEdit { | ||
28 | let mut edit_builder = TextEdit::builder(); | ||
29 | for m in &matches.matches { | ||
30 | edit_builder.replace( | ||
31 | m.range.range.checked_sub(relative_start).unwrap(), | ||
32 | render_replace(m, file_src, rules), | ||
33 | ); | ||
34 | } | ||
35 | edit_builder.finish() | ||
36 | } | ||
37 | |||
/// Renders the replacement template for a single match, substituting placeholder values taken
/// from the matched source text.
struct ReplacementRenderer<'a> {
    match_info: &'a Match,
    // Full source text of the file being edited; placeholder values are sliced out of it.
    file_src: &'a str,
    rules: &'a [ResolvedRule],
    // The rule that produced `match_info`.
    rule: &'a ResolvedRule,
    // The replacement text accumulated so far.
    out: String,
    // Map from a range within `out` to a token in `template` that represents a placeholder. This is
    // used to validate that the generated source code doesn't split any placeholder expansions (see
    // below).
    placeholder_tokens_by_range: FxHashMap<TextRange, SyntaxToken>,
    // Which placeholder tokens need to be wrapped in parenthesis in order to ensure that when `out`
    // is parsed, placeholders don't get split. e.g. if a template of `$a.to_string()` results in `1
    // + 2.to_string()` then the placeholder value `1 + 2` was split and needs parenthesis.
    placeholder_tokens_requiring_parenthesis: FxHashSet<SyntaxToken>,
}
53 | |||
54 | fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String { | ||
55 | let rule = &rules[match_info.rule_index]; | ||
56 | let template = rule | ||
57 | .template | ||
58 | .as_ref() | ||
59 | .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern"); | ||
60 | let mut renderer = ReplacementRenderer { | ||
61 | match_info, | ||
62 | file_src, | ||
63 | rules, | ||
64 | rule, | ||
65 | out: String::new(), | ||
66 | placeholder_tokens_requiring_parenthesis: FxHashSet::default(), | ||
67 | placeholder_tokens_by_range: FxHashMap::default(), | ||
68 | }; | ||
69 | renderer.render_node(&template.node); | ||
70 | renderer.maybe_rerender_with_extra_parenthesis(&template.node); | ||
71 | for comment in &match_info.ignored_comments { | ||
72 | renderer.out.push_str(&comment.syntax().to_string()); | ||
73 | } | ||
74 | renderer.out | ||
75 | } | ||
76 | |||
impl ReplacementRenderer<'_> {
    /// Renders every child (node or token) of `node` in order.
    fn render_node_children(&mut self, node: &SyntaxNode) {
        for node_or_token in node.children_with_tokens() {
            self.render_node_or_token(&node_or_token);
        }
    }

    /// Dispatches to `render_token` or `render_node` depending on the element kind.
    fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) {
        match node_or_token {
            SyntaxElement::Token(token) => {
                self.render_token(&token);
            }
            SyntaxElement::Node(child_node) => {
                self.render_node(&child_node);
            }
        }
    }

    /// Renders `node`. Paths that were resolved when the template was parsed are emitted from
    /// their resolved form; everything else is rendered from its children.
    fn render_node(&mut self, node: &SyntaxNode) {
        if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
            self.out.push_str(&mod_path.to_string());
            // Emit everything except for the segment's name-ref, since we already effectively
            // emitted that as part of `mod_path`.
            if let Some(path) = ast::Path::cast(node.clone()) {
                if let Some(segment) = path.segment() {
                    for node_or_token in segment.syntax().children_with_tokens() {
                        if node_or_token.kind() != SyntaxKind::NAME_REF {
                            self.render_node_or_token(&node_or_token);
                        }
                    }
                }
            }
        } else {
            self.render_node_children(&node);
        }
    }

    /// Renders a single token. Placeholder stand-in tokens are replaced by the source text the
    /// placeholder matched (with nested matches recursively replaced); all other tokens are
    /// copied through verbatim.
    fn render_token(&mut self, token: &SyntaxToken) {
        if let Some(placeholder) = self.rule.get_placeholder(&token) {
            if let Some(placeholder_value) =
                self.match_info.placeholder_values.get(&placeholder.ident)
            {
                let range = &placeholder_value.range.range;
                let mut matched_text =
                    self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned();
                // If a method call is performed directly on the placeholder, then autoderef and
                // autoref will apply, so we can just substitute whatever the placeholder matched to
                // directly. If we're not applying a method call, then we need to add explicitly
                // deref and ref in order to match whatever was being done implicitly at the match
                // site.
                if !token_is_method_call_receiver(token)
                    && (placeholder_value.autoderef_count > 0
                        || placeholder_value.autoref_kind != ast::SelfParamKind::Owned)
                {
                    mark::hit!(replace_autoref_autoderef_capture);
                    let ref_kind = match placeholder_value.autoref_kind {
                        ast::SelfParamKind::Owned => "",
                        ast::SelfParamKind::Ref => "&",
                        ast::SelfParamKind::MutRef => "&mut ",
                    };
                    matched_text = format!(
                        "{}{}{}",
                        ref_kind,
                        "*".repeat(placeholder_value.autoderef_count),
                        matched_text
                    );
                }
                // Apply replacements for any matches nested inside this placeholder's value.
                let edit = matches_to_edit_at_offset(
                    &placeholder_value.inner_matches,
                    self.file_src,
                    range.start(),
                    self.rules,
                );
                let needs_parenthesis =
                    self.placeholder_tokens_requiring_parenthesis.contains(token);
                edit.apply(&mut matched_text);
                if needs_parenthesis {
                    self.out.push('(');
                }
                // Record where in `out` this placeholder's expansion landed, so that
                // `maybe_rerender_with_extra_parenthesis` can check it wasn't split on re-parse.
                self.placeholder_tokens_by_range.insert(
                    TextRange::new(
                        TextSize::of(&self.out),
                        TextSize::of(&self.out) + TextSize::of(&matched_text),
                    ),
                    token.clone(),
                );
                self.out.push_str(&matched_text);
                if needs_parenthesis {
                    self.out.push(')');
                }
            } else {
                // We validated that all placeholder references were valid before we
                // started, so this shouldn't happen.
                panic!(
                    "Internal error: replacement referenced unknown placeholder {}",
                    placeholder.ident
                );
            }
        } else {
            self.out.push_str(token.text());
        }
    }

    // Checks if the resulting code, when parsed doesn't split any placeholders due to different
    // order of operations between the search pattern and the replacement template. If any do, then
    // we rerender the template and wrap the problematic placeholders with parenthesis.
    fn maybe_rerender_with_extra_parenthesis(&mut self, template: &SyntaxNode) {
        if let Some(node) = parse_as_kind(&self.out, template.kind()) {
            // Any placeholder range that exactly matches a parsed node was not split; remove it.
            self.remove_node_ranges(node);
            if self.placeholder_tokens_by_range.is_empty() {
                return;
            }
            // The ranges left behind belong to placeholders that got split — re-render with
            // parenthesis around them.
            self.placeholder_tokens_requiring_parenthesis =
                self.placeholder_tokens_by_range.values().cloned().collect();
            self.out.clear();
            self.render_node(template);
        }
    }

    /// Recursively removes the text range of `node` and all of its descendants from
    /// `placeholder_tokens_by_range`.
    fn remove_node_ranges(&mut self, node: SyntaxNode) {
        self.placeholder_tokens_by_range.remove(&node.text_range());
        for child in node.children() {
            self.remove_node_ranges(child);
        }
    }
}
203 | |||
204 | /// Returns whether token is the receiver of a method call. Note, being within the receiver of a | ||
205 | /// method call doesn't count. e.g. if the token is `$a`, then `$a.foo()` will return true, while | ||
206 | /// `($a + $b).foo()` or `x.foo($a)` will return false. | ||
207 | fn token_is_method_call_receiver(token: &SyntaxToken) -> bool { | ||
208 | // Find the first method call among the ancestors of `token`, then check if the only token | ||
209 | // within the receiver is `token`. | ||
210 | if let Some(receiver) = | ||
211 | token.ancestors().find_map(ast::MethodCallExpr::cast).and_then(|call| call.receiver()) | ||
212 | { | ||
213 | let tokens = receiver.syntax().descendants_with_tokens().filter_map(|node_or_token| { | ||
214 | match node_or_token { | ||
215 | SyntaxElement::Token(t) => Some(t), | ||
216 | _ => None, | ||
217 | } | ||
218 | }); | ||
219 | if let Some((only_token,)) = tokens.collect_tuple() { | ||
220 | return only_token == *token; | ||
221 | } | ||
222 | } | ||
223 | false | ||
224 | } | ||
225 | |||
226 | fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option<SyntaxNode> { | ||
227 | if ast::Expr::can_cast(kind) { | ||
228 | if let Ok(expr) = ast::Expr::parse(code) { | ||
229 | return Some(expr.syntax().clone()); | ||
230 | } | ||
231 | } else if ast::Item::can_cast(kind) { | ||
232 | if let Ok(item) = ast::Item::parse(code) { | ||
233 | return Some(item.syntax().clone()); | ||
234 | } | ||
235 | } | ||
236 | None | ||
237 | } | ||
diff --git a/crates/ide_ssr/src/resolving.rs b/crates/ide_ssr/src/resolving.rs new file mode 100644 index 000000000..14e5a3b69 --- /dev/null +++ b/crates/ide_ssr/src/resolving.rs | |||
@@ -0,0 +1,301 @@ | |||
1 | //! This module is responsible for resolving paths within rules. | ||
2 | |||
3 | use crate::errors::error; | ||
4 | use crate::{parsing, SsrError}; | ||
5 | use ide_db::base_db::FilePosition; | ||
6 | use parsing::Placeholder; | ||
7 | use rustc_hash::FxHashMap; | ||
8 | use syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken}; | ||
9 | use test_utils::mark; | ||
10 | |||
/// The scope in which paths from a pattern are resolved, anchored at the position where SSR was
/// invoked.
pub(crate) struct ResolutionScope<'db> {
    scope: hir::SemanticsScope<'db>,
    // The syntax node the scope was derived from; used e.g. to find the enclosing function.
    node: SyntaxNode,
}
15 | |||
/// A rule (search pattern plus optional replacement template) whose paths have been resolved.
pub(crate) struct ResolvedRule {
    pub(crate) pattern: ResolvedPattern,
    // `None` for search-only rules (no replacement template).
    pub(crate) template: Option<ResolvedPattern>,
    // Position of this rule within the rule set; matches refer back to it by index.
    pub(crate) index: usize,
}
21 | |||
/// A parsed pattern together with the resolution information gathered from its syntax tree.
pub(crate) struct ResolvedPattern {
    // Map from a placeholder's stand-in identifier to the placeholder itself.
    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
    // Root node of the pattern's syntax tree.
    pub(crate) node: SyntaxNode,
    // Paths in `node` that we've resolved.
    pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
    // Call expressions written in UFCS form (e.g. `Foo::bar(x)`), keyed by the call's node.
    pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo>,
    // Whether the pattern contains a `self` token; if so, searching is restricted to the
    // function in which SSR was invoked.
    pub(crate) contains_self: bool,
}
30 | |||
/// A path from the pattern together with what it resolved to.
pub(crate) struct ResolvedPath {
    pub(crate) resolution: hir::PathResolution,
    /// The depth of the ast::Path that was resolved within the pattern.
    pub(crate) depth: u32,
}
36 | |||
/// Information about a UFCS-style call (e.g. `Foo::bar(x)`) found in a pattern.
pub(crate) struct UfcsCallInfo {
    pub(crate) call_expr: ast::CallExpr,
    // The function the call's path resolved to.
    pub(crate) function: hir::Function,
    // Resolved type of the path's qualifier, when the qualifier resolved to an ADT.
    pub(crate) qualifier_type: Option<hir::Type>,
}
42 | |||
43 | impl ResolvedRule { | ||
44 | pub(crate) fn new( | ||
45 | rule: parsing::ParsedRule, | ||
46 | resolution_scope: &ResolutionScope, | ||
47 | index: usize, | ||
48 | ) -> Result<ResolvedRule, SsrError> { | ||
49 | let resolver = | ||
50 | Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in }; | ||
51 | let resolved_template = if let Some(template) = rule.template { | ||
52 | Some(resolver.resolve_pattern_tree(template)?) | ||
53 | } else { | ||
54 | None | ||
55 | }; | ||
56 | Ok(ResolvedRule { | ||
57 | pattern: resolver.resolve_pattern_tree(rule.pattern)?, | ||
58 | template: resolved_template, | ||
59 | index, | ||
60 | }) | ||
61 | } | ||
62 | |||
63 | pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> { | ||
64 | if token.kind() != SyntaxKind::IDENT { | ||
65 | return None; | ||
66 | } | ||
67 | self.pattern.placeholders_by_stand_in.get(token.text()) | ||
68 | } | ||
69 | } | ||
70 | |||
/// Resolves the paths within a single pattern tree.
struct Resolver<'a, 'db> {
    resolution_scope: &'a ResolutionScope<'db>,
    // Placeholders keyed by their stand-in identifier; a path containing a placeholder segment
    // is not resolved.
    placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
}
75 | |||
impl Resolver<'_, '_> {
    /// Resolves all resolvable paths in `pattern` and derives the pattern-level facts
    /// (UFCS calls, presence of `self`) from the results.
    fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
        use syntax::ast::AstNode;
        use syntax::{SyntaxElement, T};
        let mut resolved_paths = FxHashMap::default();
        self.resolve(pattern.clone(), 0, &mut resolved_paths)?;
        // A resolved path whose grandparent is a call expression and which resolved to an
        // associated function is a UFCS call, e.g. `Foo::bar(x)`.
        let ufcs_function_calls = resolved_paths
            .iter()
            .filter_map(|(path_node, resolved)| {
                if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) {
                    if let Some(call_expr) = ast::CallExpr::cast(grandparent.clone()) {
                        if let hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) =
                            resolved.resolution
                        {
                            let qualifier_type = self.resolution_scope.qualifier_type(path_node);
                            return Some((
                                grandparent,
                                UfcsCallInfo { call_expr, function, qualifier_type },
                            ));
                        }
                    }
                }
                None
            })
            .collect();
        let contains_self =
            pattern.descendants_with_tokens().any(|node_or_token| match node_or_token {
                SyntaxElement::Token(t) => t.kind() == T![self],
                _ => false,
            });
        Ok(ResolvedPattern {
            node: pattern,
            resolved_paths,
            placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
            ufcs_function_calls,
            contains_self,
        })
    }

    /// Recursively walks `node`, recording resolutions for the outermost resolvable paths into
    /// `resolved_paths`. `depth` counts how far below the pattern root the node sits.
    fn resolve(
        &self,
        node: SyntaxNode,
        depth: u32,
        resolved_paths: &mut FxHashMap<SyntaxNode, ResolvedPath>,
    ) -> Result<(), SsrError> {
        use syntax::ast::AstNode;
        if let Some(path) = ast::Path::cast(node.clone()) {
            if is_self(&path) {
                // Self cannot be resolved like other paths.
                return Ok(());
            }
            // Check if this is an appropriate place in the path to resolve. If the path is
            // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains
            // a placeholder. e.g. `a::$b::c` then we want to resolve `a`.
            if !path_contains_type_arguments(path.qualifier())
                && !self.path_contains_placeholder(&path)
            {
                let resolution = self
                    .resolution_scope
                    .resolve_path(&path)
                    .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
                if self.ok_to_use_path_resolution(&resolution) {
                    resolved_paths.insert(node, ResolvedPath { resolution, depth });
                    return Ok(());
                }
            }
        }
        // Either the node wasn't a resolvable path or its resolution was rejected; recurse into
        // the children (e.g. to resolve a path's qualifier).
        for node in node.children() {
            self.resolve(node, depth + 1, resolved_paths)?;
        }
        Ok(())
    }

    /// Returns whether `path` contains a placeholder, but ignores any placeholders within type
    /// arguments.
    fn path_contains_placeholder(&self, path: &ast::Path) -> bool {
        if let Some(segment) = path.segment() {
            if let Some(name_ref) = segment.name_ref() {
                if self.placeholders_by_stand_in.contains_key(name_ref.text()) {
                    return true;
                }
            }
        }
        // Check the qualifier's segments too, e.g. the `$b` in `a::$b::c`.
        if let Some(qualifier) = path.qualifier() {
            return self.path_contains_placeholder(&qualifier);
        }
        false
    }

    /// Decides whether a successful resolution should actually be recorded. Some resolutions are
    /// deliberately discarded so that matching falls back to syntactic comparison.
    fn ok_to_use_path_resolution(&self, resolution: &hir::PathResolution) -> bool {
        match resolution {
            hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) => {
                if function.self_param(self.resolution_scope.scope.db).is_some() {
                    // If we don't use this path resolution, then we won't be able to match method
                    // calls. e.g. `Foo::bar($s)` should match `x.bar()`.
                    true
                } else {
                    mark::hit!(replace_associated_trait_default_function_call);
                    false
                }
            }
            hir::PathResolution::AssocItem(_) => {
                // Not a function. Could be a constant or an associated type.
                mark::hit!(replace_associated_trait_constant);
                false
            }
            _ => true,
        }
    }
}
186 | |||
impl<'db> ResolutionScope<'db> {
    /// Builds a resolution scope anchored at `resolve_context` — the position at which SSR was
    /// invoked.
    pub(crate) fn new(
        sema: &hir::Semantics<'db, ide_db::RootDatabase>,
        resolve_context: FilePosition,
    ) -> ResolutionScope<'db> {
        use syntax::ast::AstNode;
        let file = sema.parse(resolve_context.file_id);
        // Find a node at the requested position, falling back to the whole file.
        let node = file
            .syntax()
            .token_at_offset(resolve_context.offset)
            .left_biased()
            .map(|token| token.parent())
            .unwrap_or_else(|| file.syntax().clone());
        let node = pick_node_for_resolution(node);
        let scope = sema.scope(&node);
        ResolutionScope { scope, node }
    }

    /// Returns the function in which SSR was invoked, if any.
    pub(crate) fn current_function(&self) -> Option<SyntaxNode> {
        self.node.ancestors().find(|node| node.kind() == SyntaxKind::FN)
    }

    /// Resolves `path` within this scope, falling back to searching the qualifier's associated
    /// items when whole-path resolution fails.
    fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
        // First try resolving the whole path. This will work for things like
        // `std::collections::HashMap`, but will fail for things like
        // `std::collections::HashMap::new`.
        if let Some(resolution) = self.scope.speculative_resolve(&path) {
            return Some(resolution);
        }
        // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if
        // that succeeds, then iterate through the candidates on the resolved type with the provided
        // name.
        let resolved_qualifier = self.scope.speculative_resolve(&path.qualifier()?)?;
        if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
            let name = path.segment()?.name_ref()?;
            adt.ty(self.scope.db).iterate_path_candidates(
                self.scope.db,
                self.scope.module()?.krate(),
                &self.scope.traits_in_scope(),
                None,
                |_ty, assoc_item| {
                    // Accept the first associated item whose name matches the final segment.
                    let item_name = assoc_item.name(self.scope.db)?;
                    if item_name.to_string().as_str() == name.text() {
                        Some(hir::PathResolution::AssocItem(assoc_item))
                    } else {
                        None
                    }
                },
            )
        } else {
            None
        }
    }

    /// Returns the type of `path`'s qualifier, when the qualifier resolves to an ADT.
    fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type> {
        use syntax::ast::AstNode;
        if let Some(path) = ast::Path::cast(path.clone()) {
            if let Some(qualifier) = path.qualifier() {
                if let Some(resolved_qualifier) = self.resolve_path(&qualifier) {
                    if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
                        return Some(adt.ty(self.scope.db));
                    }
                }
            }
        }
        None
    }
}
257 | |||
258 | fn is_self(path: &ast::Path) -> bool { | ||
259 | path.segment().map(|segment| segment.self_token().is_some()).unwrap_or(false) | ||
260 | } | ||
261 | |||
262 | /// Returns a suitable node for resolving paths in the current scope. If we create a scope based on | ||
263 | /// a statement node, then we can't resolve local variables that were defined in the current scope | ||
264 | /// (only in parent scopes). So we find another node, ideally a child of the statement where local | ||
265 | /// variable resolution is permitted. | ||
266 | fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode { | ||
267 | match node.kind() { | ||
268 | SyntaxKind::EXPR_STMT => { | ||
269 | if let Some(n) = node.first_child() { | ||
270 | mark::hit!(cursor_after_semicolon); | ||
271 | return n; | ||
272 | } | ||
273 | } | ||
274 | SyntaxKind::LET_STMT | SyntaxKind::IDENT_PAT => { | ||
275 | if let Some(next) = node.next_sibling() { | ||
276 | return pick_node_for_resolution(next); | ||
277 | } | ||
278 | } | ||
279 | SyntaxKind::NAME => { | ||
280 | if let Some(parent) = node.parent() { | ||
281 | return pick_node_for_resolution(parent); | ||
282 | } | ||
283 | } | ||
284 | _ => {} | ||
285 | } | ||
286 | node | ||
287 | } | ||
288 | |||
289 | /// Returns whether `path` or any of its qualifiers contains type arguments. | ||
290 | fn path_contains_type_arguments(path: Option<ast::Path>) -> bool { | ||
291 | if let Some(path) = path { | ||
292 | if let Some(segment) = path.segment() { | ||
293 | if segment.generic_arg_list().is_some() { | ||
294 | mark::hit!(type_arguments_within_path); | ||
295 | return true; | ||
296 | } | ||
297 | } | ||
298 | return path_contains_type_arguments(path.qualifier()); | ||
299 | } | ||
300 | false | ||
301 | } | ||
diff --git a/crates/ide_ssr/src/search.rs b/crates/ide_ssr/src/search.rs new file mode 100644 index 000000000..836eb94b2 --- /dev/null +++ b/crates/ide_ssr/src/search.rs | |||
@@ -0,0 +1,282 @@ | |||
1 | //! Searching for matches. | ||
2 | |||
3 | use crate::{ | ||
4 | matching, | ||
5 | resolving::{ResolvedPath, ResolvedPattern, ResolvedRule}, | ||
6 | Match, MatchFinder, | ||
7 | }; | ||
8 | use ide_db::{ | ||
9 | base_db::{FileId, FileRange}, | ||
10 | defs::Definition, | ||
11 | search::{SearchScope, UsageSearchResult}, | ||
12 | }; | ||
13 | use rustc_hash::FxHashSet; | ||
14 | use syntax::{ast, AstNode, SyntaxKind, SyntaxNode}; | ||
15 | use test_utils::mark; | ||
16 | |||
/// A cache for the results of find_usages. This is for when we have multiple patterns that have the
/// same path. e.g. if the pattern was `foo::Bar` that can parse as a path, an expression, a type
/// and as a pattern. In each, the usages of `foo::Bar` are the same and we'd like to avoid finding
/// them more than once.
#[derive(Default)]
pub(crate) struct UsageCache {
    // A flat list rather than a map: lookups scan it linearly (see `UsageCache::find`), which is
    // fine for the expected handful of entries and avoids requiring `Hash` for `Definition`.
    usages: Vec<(Definition, UsageSearchResult)>,
}
25 | |||
impl<'db> MatchFinder<'db> {
    /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
    /// replacement impossible, so further processing is required in order to properly nest matches
    /// and remove overlapping matches. This is done in the `nesting` module.
    pub(crate) fn find_matches_for_rule(
        &self,
        rule: &ResolvedRule,
        usage_cache: &mut UsageCache,
        matches_out: &mut Vec<Match>,
    ) {
        if rule.pattern.contains_self {
            // If the pattern contains `self` we restrict the scope of the search to just the
            // current method. No other method can reference the same `self`. This makes the
            // behavior of `self` consistent with other variables.
            if let Some(current_function) = self.resolution_scope.current_function() {
                self.slow_scan_node(&current_function, rule, &None, matches_out);
            }
            return;
        }
        if pick_path_for_usages(&rule.pattern).is_none() {
            // No resolvable path to anchor a usages-based search — fall back to scanning every
            // node of every file in scope.
            self.slow_scan(rule, matches_out);
            return;
        }
        self.find_matches_for_pattern_tree(rule, &rule.pattern, usage_cache, matches_out);
    }

    /// Fast path: finds usages of a path contained in `pattern`, then attempts a full match at
    /// each usage site instead of scanning every node of every file.
    fn find_matches_for_pattern_tree(
        &self,
        rule: &ResolvedRule,
        pattern: &ResolvedPattern,
        usage_cache: &mut UsageCache,
        matches_out: &mut Vec<Match>,
    ) {
        if let Some(resolved_path) = pick_path_for_usages(pattern) {
            let definition: Definition = resolved_path.resolution.clone().into();
            for file_range in self.find_usages(usage_cache, definition).file_ranges() {
                if let Some(node_to_match) = self.find_node_to_match(resolved_path, file_range) {
                    if !is_search_permitted_ancestors(&node_to_match) {
                        mark::hit!(use_declaration_with_braces);
                        continue;
                    }
                    self.try_add_match(rule, &node_to_match, &None, matches_out);
                }
            }
        }
    }

    /// Maps a found usage (`file_range`) back to the node that should be matched against the
    /// whole pattern, by walking up as many ancestors as the path was deep within the pattern.
    fn find_node_to_match(
        &self,
        resolved_path: &ResolvedPath,
        file_range: FileRange,
    ) -> Option<SyntaxNode> {
        let file = self.sema.parse(file_range.file_id);
        let depth = resolved_path.depth as usize;
        let offset = file_range.range.start();
        if let Some(path) =
            self.sema.find_node_at_offset_with_descend::<ast::Path>(file.syntax(), offset)
        {
            self.sema.ancestors_with_macros(path.syntax().clone()).skip(depth).next()
        } else if let Some(path) =
            self.sema.find_node_at_offset_with_descend::<ast::MethodCallExpr>(file.syntax(), offset)
        {
            // If the pattern contained a path and we found a reference to that path that wasn't
            // itself a path, but was a method call, then we need to adjust how far up to try
            // matching by how deep the path was within a CallExpr. The structure would have been
            // CallExpr, PathExpr, Path - i.e. a depth offset of 2. We don't need to check if the
            // path was part of a CallExpr because if it wasn't then all that will happen is we'll
            // fail to match, which is the desired behavior.
            const PATH_DEPTH_IN_CALL_EXPR: usize = 2;
            if depth < PATH_DEPTH_IN_CALL_EXPR {
                return None;
            }
            self.sema
                .ancestors_with_macros(path.syntax().clone())
                .skip(depth - PATH_DEPTH_IN_CALL_EXPR)
                .next()
        } else {
            None
        }
    }

    /// Returns the usages of `definition` within the search scope, computing and caching them on
    /// first request.
    fn find_usages<'a>(
        &self,
        usage_cache: &'a mut UsageCache,
        definition: Definition,
    ) -> &'a UsageSearchResult {
        // Logically if a lookup succeeds we should just return it. Unfortunately returning it would
        // extend the lifetime of the borrow, then we wouldn't be able to do the insertion on a
        // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
        // lookups in the case of a cache hit.
        if usage_cache.find(&definition).is_none() {
            let usages = definition.usages(&self.sema).in_scope(self.search_scope()).all();
            usage_cache.usages.push((definition, usages));
            return &usage_cache.usages.last().unwrap().1;
        }
        usage_cache.find(&definition).unwrap()
    }

    /// Returns the scope within which we want to search. We don't want an unrestricted search
    /// scope, since we don't want to find references in external dependencies.
    fn search_scope(&self) -> SearchScope {
        // FIXME: We should ideally have a test that checks that we edit local roots and not library
        // roots. This probably would require some changes to fixtures, since currently everything
        // seems to get put into a single source root.
        let mut files = Vec::new();
        self.search_files_do(|file_id| {
            files.push(file_id);
        });
        SearchScope::files(&files)
    }

    /// Slow path: parses every file in scope and scans its whole syntax tree for matches.
    fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
        self.search_files_do(|file_id| {
            let file = self.sema.parse(file_id);
            let code = file.syntax();
            self.slow_scan_node(code, rule, &None, matches_out);
        })
    }

    /// Invokes `callback` for each file to search: every file of every local source root when
    /// unrestricted, otherwise the deduplicated files referenced by `restrict_ranges`.
    fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
        if self.restrict_ranges.is_empty() {
            // Unrestricted search.
            use ide_db::base_db::SourceDatabaseExt;
            use ide_db::symbol_index::SymbolsDatabase;
            for &root in self.sema.db.local_roots().iter() {
                let sr = self.sema.db.source_root(root);
                for file_id in sr.iter() {
                    callback(file_id);
                }
            }
        } else {
            // Search is restricted, deduplicate file IDs (generally only one).
            let mut files = FxHashSet::default();
            for range in &self.restrict_ranges {
                if files.insert(range.file_id) {
                    callback(range.file_id);
                }
            }
        }
    }

    /// Recursively tries to match `rule` at `code` and every node below it, also descending into
    /// macro expansions.
    fn slow_scan_node(
        &self,
        code: &SyntaxNode,
        rule: &ResolvedRule,
        restrict_range: &Option<FileRange>,
        matches_out: &mut Vec<Match>,
    ) {
        if !is_search_permitted(code) {
            return;
        }
        self.try_add_match(rule, &code, restrict_range, matches_out);
        // If we've got a macro call, we already tried matching it pre-expansion, which is the only
        // way to match the whole macro, now try expanding it and matching the expansion.
        if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
            if let Some(expanded) = self.sema.expand(&macro_call) {
                if let Some(tt) = macro_call.token_tree() {
                    // When matching within a macro expansion, we only want to allow matches of
                    // nodes that originated entirely from within the token tree of the macro call.
                    // i.e. we don't want to match something that came from the macro itself.
                    self.slow_scan_node(
                        &expanded,
                        rule,
                        &Some(self.sema.original_range(tt.syntax())),
                        matches_out,
                    );
                }
            }
        }
        for child in code.children() {
            self.slow_scan_node(&child, rule, restrict_range, matches_out);
        }
    }

    /// Attempts a single match of `rule` at `code`, honoring any user-selected range
    /// restrictions, and records the match on success.
    fn try_add_match(
        &self,
        rule: &ResolvedRule,
        code: &SyntaxNode,
        restrict_range: &Option<FileRange>,
        matches_out: &mut Vec<Match>,
    ) {
        if !self.within_range_restrictions(code) {
            mark::hit!(replace_nonpath_within_selection);
            return;
        }
        if let Ok(m) = matching::get_match(false, rule, code, restrict_range, &self.sema) {
            matches_out.push(m);
        }
    }

    /// Returns whether `code` is within one of our range restrictions if we have any. No range
    /// restrictions is considered unrestricted and always returns true.
    fn within_range_restrictions(&self, code: &SyntaxNode) -> bool {
        if self.restrict_ranges.is_empty() {
            // There is no range restriction.
            return true;
        }
        let node_range = self.sema.original_range(code);
        for range in &self.restrict_ranges {
            if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
                return true;
            }
        }
        false
    }
}
232 | |||
233 | /// Returns whether we support matching within `node` and all of its ancestors. | ||
234 | fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool { | ||
235 | if let Some(parent) = node.parent() { | ||
236 | if !is_search_permitted_ancestors(&parent) { | ||
237 | return false; | ||
238 | } | ||
239 | } | ||
240 | is_search_permitted(node) | ||
241 | } | ||
242 | |||
243 | /// Returns whether we support matching within this kind of node. | ||
244 | fn is_search_permitted(node: &SyntaxNode) -> bool { | ||
245 | // FIXME: Properly handle use declarations. At the moment, if our search pattern is `foo::bar` | ||
246 | // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`. | ||
247 | // However we'll then replace just the part we matched `bar`. We probably need to instead remove | ||
248 | // `bar` and insert a new use declaration. | ||
249 | node.kind() != SyntaxKind::USE | ||
250 | } | ||
251 | |||
252 | impl UsageCache { | ||
253 | fn find(&mut self, definition: &Definition) -> Option<&UsageSearchResult> { | ||
254 | // We expect a very small number of cache entries (generally 1), so a linear scan should be | ||
255 | // fast enough and avoids the need to implement Hash for Definition. | ||
256 | for (d, refs) in &self.usages { | ||
257 | if d == definition { | ||
258 | return Some(refs); | ||
259 | } | ||
260 | } | ||
261 | None | ||
262 | } | ||
263 | } | ||
264 | |||
265 | /// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't | ||
266 | /// something that we can find references to. We then somewhat arbitrarily pick the path that is the | ||
267 | /// longest as this is hopefully more likely to be less common, making it faster to find. | ||
268 | fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> { | ||
269 | // FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are | ||
270 | // private to the current module, then we definitely would want to pick them over say a path | ||
271 | // from std. Possibly we should go further than this and intersect the search scopes for all | ||
272 | // resolved paths then search only in that scope. | ||
273 | pattern | ||
274 | .resolved_paths | ||
275 | .iter() | ||
276 | .filter(|(_, p)| { | ||
277 | !matches!(p.resolution, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_))) | ||
278 | }) | ||
279 | .map(|(node, resolved)| (node.text().len(), resolved)) | ||
280 | .max_by(|(a, _), (b, _)| a.cmp(b)) | ||
281 | .map(|(_, resolved)| resolved) | ||
282 | } | ||
diff --git a/crates/ide_ssr/src/tests.rs b/crates/ide_ssr/src/tests.rs new file mode 100644 index 000000000..a3ea44f23 --- /dev/null +++ b/crates/ide_ssr/src/tests.rs | |||
@@ -0,0 +1,1371 @@ | |||
1 | use crate::{MatchFinder, SsrRule}; | ||
2 | use expect_test::{expect, Expect}; | ||
3 | use ide_db::base_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt}; | ||
4 | use rustc_hash::FxHashSet; | ||
5 | use std::sync::Arc; | ||
6 | use test_utils::{mark, RangeOrOffset}; | ||
7 | |||
8 | fn parse_error_text(query: &str) -> String { | ||
9 | format!("{}", query.parse::<SsrRule>().unwrap_err()) | ||
10 | } | ||
11 | |||
// Parser error reporting: each test feeds a malformed rule to `parse_error_text` and pins the
// exact user-facing error message.

#[test]
fn parser_empty_query() {
    assert_eq!(parse_error_text(""), "Parse error: Cannot find delimiter `==>>`");
}

#[test]
fn parser_no_delimiter() {
    assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delimiter `==>>`");
}

#[test]
fn parser_two_delimiters() {
    assert_eq!(
        parse_error_text("foo() ==>> a ==>> b "),
        "Parse error: More than one delimiter found"
    );
}

#[test]
fn parser_repeated_name() {
    assert_eq!(
        parse_error_text("foo($a, $a) ==>>"),
        "Parse error: Placeholder `$a` repeats more than once"
    );
}

#[test]
fn parser_invalid_pattern() {
    assert_eq!(
        parse_error_text(" ==>> ()"),
        "Parse error: Not a valid Rust expression, type, item, path or pattern"
    );
}

#[test]
fn parser_invalid_template() {
    assert_eq!(
        parse_error_text("() ==>> )"),
        "Parse error: Not a valid Rust expression, type, item, path or pattern"
    );
}

#[test]
fn parser_undefined_placeholder_in_replacement() {
    assert_eq!(
        parse_error_text("42 ==>> $a"),
        "Parse error: Replacement contains undefined placeholders: $a"
    );
}
61 | |||
62 | /// `code` may optionally contain a cursor marker `$0`. If it doesn't, then the position will be | ||
63 | /// the start of the file. If there's a second cursor marker, then we'll return a single range. | ||
64 | pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec<FileRange>) { | ||
65 | use ide_db::base_db::fixture::WithFixture; | ||
66 | use ide_db::symbol_index::SymbolsDatabase; | ||
67 | let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) { | ||
68 | ide_db::RootDatabase::with_range_or_offset(code) | ||
69 | } else { | ||
70 | let (db, file_id) = ide_db::RootDatabase::with_single_file(code); | ||
71 | (db, file_id, RangeOrOffset::Offset(0.into())) | ||
72 | }; | ||
73 | let selections; | ||
74 | let position; | ||
75 | match range_or_offset { | ||
76 | RangeOrOffset::Range(range) => { | ||
77 | position = FilePosition { file_id, offset: range.start() }; | ||
78 | selections = vec![FileRange { file_id, range: range }]; | ||
79 | } | ||
80 | RangeOrOffset::Offset(offset) => { | ||
81 | position = FilePosition { file_id, offset }; | ||
82 | selections = vec![]; | ||
83 | } | ||
84 | } | ||
85 | let mut local_roots = FxHashSet::default(); | ||
86 | local_roots.insert(ide_db::base_db::fixture::WORKSPACE); | ||
87 | db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); | ||
88 | (db, position, selections) | ||
89 | } | ||
90 | |||
/// Asserts that applying the single SSR `rule` to `input` produces exactly `expected`.
fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
    assert_ssr_transforms(&[rule], input, expected);
}
94 | |||
/// Parses and applies all `rules` to `input`, then compares the edited text of the file at the
/// cursor position against `expected`. Panics if no rule produced any edit.
fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
    let (db, position, selections) = single_file(input);
    let mut match_finder = MatchFinder::in_context(&db, position, selections);
    for rule in rules {
        let rule: SsrRule = rule.parse().unwrap();
        match_finder.add_rule(rule).unwrap();
    }
    let edits = match_finder.edits();
    if edits.is_empty() {
        panic!("No edits were made");
    }
    // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
    // stuff.
    let mut actual = db.file_text(position.file_id).to_string();
    edits[&position.file_id].apply(&mut actual);
    expected.assert_eq(&actual);
}
112 | |||
/// Dumps, for diagnostic purposes, every node whose text is exactly `snippet` together with its
/// match-debug information. Called by the assertion helpers just before a failure.
fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet: &str) {
    let debug_info = match_finder.debug_where_text_equal(file_id, snippet);
    println!(
        "Match debug info: {} nodes had text exactly equal to '{}'",
        debug_info.len(),
        snippet
    );
    for (index, d) in debug_info.iter().enumerate() {
        println!("Node #{}\n{:#?}\n", index, d);
    }
}
124 | |||
/// Asserts that searching `code` for `pattern` yields exactly the snippets in `expected`.
fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
    let (db, position, selections) = single_file(code);
    let mut match_finder = MatchFinder::in_context(&db, position, selections);
    match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
    let matched_strings: Vec<String> =
        match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
    // On mismatch, dump debug info for the first expected snippet before failing, to help
    // diagnose why it didn't match.
    if matched_strings != expected && !expected.is_empty() {
        print_match_debug_info(&match_finder, position.file_id, &expected[0]);
    }
    assert_eq!(matched_strings, expected);
}
136 | |||
/// Asserts that searching `code` for `pattern` yields no matches at all.
fn assert_no_match(pattern: &str, code: &str) {
    let (db, position, selections) = single_file(code);
    let mut match_finder = MatchFinder::in_context(&db, position, selections);
    match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
    let matches = match_finder.matches().flattened().matches;
    if !matches.is_empty() {
        // Dump debug info for the first unexpected match before failing.
        print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text());
        panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches);
    }
}
147 | |||
148 | fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) { | ||
149 | let (db, position, selections) = single_file(code); | ||
150 | let mut match_finder = MatchFinder::in_context(&db, position, selections); | ||
151 | match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); | ||
152 | let mut reasons = Vec::new(); | ||
153 | for d in match_finder.debug_where_text_equal(position.file_id, snippet) { | ||
154 | if let Some(reason) = d.match_failure_reason() { | ||
155 | reasons.push(reason.to_owned()); | ||
156 | } | ||
157 | } | ||
158 | assert_eq!(reasons, vec![expected_reason]); | ||
159 | } | ||
160 | |||
// Basic matching and replacement of statements, blocks and expressions.

#[test]
fn ssr_let_stmt_in_macro_match() {
    assert_matches(
        "let a = 0",
        r#"
macro_rules! m1 { ($a:stmt) => {$a}; }
fn f() {m1!{ let a = 0 };}"#,
        // FIXME: Whitespace is not part of the matched block
        &["leta=0"],
    );
}

#[test]
fn ssr_let_stmt_in_fn_match() {
    assert_matches("let $a = 10;", "fn main() { let x = 10; x }", &["let x = 10;"]);
    assert_matches("let $a = $b;", "fn main() { let x = 10; x }", &["let x = 10;"]);
}

#[test]
fn ssr_block_expr_match() {
    assert_matches("{ let $a = $b; }", "fn main() { let x = 10; }", &["{ let x = 10; }"]);
    assert_matches("{ let $a = $b; $c }", "fn main() { let x = 10; x }", &["{ let x = 10; x }"]);
}

#[test]
fn ssr_let_stmt_replace() {
    // Pattern and template with trailing semicolon
    assert_ssr_transform(
        "let $a = $b; ==>> let $a = 11;",
        "fn main() { let x = 10; x }",
        expect![["fn main() { let x = 11; x }"]],
    );
}

#[test]
fn ssr_let_stmt_replace_expr() {
    // Trailing semicolon should be dropped from the new expression
    assert_ssr_transform(
        "let $a = $b; ==>> $b",
        "fn main() { let x = 10; }",
        expect![["fn main() { 10 }"]],
    );
}

#[test]
fn ssr_blockexpr_replace_stmt_with_stmt() {
    assert_ssr_transform(
        "if $a() {$b;} ==>> $b;",
        "{
if foo() {
bar();
}
Ok(())
}",
        expect![[r#"{
bar();
Ok(())
}"#]],
    );
}

#[test]
fn ssr_blockexpr_match_trailing_expr() {
    assert_matches(
        "if $a() {$b;}",
        "{
if foo() {
bar();
}
}",
        &["if foo() {
bar();
}"],
    );
}

#[test]
fn ssr_blockexpr_replace_trailing_expr_with_stmt() {
    assert_ssr_transform(
        "if $a() {$b;} ==>> $b;",
        "{
if foo() {
bar();
}
}",
        expect![["{
bar();
}"]],
    );
}

#[test]
fn ssr_function_to_method() {
    assert_ssr_transform(
        "my_function($a, $b) ==>> ($a).my_method($b)",
        "fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }",
        expect![["fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }"]],
    )
}

#[test]
fn ssr_nested_function() {
    assert_ssr_transform(
        "foo($a, $b, $c) ==>> bar($c, baz($a, $b))",
        r#"
//- /lib.rs crate:foo
fn foo() {}
fn bar() {}
fn baz() {}
fn main { foo (x + value.method(b), x+y-z, true && false) }
"#,
        expect![[r#"
fn foo() {}
fn bar() {}
fn baz() {}
fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }
"#]],
    )
}

#[test]
fn ssr_expected_spacing() {
    assert_ssr_transform(
        "foo($x) + bar() ==>> bar($x)",
        "fn foo() {} fn bar() {} fn main() { foo(5) + bar() }",
        expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
    );
}

#[test]
fn ssr_with_extra_space() {
    assert_ssr_transform(
        "foo($x ) + bar() ==>> bar($x)",
        "fn foo() {} fn bar() {} fn main() { foo( 5 ) +bar( ) }",
        expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
    );
}

#[test]
fn ssr_keeps_nested_comment() {
    assert_ssr_transform(
        "foo($x) ==>> bar($x)",
        "fn foo() {} fn bar() {} fn main() { foo(other(5 /* using 5 */)) }",
        expect![["fn foo() {} fn bar() {} fn main() { bar(other(5 /* using 5 */)) }"]],
    )
}

#[test]
fn ssr_keeps_comment() {
    assert_ssr_transform(
        "foo($x) ==>> bar($x)",
        "fn foo() {} fn bar() {} fn main() { foo(5 /* using 5 */) }",
        expect![["fn foo() {} fn bar() {} fn main() { bar(5)/* using 5 */ }"]],
    )
}

#[test]
fn ssr_struct_lit() {
    assert_ssr_transform(
        "Foo{a: $a, b: $b} ==>> Foo::new($a, $b)",
        r#"
struct Foo() {}
impl Foo { fn new() {} }
fn main() { Foo{b:2, a:1} }
"#,
        expect![[r#"
struct Foo() {}
impl Foo { fn new() {} }
fn main() { Foo::new(1, 2) }
"#]],
    )
}
333 | |||
// Matching semantics: whitespace insensitivity, definitions, expressions, types, paths, patterns,
// semantic path resolution and placeholder constraints.

#[test]
fn ignores_whitespace() {
    assert_matches("1+2", "fn f() -> i32 {1 + 2}", &["1 + 2"]);
    assert_matches("1 + 2", "fn f() -> i32 {1+2}", &["1+2"]);
}

#[test]
fn no_match() {
    assert_no_match("1 + 3", "fn f() -> i32 {1 + 2}");
}

#[test]
fn match_fn_definition() {
    assert_matches("fn $a($b: $t) {$c}", "fn f(a: i32) {bar()}", &["fn f(a: i32) {bar()}"]);
}

#[test]
fn match_struct_definition() {
    let code = r#"
struct Option<T> {}
struct Bar {}
struct Foo {name: Option<String>}"#;
    assert_matches("struct $n {$f: Option<String>}", code, &["struct Foo {name: Option<String>}"]);
}

#[test]
fn match_expr() {
    let code = r#"
fn foo() {}
fn f() -> i32 {foo(40 + 2, 42)}"#;
    assert_matches("foo($a, $b)", code, &["foo(40 + 2, 42)"]);
    assert_no_match("foo($a, $b, $c)", code);
    assert_no_match("foo($a)", code);
}

#[test]
fn match_nested_method_calls() {
    assert_matches(
        "$a.z().z().z()",
        "fn f() {h().i().j().z().z().z().d().e()}",
        &["h().i().j().z().z().z()"],
    );
}

// Make sure that our node matching semantics don't differ within macro calls.
#[test]
fn match_nested_method_calls_with_macro_call() {
    assert_matches(
        "$a.z().z().z()",
        r#"
macro_rules! m1 { ($a:expr) => {$a}; }
fn f() {m1!(h().i().j().z().z().z().d().e())}"#,
        &["h().i().j().z().z().z()"],
    );
}

#[test]
fn match_complex_expr() {
    let code = r#"
fn foo() {} fn bar() {}
fn f() -> i32 {foo(bar(40, 2), 42)}"#;
    assert_matches("foo($a, $b)", code, &["foo(bar(40, 2), 42)"]);
    assert_no_match("foo($a, $b, $c)", code);
    assert_no_match("foo($a)", code);
    assert_matches("bar($a, $b)", code, &["bar(40, 2)"]);
}

// Trailing commas in the code should be ignored.
#[test]
fn match_with_trailing_commas() {
    // Code has comma, pattern doesn't.
    assert_matches("foo($a, $b)", "fn foo() {} fn f() {foo(1, 2,);}", &["foo(1, 2,)"]);
    assert_matches("Foo{$a, $b}", "struct Foo {} fn f() {Foo{1, 2,};}", &["Foo{1, 2,}"]);

    // Pattern has comma, code doesn't.
    assert_matches("foo($a, $b,)", "fn foo() {} fn f() {foo(1, 2);}", &["foo(1, 2)"]);
    assert_matches("Foo{$a, $b,}", "struct Foo {} fn f() {Foo{1, 2};}", &["Foo{1, 2}"]);
}

#[test]
fn match_type() {
    assert_matches("i32", "fn f() -> i32 {1 + 2}", &["i32"]);
    assert_matches(
        "Option<$a>",
        "struct Option<T> {} fn f() -> Option<i32> {42}",
        &["Option<i32>"],
    );
    assert_no_match(
        "Option<$a>",
        "struct Option<T> {} struct Result<T, E> {} fn f() -> Result<i32, ()> {42}",
    );
}

#[test]
fn match_struct_instantiation() {
    let code = r#"
struct Foo {bar: i32, baz: i32}
fn f() {Foo {bar: 1, baz: 2}}"#;
    assert_matches("Foo {bar: 1, baz: 2}", code, &["Foo {bar: 1, baz: 2}"]);
    // Now with placeholders for all parts of the struct.
    assert_matches("Foo {$a: $b, $c: $d}", code, &["Foo {bar: 1, baz: 2}"]);
    assert_matches("Foo {}", "struct Foo {} fn f() {Foo {}}", &["Foo {}"]);
}

#[test]
fn match_path() {
    let code = r#"
mod foo {
pub fn bar() {}
}
fn f() {foo::bar(42)}"#;
    assert_matches("foo::bar", code, &["foo::bar"]);
    assert_matches("$a::bar", code, &["foo::bar"]);
    assert_matches("foo::$b", code, &["foo::bar"]);
}

#[test]
fn match_pattern() {
    assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]);
}

// If our pattern has a full path, e.g. a::b::c() and the code has c(), but c resolves to
// a::b::c, then we should match.
#[test]
fn match_fully_qualified_fn_path() {
    let code = r#"
mod a {
pub mod b {
pub fn c(_: i32) {}
}
}
use a::b::c;
fn f1() {
c(42);
}
"#;
    assert_matches("a::b::c($a)", code, &["c(42)"]);
}

#[test]
fn match_resolved_type_name() {
    let code = r#"
mod m1 {
pub mod m2 {
pub trait Foo<T> {}
}
}
mod m3 {
trait Foo<T> {}
fn f1(f: Option<&dyn Foo<bool>>) {}
}
mod m4 {
use crate::m1::m2::Foo;
fn f1(f: Option<&dyn Foo<i32>>) {}
}
"#;
    assert_matches("m1::m2::Foo<$t>", code, &["Foo<i32>"]);
}

#[test]
fn type_arguments_within_path() {
    mark::check!(type_arguments_within_path);
    let code = r#"
mod foo {
pub struct Bar<T> {t: T}
impl<T> Bar<T> {
pub fn baz() {}
}
}
fn f1() {foo::Bar::<i32>::baz();}
"#;
    assert_no_match("foo::Bar::<i64>::baz()", code);
    assert_matches("foo::Bar::<i32>::baz()", code, &["foo::Bar::<i32>::baz()"]);
}

#[test]
fn literal_constraint() {
    mark::check!(literal_constraint);
    let code = r#"
enum Option<T> { Some(T), None }
use Option::Some;
fn f1() {
let x1 = Some(42);
let x2 = Some("foo");
let x3 = Some(x1);
let x4 = Some(40 + 2);
let x5 = Some(true);
}
"#;
    assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]);
    assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]);
}

#[test]
fn match_reordered_struct_instantiation() {
    assert_matches(
        "Foo {aa: 1, b: 2, ccc: 3}",
        "struct Foo {} fn f() {Foo {b: 2, ccc: 3, aa: 1}}",
        &["Foo {b: 2, ccc: 3, aa: 1}"],
    );
    assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {b: 1}}");
    assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {a: 2}}");
    assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {a: 1}}");
    assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {b: 2}}");
    assert_no_match("Foo {a: 1, }", "struct Foo {} fn f() {Foo {a: 1, b: 2}}");
    assert_no_match("Foo {a: 1, z: 9}", "struct Foo {} fn f() {Foo {a: 1}}");
}
541 | |||
542 | #[test] | ||
543 | fn match_macro_invocation() { | ||
544 | assert_matches( | ||
545 | "foo!($a)", | ||
546 | "macro_rules! foo {() => {}} fn() {foo(foo!(foo()))}", | ||
547 | &["foo!(foo())"], | ||
548 | ); | ||
549 | assert_matches( | ||
550 | "foo!(41, $a, 43)", | ||
551 | "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43)}", | ||
552 | &["foo!(41, 42, 43)"], | ||
553 | ); | ||
554 | assert_no_match("foo!(50, $a, 43)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}"); | ||
555 | assert_no_match("foo!(41, $a, 50)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}"); | ||
556 | assert_matches( | ||
557 | "foo!($a())", | ||
558 | "macro_rules! foo {() => {}} fn() {foo!(bar())}", | ||
559 | &["foo!(bar())"], | ||
560 | ); | ||
561 | } | ||
562 | |||
// When matching within a macro expansion, we only allow matches of nodes that originated from
// the macro call, not from the macro definition.
#[test]
fn no_match_expression_from_macro() {
    // `42.clone()` here exists only in the expansion of `m1!` — its tokens all
    // come from the macro definition, so the pattern must not match.
    assert_no_match(
        "$a.clone()",
        r#"
macro_rules! m1 {
() => {42.clone()}
}
fn f1() {m1!()}
"#,
    );
}

// We definitely don't want to allow matching of an expression that partly originates from the
// macro call (`42`) and partly from the macro definition (`.clone()`).
#[test]
fn no_match_split_expression() {
    assert_no_match(
        "$a.clone()",
        r#"
macro_rules! m1 {
($x:expr) => {$x.clone()}
}
fn f1() {m1!(42)}
"#,
    );
}
592 | |||
#[test]
fn replace_function_call() {
    // This test also makes sure that we ignore empty ranges: `$0$0` marks a
    // zero-length selection, which must not restrict the replacement.
    assert_ssr_transform(
        "foo() ==>> bar()",
        "fn foo() {$0$0} fn bar() {} fn f1() {foo(); foo();}",
        expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]],
    );
}

#[test]
fn replace_function_call_with_placeholders() {
    // Placeholders captured by the search template may be emitted in a
    // different order by the replacement template.
    assert_ssr_transform(
        "foo($a, $b) ==>> bar($b, $a)",
        "fn foo() {} fn bar() {} fn f1() {foo(5, 42)}",
        expect![["fn foo() {} fn bar() {} fn f1() {bar(42, 5)}"]],
    );
}

#[test]
fn replace_nested_function_calls() {
    // Both the outer match and the match nested inside its placeholder must be
    // replaced.
    assert_ssr_transform(
        "foo($a) ==>> bar($a)",
        "fn foo() {} fn bar() {} fn f1() {foo(foo(42))}",
        expect![["fn foo() {} fn bar() {} fn f1() {bar(bar(42))}"]],
    );
}
620 | |||
#[test]
fn replace_associated_function_call() {
    // A call resolved through an inherent impl (`Foo::new`) is rewritten to the
    // corresponding call on another type.
    assert_ssr_transform(
        "Foo::new() ==>> Bar::new()",
        r#"
struct Foo {}
impl Foo { fn new() {} }
struct Bar {}
impl Bar { fn new() {} }
fn f1() {Foo::new();}
"#,
        expect![[r#"
struct Foo {}
impl Foo { fn new() {} }
struct Bar {}
impl Bar { fn new() {} }
fn f1() {Bar::new();}
"#]],
    );
}

#[test]
fn replace_associated_trait_default_function_call() {
    mark::check!(replace_associated_trait_default_function_call);
    // Both `Bar::foo` and `Bar2::foo` resolve to the trait's default method;
    // only the call qualified with `Bar2` (the type named in the pattern) may
    // be rewritten.
    assert_ssr_transform(
        "Bar2::foo() ==>> Bar2::foo2()",
        r#"
trait Foo { fn foo() {} }
pub struct Bar {}
impl Foo for Bar {}
pub struct Bar2 {}
impl Foo for Bar2 {}
impl Bar2 { fn foo2() {} }
fn main() {
Bar::foo();
Bar2::foo();
}
"#,
        expect![[r#"
trait Foo { fn foo() {} }
pub struct Bar {}
impl Foo for Bar {}
pub struct Bar2 {}
impl Foo for Bar2 {}
impl Bar2 { fn foo2() {} }
fn main() {
Bar::foo();
Bar2::foo2();
}
"#]],
    );
}
673 | |||
#[test]
fn replace_associated_trait_constant() {
    mark::check!(replace_associated_trait_constant);
    // Same discrimination as for trait methods, but for associated constants:
    // only the `Bar2::VALUE` use is rewritten, `Bar::VALUE` is left alone.
    assert_ssr_transform(
        "Bar2::VALUE ==>> Bar2::VALUE_2222",
        r#"
trait Foo { const VALUE: i32; const VALUE_2222: i32; }
pub struct Bar {}
impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
pub struct Bar2 {}
impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
impl Bar2 { fn foo2() {} }
fn main() {
Bar::VALUE;
Bar2::VALUE;
}
"#,
        expect![[r#"
trait Foo { const VALUE: i32; const VALUE_2222: i32; }
pub struct Bar {}
impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
pub struct Bar2 {}
impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
impl Bar2 { fn foo2() {} }
fn main() {
Bar::VALUE;
Bar2::VALUE_2222;
}
"#]],
    );
}
705 | |||
#[test]
fn replace_path_in_different_contexts() {
    // Note the $0 inside module a::b which marks the point where the rule is interpreted. We
    // replace foo with bar, but both need different path qualifiers in different contexts. In f4,
    // foo is unqualified because of a use statement, however the replacement needs to be fully
    // qualified.
    assert_ssr_transform(
        "c::foo() ==>> c::bar()",
        r#"
mod a {
pub mod b {$0
pub mod c {
pub fn foo() {}
pub fn bar() {}
fn f1() { foo() }
}
fn f2() { c::foo() }
}
fn f3() { b::c::foo() }
}
use a::b::c::foo;
fn f4() { foo() }
"#,
        expect![[r#"
mod a {
pub mod b {
pub mod c {
pub fn foo() {}
pub fn bar() {}
fn f1() { bar() }
}
fn f2() { c::bar() }
}
fn f3() { b::c::bar() }
}
use a::b::c::foo;
fn f4() { a::b::c::bar() }
"#]],
    );
}
746 | |||
#[test]
fn replace_associated_function_with_generics() {
    // The type placeholder `$a` is carried through the rewrite, and the
    // replacement path is re-qualified for each call site (`crate::d::Bar` from
    // inside `mod c`, plain `Bar` from inside `mod d`).
    assert_ssr_transform(
        "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()",
        r#"
mod c {
pub struct Foo<T> {v: T}
impl<T> Foo<T> { pub fn new() {} }
fn f1() {
Foo::<i32>::new();
}
}
mod d {
pub struct Bar<T> {v: T}
impl<T> Bar<T> { pub fn default() {} }
fn f1() {
super::c::Foo::<i32>::new();
}
}
"#,
        expect![[r#"
mod c {
pub struct Foo<T> {v: T}
impl<T> Foo<T> { pub fn new() {} }
fn f1() {
crate::d::Bar::<i32>::default();
}
}
mod d {
pub struct Bar<T> {v: T}
impl<T> Bar<T> { pub fn default() {} }
fn f1() {
Bar::<i32>::default();
}
}
"#]],
    );
}
785 | |||
#[test]
fn replace_type() {
    // Patterns can match type references, not just expressions.
    assert_ssr_transform(
        "Result<(), $a> ==>> Option<$a>",
        "struct Result<T, E> {} struct Option<T> {} fn f1() -> Result<(), Vec<Error>> {foo()}",
        expect![[
            "struct Result<T, E> {} struct Option<T> {} fn f1() -> Option<Vec<Error>> {foo()}"
        ]],
    );
}

#[test]
fn replace_macro_invocations() {
    // A macro invocation can appear on either side of the rule: replaced by an
    // expression (`try!` -> `?`) or rebuilt from captured sub-expressions.
    assert_ssr_transform(
        "try!($a) ==>> $a?",
        "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}",
        expect![["macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]],
    );
    assert_ssr_transform(
        "foo!($a($b)) ==>> foo($b, $a)",
        "macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}",
        expect![["macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}"]],
    );
}

#[test]
fn replace_binary_op() {
    // Binary operators match innermost-first; swapping operands of a
    // left-associative chain therefore nests parentheses on the right.
    assert_ssr_transform(
        "$a + $b ==>> $b + $a",
        "fn f() {2 * 3 + 4 * 5}",
        expect![["fn f() {4 * 5 + 2 * 3}"]],
    );
    assert_ssr_transform(
        "$a + $b ==>> $b + $a",
        "fn f() {1 + 2 + 3 + 4}",
        expect![[r#"fn f() {4 + (3 + (2 + 1))}"#]],
    );
}
824 | |||
#[test]
fn match_binary_op() {
    // Every `+` node in the left-associative chain is a distinct match.
    assert_matches("$a + $b", "fn f() {1 + 2 + 3 + 4}", &["1 + 2", "1 + 2 + 3", "1 + 2 + 3 + 4"]);
}

#[test]
fn multiple_rules() {
    // Rules are tried in order; the more specific `$a + 1` rule wins for the
    // outermost addition, the general rule handles the rest.
    assert_ssr_transforms(
        &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"],
        "fn add() {} fn add_one() {} fn f() -> i32 {3 + 2 + 1}",
        expect![["fn add() {} fn add_one() {} fn f() -> i32 {add_one(add(3, 2))}"]],
    )
}

#[test]
fn multiple_rules_with_nested_matches() {
    // Alternating rules must each fire at every nesting depth.
    assert_ssr_transforms(
        &["foo1($a) ==>> bar1($a)", "foo2($a) ==>> bar2($a)"],
        r#"
fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
fn f() {foo1(foo2(foo1(foo2(foo1(42)))))}
"#,
        expect![[r#"
fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
fn f() {bar1(bar2(bar1(bar2(bar1(42)))))}
"#]],
    )
}
853 | |||
#[test]
fn match_within_macro_invocation() {
    // The expression passed to `foo!` survives into the expansion (`$b`), so
    // `aaa.bar()` can be matched even though it sits inside a macro call.
    let code = r#"
macro_rules! foo {
($a:stmt; $b:expr) => {
$b
};
}
struct A {}
impl A {
fn bar() {}
}
fn f1() {
let aaa = A {};
foo!(macro_ignores_this(); aaa.bar());
}
"#;
    assert_matches("$a.bar()", code, &["aaa.bar()"]);
}

#[test]
fn replace_within_macro_expansion() {
    // The edit is mapped back from the expansion to the original macro
    // argument, leaving the rest of the invocation intact.
    assert_ssr_transform(
        "$a.foo() ==>> bar($a)",
        r#"
macro_rules! macro1 {
($a:expr) => {$a}
}
fn bar() {}
fn f() {macro1!(5.x().foo().o2())}
"#,
        expect![[r#"
macro_rules! macro1 {
($a:expr) => {$a}
}
fn bar() {}
fn f() {macro1!(bar(5.x()).o2())}
"#]],
    )
}
894 | |||
#[test]
fn replace_outside_and_within_macro_expansion() {
    // Matches straddle the macro boundary: the two calls outside the macro and
    // the two inside its argument are all rewritten.
    assert_ssr_transform(
        "foo($a) ==>> bar($a)",
        r#"
fn foo() {} fn bar() {}
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {foo(foo(macro1!(foo(foo(42)))))}
"#,
        expect![[r#"
fn foo() {} fn bar() {}
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {bar(bar(macro1!(bar(bar(42)))))}
"#]],
    )
}

#[test]
fn preserves_whitespace_within_macro_expansion() {
    // Token spacing inside the macro argument must be kept as written.
    // NOTE(review): the unbalanced `macro1!(1 * 2 + 3 + 4}` fixture is
    // intentional — the test exercises replacement in erroneous code.
    assert_ssr_transform(
        "$a + $b ==>> $b - $a",
        r#"
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {macro1!(1 * 2 + 3 + 4}
"#,
        expect![[r#"
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {macro1!(4 - (3 - 1 * 2)}
"#]],
    )
}
934 | |||
#[test]
fn add_parenthesis_when_necessary() {
    // When a captured expression of lower precedence (binary `+`, unary `-`)
    // becomes the receiver of a method call, it must be parenthesised.
    assert_ssr_transform(
        "foo($a) ==>> $a.to_string()",
        r#"
fn foo(_: i32) {}
fn bar3(v: i32) {
foo(1 + 2);
foo(-v);
}
"#,
        expect![[r#"
fn foo(_: i32) {}
fn bar3(v: i32) {
(1 + 2).to_string();
(-v).to_string();
}
"#]],
    )
}
955 | |||
#[test]
fn match_failure_reasons() {
    // The matcher reports why a near-miss failed; the exact diagnostic text is
    // part of the contract, both outside and inside a macro expansion.
    let code = r#"
fn bar() {}
macro_rules! foo {
($a:expr) => {
1 + $a + 2
};
}
fn f1() {
bar(1, 2);
foo!(5 + 43.to_string() + 5);
}
"#;
    assert_match_failure_reason(
        "bar($a, 3)",
        code,
        "bar(1, 2)",
        r#"Pattern wanted token '3' (INT_NUMBER), but code had token '2' (INT_NUMBER)"#,
    );
    assert_match_failure_reason(
        "42.to_string()",
        code,
        "43.to_string()",
        r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#,
    );
}

#[test]
fn overlapping_possible_matches() {
    // There are three possible matches here, however the middle one, `foo(foo(foo(42)))` shouldn't
    // match because it overlaps with the outer match. The inner match is permitted since it is
    // contained entirely within the placeholder of the outer match.
    assert_matches(
        "foo(foo($a))",
        "fn foo() {} fn main() {foo(foo(foo(foo(42))))}",
        &["foo(foo(42))", "foo(foo(foo(foo(42))))"],
    );
}
995 | |||
#[test]
fn use_declaration_with_braces() {
    // It would be OK for a path rule to match and alter a use declaration. We shouldn't mess it up
    // though. In particular, we must not change `use foo::{baz, bar}` to `use foo::{baz,
    // foo2::bar2}`.
    mark::check!(use_declaration_with_braces);
    assert_ssr_transform(
        "foo::bar ==>> foo2::bar2",
        r#"
mod foo { pub fn bar() {} pub fn baz() {} }
mod foo2 { pub fn bar2() {} }
use foo::{baz, bar};
fn main() { bar() }
"#,
        expect![["
mod foo { pub fn bar() {} pub fn baz() {} }
mod foo2 { pub fn bar2() {} }
use foo::{baz, bar};
fn main() { foo2::bar2() }
"]],
    )
}
1018 | |||
#[test]
fn ufcs_matches_method_call() {
    // A UFCS pattern (`Foo::do_stuff($a, $b)`) must match equivalent method
    // calls, but only where the receiver's type resolves to `Foo`.
    let code = r#"
struct Foo {}
impl Foo {
fn new(_: i32) -> Foo { Foo {} }
fn do_stuff(&self, _: i32) {}
}
struct Bar {}
impl Bar {
fn new(_: i32) -> Bar { Bar {} }
fn do_stuff(&self, v: i32) {}
}
fn main() {
let b = Bar {};
let f = Foo {};
b.do_stuff(1);
f.do_stuff(2);
Foo::new(4).do_stuff(3);
// Too many / too few args - should never match
f.do_stuff(2, 10);
f.do_stuff();
}
"#;
    assert_matches("Foo::do_stuff($a, $b)", code, &["f.do_stuff(2)", "Foo::new(4).do_stuff(3)"]);
    // The arguments need special handling in the case of a function call matching a method call
    // and the first argument is different.
    assert_matches("Foo::do_stuff($a, 2)", code, &["f.do_stuff(2)"]);
    assert_matches("Foo::do_stuff(Foo::new(4), $b)", code, &["Foo::new(4).do_stuff(3)"]);

    assert_ssr_transform(
        "Foo::do_stuff(Foo::new($a), $b) ==>> Bar::new($b).do_stuff($a)",
        code,
        expect![[r#"
struct Foo {}
impl Foo {
fn new(_: i32) -> Foo { Foo {} }
fn do_stuff(&self, _: i32) {}
}
struct Bar {}
impl Bar {
fn new(_: i32) -> Bar { Bar {} }
fn do_stuff(&self, v: i32) {}
}
fn main() {
let b = Bar {};
let f = Foo {};
b.do_stuff(1);
f.do_stuff(2);
Bar::new(3).do_stuff(4);
// Too many / too few args - should never match
f.do_stuff(2, 10);
f.do_stuff();
}
"#]],
    );
}
1076 | |||
#[test]
fn pattern_is_a_single_segment_path() {
    mark::check!(pattern_is_a_single_segment_path);
    // The first function should not be altered because the `foo` in scope at the cursor position is
    // a different `foo`. This case is special because "foo" can be parsed as a pattern (IDENT_PAT ->
    // NAME -> IDENT), which contains no path. If we're not careful we'll end up matching the `foo`
    // in `let foo` from the first function. Whether we should match the `let foo` in the second
    // function is less clear. At the moment, we don't. Doing so sounds like a rename operation,
    // which isn't really what SSR is for, especially since the replacement `bar` must be able to be
    // resolved, which means if we rename `foo` we'll get a name collision.
    assert_ssr_transform(
        "foo ==>> bar",
        r#"
fn f1() -> i32 {
let foo = 1;
let bar = 2;
foo
}
fn f1() -> i32 {
let foo = 1;
let bar = 2;
foo$0
}
"#,
        expect![[r#"
fn f1() -> i32 {
let foo = 1;
let bar = 2;
foo
}
fn f1() -> i32 {
let foo = 1;
let bar = 2;
bar
}
"#]],
    );
}
1115 | |||
#[test]
fn replace_local_variable_reference() {
    // The pattern references a local variable `foo` in the block containing the cursor. We should
    // only replace references to this variable `foo`, not other variables that just happen to have
    // the same name.
    mark::check!(cursor_after_semicolon);
    assert_ssr_transform(
        "foo + $a ==>> $a - foo",
        r#"
fn bar1() -> i32 {
let mut res = 0;
let foo = 5;
res += foo + 1;
let foo = 10;
res += foo + 2;$0
res += foo + 3;
let foo = 15;
res += foo + 4;
res
}
"#,
        expect![[r#"
fn bar1() -> i32 {
let mut res = 0;
let foo = 5;
res += foo + 1;
let foo = 10;
res += 2 - foo;
res += 3 - foo;
let foo = 15;
res += foo + 4;
res
}
"#]],
    )
}
1152 | |||
#[test]
fn replace_path_within_selection() {
    // Two `$0` markers delimit a selection; only path matches inside the
    // selected range are rewritten.
    assert_ssr_transform(
        "foo ==>> bar",
        r#"
fn main() {
let foo = 41;
let bar = 42;
do_stuff(foo);
do_stuff(foo);$0
do_stuff(foo);
do_stuff(foo);$0
do_stuff(foo);
}"#,
        expect![[r#"
fn main() {
let foo = 41;
let bar = 42;
do_stuff(foo);
do_stuff(foo);
do_stuff(bar);
do_stuff(bar);
do_stuff(foo);
}"#]],
    );
}

#[test]
fn replace_nonpath_within_selection() {
    mark::check!(replace_nonpath_within_selection);
    // Same selection semantics, for a non-path (binary expression) pattern.
    assert_ssr_transform(
        "$a + $b ==>> $b * $a",
        r#"
fn main() {
let v = 1 + 2;$0
let v2 = 3 + 3;
let v3 = 4 + 5;$0
let v4 = 6 + 7;
}"#,
        expect![[r#"
fn main() {
let v = 1 + 2;
let v2 = 3 * 3;
let v3 = 5 * 4;
let v4 = 6 + 7;
}"#]],
    );
}
1201 | |||
#[test]
fn replace_self() {
    // `foo(self)` occurs twice in the code, however only the first occurrence is the `self` that's
    // in scope where the rule is invoked.
    assert_ssr_transform(
        "foo(self) ==>> bar(self)",
        r#"
struct S1 {}
fn foo(_: &S1) {}
fn bar(_: &S1) {}
impl S1 {
fn f1(&self) {
foo(self)$0
}
fn f2(&self) {
foo(self)
}
}
"#,
        expect![[r#"
struct S1 {}
fn foo(_: &S1) {}
fn bar(_: &S1) {}
impl S1 {
fn f1(&self) {
bar(self)
}
fn f2(&self) {
foo(self)
}
}
"#]],
    );
}
1236 | |||
#[test]
fn match_trait_method_call() {
    // `Bar::foo` and `Bar2::foo` resolve to the same function. Make sure we only match if the type
    // matches what's in the pattern. Also checks that we handle autoderef.
    let code = r#"
pub struct Bar {}
pub struct Bar2 {}
pub trait Foo {
fn foo(&self, _: i32) {}
}
impl Foo for Bar {}
impl Foo for Bar2 {}
fn main() {
let v1 = Bar {};
let v2 = Bar2 {};
let v1_ref = &v1;
let v2_ref = &v2;
v1.foo(1);
v2.foo(2);
Bar::foo(&v1, 3);
Bar2::foo(&v2, 4);
v1_ref.foo(5);
v2_ref.foo(6);
}
"#;
    assert_matches("Bar::foo($a, $b)", code, &["v1.foo(1)", "Bar::foo(&v1, 3)", "v1_ref.foo(5)"]);
    assert_matches("Bar2::foo($a, $b)", code, &["v2.foo(2)", "Bar2::foo(&v2, 4)", "v2_ref.foo(6)"]);
}
1265 | |||
#[test]
fn replace_autoref_autoderef_capture() {
    // Here we have several calls to `$a.foo()`. In the first case autoref is applied, in the
    // second, we already have a reference, so it isn't. When $a is used in a context where autoref
    // doesn't apply, we need to prefix it with `&`. Finally, we have some cases where autoderef
    // needs to be applied.
    mark::check!(replace_autoref_autoderef_capture);
    let code = r#"
struct Foo {}
impl Foo {
fn foo(&self) {}
fn foo2(&self) {}
}
fn bar(_: &Foo) {}
fn main() {
let f = Foo {};
let fr = &f;
let fr2 = &fr;
let fr3 = &fr2;
f.foo();
fr.foo();
fr2.foo();
fr3.foo();
}
"#;
    assert_ssr_transform(
        "Foo::foo($a) ==>> bar($a)",
        code,
        expect![[r#"
struct Foo {}
impl Foo {
fn foo(&self) {}
fn foo2(&self) {}
}
fn bar(_: &Foo) {}
fn main() {
let f = Foo {};
let fr = &f;
let fr2 = &fr;
let fr3 = &fr2;
bar(&f);
bar(&*fr);
bar(&**fr2);
bar(&***fr3);
}
"#]],
    );
    // If the placeholder is used as the receiver of another method call, then we don't need to
    // explicitly autoderef or autoref.
    assert_ssr_transform(
        "Foo::foo($a) ==>> $a.foo2()",
        code,
        expect![[r#"
struct Foo {}
impl Foo {
fn foo(&self) {}
fn foo2(&self) {}
}
fn bar(_: &Foo) {}
fn main() {
let f = Foo {};
let fr = &f;
let fr2 = &fr;
let fr3 = &fr2;
f.foo2();
fr.foo2();
fr2.foo2();
fr3.foo2();
}
"#]],
    );
}
1338 | |||
#[test]
fn replace_autoref_mut() {
    // As for the autoref test above, but the method takes `&mut self`, so the
    // inserted reference/deref prefixes must be `&mut` / `&mut *`.
    let code = r#"
struct Foo {}
impl Foo {
fn foo(&mut self) {}
}
fn bar(_: &mut Foo) {}
fn main() {
let mut f = Foo {};
f.foo();
let fr = &mut f;
fr.foo();
}
"#;
    assert_ssr_transform(
        "Foo::foo($a) ==>> bar($a)",
        code,
        expect![[r#"
struct Foo {}
impl Foo {
fn foo(&mut self) {}
}
fn bar(_: &mut Foo) {}
fn main() {
let mut f = Foo {};
bar(&mut f);
let fr = &mut f;
bar(&mut *fr);
}
"#]],
    );
}