diff options
Diffstat (limited to 'crates/ra_ssr/src')
-rw-r--r-- | crates/ra_ssr/src/errors.rs | 29 | ||||
-rw-r--r-- | crates/ra_ssr/src/lib.rs | 285 | ||||
-rw-r--r-- | crates/ra_ssr/src/matching.rs | 623 | ||||
-rw-r--r-- | crates/ra_ssr/src/parsing.rs | 343 | ||||
-rw-r--r-- | crates/ra_ssr/src/replacing.rs | 66 | ||||
-rw-r--r-- | crates/ra_ssr/src/tests.rs | 582 |
6 files changed, 1928 insertions, 0 deletions
diff --git a/crates/ra_ssr/src/errors.rs b/crates/ra_ssr/src/errors.rs new file mode 100644 index 000000000..c02bacae6 --- /dev/null +++ b/crates/ra_ssr/src/errors.rs | |||
@@ -0,0 +1,29 @@ | |||
1 | //! Code relating to errors produced by SSR. | ||
2 | |||
/// Constructs an SsrError taking arguments like the format macro.
macro_rules! _error {
    ($fmt:expr) => {$crate::SsrError::new(format!($fmt))};
    ($fmt:expr, $($arg:tt)+) => {$crate::SsrError::new(format!($fmt, $($arg)+))}
}
// Re-exported under the friendlier name `error`. The underscore-prefixed definition exists
// because the `macro_rules!` name and the `use` re-export would otherwise collide.
pub(crate) use _error as error;
9 | |||
/// Returns from the current function with an error, supplied by arguments as for format!
/// Expands to an early `return Err(...)`, so it may only be used inside functions that
/// return `Result<_, SsrError>`.
macro_rules! _bail {
    ($($tokens:tt)*) => {return Err(crate::errors::error!($($tokens)*))}
}
// Re-exported as `bail`; see the note on `error` above the `_error` macro.
pub(crate) use _bail as bail;
15 | |||
/// An error produced while parsing or applying a structural search/replace rule.
#[derive(Debug, PartialEq)]
pub struct SsrError(pub(crate) String);

impl SsrError {
    /// Wraps `message` (anything convertible to a `String`) in an `SsrError`.
    pub(crate) fn new(message: impl Into<String>) -> SsrError {
        let message: String = message.into();
        SsrError(message)
    }
}

impl std::fmt::Display for SsrError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // User-facing rendering: prefix the stored message.
        write!(f, "Parse error: {}", self.0)
    }
}
diff --git a/crates/ra_ssr/src/lib.rs b/crates/ra_ssr/src/lib.rs new file mode 100644 index 000000000..cca4576ce --- /dev/null +++ b/crates/ra_ssr/src/lib.rs | |||
@@ -0,0 +1,285 @@ | |||
1 | //! Structural Search Replace | ||
2 | //! | ||
3 | //! Allows searching the AST for code that matches one or more patterns and then replacing that code | ||
4 | //! based on a template. | ||
5 | |||
6 | mod matching; | ||
7 | mod parsing; | ||
8 | mod replacing; | ||
9 | #[macro_use] | ||
10 | mod errors; | ||
11 | #[cfg(test)] | ||
12 | mod tests; | ||
13 | |||
14 | pub use crate::errors::SsrError; | ||
15 | pub use crate::matching::Match; | ||
16 | use crate::matching::{record_match_fails_reasons_scope, MatchFailureReason}; | ||
17 | use hir::Semantics; | ||
18 | use ra_db::{FileId, FileRange}; | ||
19 | use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange}; | ||
20 | use ra_text_edit::TextEdit; | ||
21 | use rustc_hash::FxHashMap; | ||
22 | |||
/// A structured search replace rule. Create by calling `parse` on a str.
#[derive(Debug)]
pub struct SsrRule {
    /// A structured pattern that we're searching for.
    pattern: SsrPattern,
    /// What we'll replace it with.
    template: parsing::SsrTemplate,
}
31 | |||
/// A parsed search pattern together with its placeholders.
#[derive(Debug)]
pub struct SsrPattern {
    /// The search pattern in raw form — see `parsing::RawSearchPattern`.
    raw: parsing::RawSearchPattern,
    /// Placeholders keyed by the stand-in ident that we use in Rust source code.
    placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
    // We store our search pattern, parsed as each different kind of thing we can look for. As we
    // traverse the AST, we get the appropriate one of these for the type of node we're on. For many
    // search patterns, only some of these will be present.
    expr: Option<SyntaxNode>,
    type_ref: Option<SyntaxNode>,
    item: Option<SyntaxNode>,
    path: Option<SyntaxNode>,
    pattern: Option<SyntaxNode>,
}
46 | |||
/// A collection of matches found while searching.
#[derive(Debug, Default)]
pub struct SsrMatches {
    /// All matches found. Each match may itself contain nested matches inside its placeholders.
    pub matches: Vec<Match>,
}
51 | |||
/// Searches a crate for pattern matches and possibly replaces them with something else.
pub struct MatchFinder<'db> {
    /// Our source of information about the user's code.
    sema: Semantics<'db, ra_ide_db::RootDatabase>,
    /// The rules to apply; tried in the order they were added.
    rules: Vec<SsrRule>,
}
58 | |||
59 | impl<'db> MatchFinder<'db> { | ||
60 | pub fn new(db: &'db ra_ide_db::RootDatabase) -> MatchFinder<'db> { | ||
61 | MatchFinder { sema: Semantics::new(db), rules: Vec::new() } | ||
62 | } | ||
63 | |||
64 | pub fn add_rule(&mut self, rule: SsrRule) { | ||
65 | self.rules.push(rule); | ||
66 | } | ||
67 | |||
68 | /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you | ||
69 | /// intend to do replacement, use `add_rule` instead. | ||
70 | pub fn add_search_pattern(&mut self, pattern: SsrPattern) { | ||
71 | self.add_rule(SsrRule { pattern, template: "()".parse().unwrap() }) | ||
72 | } | ||
73 | |||
74 | pub fn edits_for_file(&self, file_id: FileId) -> Option<TextEdit> { | ||
75 | let matches = self.find_matches_in_file(file_id); | ||
76 | if matches.matches.is_empty() { | ||
77 | None | ||
78 | } else { | ||
79 | use ra_db::SourceDatabaseExt; | ||
80 | Some(replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id))) | ||
81 | } | ||
82 | } | ||
83 | |||
84 | pub fn find_matches_in_file(&self, file_id: FileId) -> SsrMatches { | ||
85 | let file = self.sema.parse(file_id); | ||
86 | let code = file.syntax(); | ||
87 | let mut matches = SsrMatches::default(); | ||
88 | self.find_matches(code, &None, &mut matches); | ||
89 | matches | ||
90 | } | ||
91 | |||
92 | /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match | ||
93 | /// them, while recording reasons why they don't match. This API is useful for command | ||
94 | /// line-based debugging where providing a range is difficult. | ||
95 | pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> { | ||
96 | use ra_db::SourceDatabaseExt; | ||
97 | let file = self.sema.parse(file_id); | ||
98 | let mut res = Vec::new(); | ||
99 | let file_text = self.sema.db.file_text(file_id); | ||
100 | let mut remaining_text = file_text.as_str(); | ||
101 | let mut base = 0; | ||
102 | let len = snippet.len() as u32; | ||
103 | while let Some(offset) = remaining_text.find(snippet) { | ||
104 | let start = base + offset as u32; | ||
105 | let end = start + len; | ||
106 | self.output_debug_for_nodes_at_range( | ||
107 | file.syntax(), | ||
108 | FileRange { file_id, range: TextRange::new(start.into(), end.into()) }, | ||
109 | &None, | ||
110 | &mut res, | ||
111 | ); | ||
112 | remaining_text = &remaining_text[offset + snippet.len()..]; | ||
113 | base = end; | ||
114 | } | ||
115 | res | ||
116 | } | ||
117 | |||
118 | fn find_matches( | ||
119 | &self, | ||
120 | code: &SyntaxNode, | ||
121 | restrict_range: &Option<FileRange>, | ||
122 | matches_out: &mut SsrMatches, | ||
123 | ) { | ||
124 | for rule in &self.rules { | ||
125 | if let Ok(mut m) = matching::get_match(false, rule, &code, restrict_range, &self.sema) { | ||
126 | // Continue searching in each of our placeholders. | ||
127 | for placeholder_value in m.placeholder_values.values_mut() { | ||
128 | if let Some(placeholder_node) = &placeholder_value.node { | ||
129 | // Don't search our placeholder if it's the entire matched node, otherwise we'd | ||
130 | // find the same match over and over until we got a stack overflow. | ||
131 | if placeholder_node != code { | ||
132 | self.find_matches( | ||
133 | placeholder_node, | ||
134 | restrict_range, | ||
135 | &mut placeholder_value.inner_matches, | ||
136 | ); | ||
137 | } | ||
138 | } | ||
139 | } | ||
140 | matches_out.matches.push(m); | ||
141 | return; | ||
142 | } | ||
143 | } | ||
144 | // If we've got a macro call, we already tried matching it pre-expansion, which is the only | ||
145 | // way to match the whole macro, now try expanding it and matching the expansion. | ||
146 | if let Some(macro_call) = ast::MacroCall::cast(code.clone()) { | ||
147 | if let Some(expanded) = self.sema.expand(¯o_call) { | ||
148 | if let Some(tt) = macro_call.token_tree() { | ||
149 | // When matching within a macro expansion, we only want to allow matches of | ||
150 | // nodes that originated entirely from within the token tree of the macro call. | ||
151 | // i.e. we don't want to match something that came from the macro itself. | ||
152 | self.find_matches( | ||
153 | &expanded, | ||
154 | &Some(self.sema.original_range(tt.syntax())), | ||
155 | matches_out, | ||
156 | ); | ||
157 | } | ||
158 | } | ||
159 | } | ||
160 | for child in code.children() { | ||
161 | self.find_matches(&child, restrict_range, matches_out); | ||
162 | } | ||
163 | } | ||
164 | |||
165 | fn output_debug_for_nodes_at_range( | ||
166 | &self, | ||
167 | node: &SyntaxNode, | ||
168 | range: FileRange, | ||
169 | restrict_range: &Option<FileRange>, | ||
170 | out: &mut Vec<MatchDebugInfo>, | ||
171 | ) { | ||
172 | for node in node.children() { | ||
173 | let node_range = self.sema.original_range(&node); | ||
174 | if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range) | ||
175 | { | ||
176 | continue; | ||
177 | } | ||
178 | if node_range.range == range.range { | ||
179 | for rule in &self.rules { | ||
180 | let pattern = | ||
181 | rule.pattern.tree_for_kind_with_reason(node.kind()).map(|p| p.clone()); | ||
182 | out.push(MatchDebugInfo { | ||
183 | matched: matching::get_match(true, rule, &node, restrict_range, &self.sema) | ||
184 | .map_err(|e| MatchFailureReason { | ||
185 | reason: e.reason.unwrap_or_else(|| { | ||
186 | "Match failed, but no reason was given".to_owned() | ||
187 | }), | ||
188 | }), | ||
189 | pattern, | ||
190 | node: node.clone(), | ||
191 | }); | ||
192 | } | ||
193 | } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) { | ||
194 | if let Some(expanded) = self.sema.expand(¯o_call) { | ||
195 | if let Some(tt) = macro_call.token_tree() { | ||
196 | self.output_debug_for_nodes_at_range( | ||
197 | &expanded, | ||
198 | range, | ||
199 | &Some(self.sema.original_range(tt.syntax())), | ||
200 | out, | ||
201 | ); | ||
202 | } | ||
203 | } | ||
204 | } | ||
205 | self.output_debug_for_nodes_at_range(&node, range, restrict_range, out); | ||
206 | } | ||
207 | } | ||
208 | } | ||
209 | |||
/// Debugging information about one attempted match, produced by
/// `MatchFinder::debug_where_text_equal`.
pub struct MatchDebugInfo {
    /// The code node that we attempted to match.
    node: SyntaxNode,
    /// Our search pattern parsed as the same kind of syntax node as `node`. e.g. expression, item,
    /// etc. Will be absent if the pattern can't be parsed as that kind.
    pattern: Result<SyntaxNode, MatchFailureReason>,
    /// The successful match, or the reason the node failed to match.
    matched: Result<Match, MatchFailureReason>,
}
217 | |||
218 | impl std::fmt::Debug for MatchDebugInfo { | ||
219 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { | ||
220 | match &self.matched { | ||
221 | Ok(_) => writeln!(f, "Node matched")?, | ||
222 | Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?, | ||
223 | } | ||
224 | writeln!( | ||
225 | f, | ||
226 | "============ AST ===========\n\ | ||
227 | {:#?}", | ||
228 | self.node | ||
229 | )?; | ||
230 | writeln!(f, "========= PATTERN ==========")?; | ||
231 | match &self.pattern { | ||
232 | Ok(pattern) => { | ||
233 | writeln!(f, "{:#?}", pattern)?; | ||
234 | } | ||
235 | Err(err) => { | ||
236 | writeln!(f, "{}", err.reason)?; | ||
237 | } | ||
238 | } | ||
239 | writeln!(f, "============================")?; | ||
240 | Ok(()) | ||
241 | } | ||
242 | } | ||
243 | |||
impl SsrPattern {
    /// Like `tree_for_kind`, but runs with failure-reason recording enabled, so the returned
    /// error always carries a human-readable reason.
    fn tree_for_kind_with_reason(
        &self,
        kind: SyntaxKind,
    ) -> Result<&SyntaxNode, MatchFailureReason> {
        // Recording is forced on above, so `e.reason` is populated whenever this errors.
        record_match_fails_reasons_scope(true, || self.tree_for_kind(kind))
            .map_err(|e| MatchFailureReason { reason: e.reason.unwrap() })
    }
}
253 | |||
254 | impl SsrMatches { | ||
255 | /// Returns `self` with any nested matches removed and made into top-level matches. | ||
256 | pub fn flattened(self) -> SsrMatches { | ||
257 | let mut out = SsrMatches::default(); | ||
258 | self.flatten_into(&mut out); | ||
259 | out | ||
260 | } | ||
261 | |||
262 | fn flatten_into(self, out: &mut SsrMatches) { | ||
263 | for mut m in self.matches { | ||
264 | for p in m.placeholder_values.values_mut() { | ||
265 | std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out); | ||
266 | } | ||
267 | out.matches.push(m); | ||
268 | } | ||
269 | } | ||
270 | } | ||
271 | |||
272 | impl Match { | ||
273 | pub fn matched_text(&self) -> String { | ||
274 | self.matched_node.text().to_string() | ||
275 | } | ||
276 | } | ||
277 | |||
// Lets `SsrError` be boxed as a `dyn std::error::Error` and used with `?`-based error handling.
impl std::error::Error for SsrError {}
279 | |||
#[cfg(test)]
impl MatchDebugInfo {
    /// Test-only accessor: the reason this node failed to match, if it did fail.
    pub(crate) fn match_failure_reason(&self) -> Option<&str> {
        self.matched.as_ref().err().map(|r| r.reason.as_str())
    }
}
diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs new file mode 100644 index 000000000..50b29eab2 --- /dev/null +++ b/crates/ra_ssr/src/matching.rs | |||
@@ -0,0 +1,623 @@ | |||
1 | //! This module is responsible for matching a search pattern against a node in the AST. In the | ||
2 | //! process of matching, placeholder values are recorded. | ||
3 | |||
4 | use crate::{ | ||
5 | parsing::{Constraint, NodeKind, Placeholder, SsrTemplate}, | ||
6 | SsrMatches, SsrPattern, SsrRule, | ||
7 | }; | ||
8 | use hir::Semantics; | ||
9 | use ra_db::FileRange; | ||
10 | use ra_syntax::ast::{AstNode, AstToken}; | ||
11 | use ra_syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken}; | ||
12 | use rustc_hash::FxHashMap; | ||
13 | use std::{cell::Cell, iter::Peekable}; | ||
14 | use test_utils::mark; | ||
15 | |||
// Creates a match error. If we're currently attempting to match some code that we thought we were
// going to match, as indicated by the --debug-snippet flag, then populate the reason field.
// Formatting is skipped entirely when not recording, since most match attempts fail and the
// formatted string would just be thrown away.
macro_rules! match_error {
    ($e:expr) => {{
        MatchFailed {
            reason: if recording_match_fail_reasons() {
                Some(format!("{}", $e))
            } else {
                None
            }
        }
    }};
    ($fmt:expr, $($arg:tt)+) => {{
        MatchFailed {
            reason: if recording_match_fail_reasons() {
                Some(format!($fmt, $($arg)+))
            } else {
                None
            }
        }
    }};
}
38 | |||
// Fails the current match attempt, recording the supplied reason if we're recording match fail reasons.
// Expands to an early `return Err(...)`, so it may only be used inside functions that return
// `Result<_, MatchFailed>`.
macro_rules! fail_match {
    ($($args:tt)*) => {return Err(match_error!($($args)*))};
}
43 | |||
/// Information about a match that was found.
#[derive(Debug)]
pub struct Match {
    /// The range the match covers, as reported by `Semantics::original_range`.
    pub(crate) range: FileRange,
    /// The node that matched the whole search pattern.
    pub(crate) matched_node: SyntaxNode,
    /// What each placeholder in the pattern bound to.
    pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>,
    /// Comments found in the matched code that the pattern made no mention of.
    pub(crate) ignored_comments: Vec<ast::Comment>,
    // A copy of the template for the rule that produced this match. We store this on the match for
    // if/when we do replacement.
    pub(crate) template: SsrTemplate,
}
55 | |||
/// Represents a `$var` in an SSR query. Used as the key in `Match::placeholder_values`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(crate) struct Var(pub String);
59 | |||
/// Information about a placeholder bound in a match.
#[derive(Debug)]
pub(crate) struct PlaceholderMatch {
    /// The node that the placeholder matched to. If set, then we'll search for further matches
    /// within this node. It isn't set when we match tokens within a macro call's token tree.
    pub(crate) node: Option<SyntaxNode>,
    /// The file range covered by whatever the placeholder bound to.
    pub(crate) range: FileRange,
    /// More matches, found within `node`.
    pub(crate) inner_matches: SsrMatches,
}
70 | |||
/// A human-readable explanation of why a match attempt failed.
#[derive(Debug)]
pub(crate) struct MatchFailureReason {
    pub(crate) reason: String,
}
75 | |||
/// An "error" indicating that matching failed. Use the fail_match! macro to create and return this.
#[derive(Clone)]
pub(crate) struct MatchFailed {
    /// The reason why we failed to match. Only present when `debug_active` was true in the call
    /// to `get_match`.
    pub(crate) reason: Option<String>,
}
83 | |||
/// Checks if `code` matches the search pattern found in `search_scope`, returning information about
/// the match, if it does. Since we only do matching in this module and searching is done by the
/// parent module, we don't populate nested matches.
///
/// `debug_active` enables recording of human-readable failure reasons (at some formatting cost).
pub(crate) fn get_match(
    debug_active: bool,
    rule: &SsrRule,
    code: &SyntaxNode,
    restrict_range: &Option<FileRange>,
    sema: &Semantics<ra_ide_db::RootDatabase>,
) -> Result<Match, MatchFailed> {
    record_match_fails_reasons_scope(debug_active, || {
        Matcher::try_match(rule, code, restrict_range, sema)
    })
}
98 | |||
/// Checks if our search pattern matches a particular node of the AST.
struct Matcher<'db, 'sema> {
    /// Our source of semantic information about the user's code.
    sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>,
    /// If any placeholders come from anywhere outside of this range, then the match will be
    /// rejected.
    restrict_range: Option<FileRange>,
    /// The rule (pattern and template) currently being matched.
    rule: &'sema SsrRule,
}
107 | |||
/// Which phase of matching we're currently performing. We do two phases because most attempted
/// matches will fail and it means we can defer more expensive checks to the second phase.
enum Phase<'a> {
    /// On the first phase, we perform cheap checks. No state is mutated and nothing is recorded.
    First,
    /// On the second phase, we construct the `Match`. Things like what placeholders bind to is
    /// recorded.
    Second(&'a mut Match),
}
117 | |||
impl<'db, 'sema> Matcher<'db, 'sema> {
    /// Attempts to match `rule`'s pattern against `code`, running both matching phases
    /// (cheap structural checks first, then recording of placeholder bindings).
    fn try_match(
        rule: &'sema SsrRule,
        code: &SyntaxNode,
        restrict_range: &Option<FileRange>,
        sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>,
    ) -> Result<Match, MatchFailed> {
        let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule };
        // Get the search pattern parsed as the same kind of node as `code`; if it can't be
        // parsed as that kind, the match fails here.
        let pattern_tree = rule.pattern.tree_for_kind(code.kind())?;
        // First pass at matching, where we check that node types and idents match.
        match_state.attempt_match_node(&mut Phase::First, &pattern_tree, code)?;
        match_state.validate_range(&sema.original_range(code))?;
        let mut the_match = Match {
            range: sema.original_range(code),
            matched_node: code.clone(),
            placeholder_values: FxHashMap::default(),
            ignored_comments: Vec::new(),
            template: rule.template.clone(),
        };
        // Second matching pass, where we record placeholder matches, ignored comments and maybe do
        // any other more expensive checks that we didn't want to do on the first pass.
        match_state.attempt_match_node(&mut Phase::Second(&mut the_match), &pattern_tree, code)?;
        Ok(the_match)
    }

    /// Checks that `range` is within the permitted range if any. This is applicable when we're
    /// processing a macro expansion and we want to fail the match if we're working with a node that
    /// didn't originate from the token tree of the macro call.
    fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> {
        if let Some(restrict_range) = &self.restrict_range {
            if restrict_range.file_id != range.file_id
                || !restrict_range.range.contains_range(range.range)
            {
                fail_match!("Node originated from a macro");
            }
        }
        Ok(())
    }

    /// Matches a single pattern node against a single code node. Placeholders match (almost)
    /// anything; otherwise kinds must agree and we recurse into the children, with special
    /// handling for record field lists and token trees.
    fn attempt_match_node(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        // Handle placeholders.
        if let Some(placeholder) = self.get_placeholder(&SyntaxElement::Node(pattern.clone())) {
            for constraint in &placeholder.constraints {
                self.check_constraint(constraint, code)?;
            }
            if let Phase::Second(matches_out) = phase {
                let original_range = self.sema.original_range(code);
                // We validated the range for the node when we started the match, so the placeholder
                // probably can't fail range validation, but just to be safe...
                self.validate_range(&original_range)?;
                matches_out.placeholder_values.insert(
                    Var(placeholder.ident.to_string()),
                    PlaceholderMatch::new(code, original_range),
                );
            }
            return Ok(());
        }
        // Non-placeholders.
        if pattern.kind() != code.kind() {
            fail_match!(
                "Pattern had a `{}` ({:?}), code had `{}` ({:?})",
                pattern.text(),
                pattern.kind(),
                code.text(),
                code.kind()
            );
        }
        // Some kinds of nodes have special handling. For everything else, we fall back to default
        // matching.
        match code.kind() {
            SyntaxKind::RECORD_FIELD_LIST => {
                self.attempt_match_record_field_list(phase, pattern, code)
            }
            SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
            _ => self.attempt_match_node_children(phase, pattern, code),
        }
    }

    /// Default node matching: matches the children of `pattern` against the children of `code`,
    /// in order.
    fn attempt_match_node_children(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        self.attempt_match_sequences(
            phase,
            PatternIterator::new(pattern),
            code.children_with_tokens(),
        )
    }

    /// Walks the pattern and code child sequences in lockstep, delegating to
    /// `attempt_match_token`/`attempt_match_node` for each element. Fails if either side has
    /// elements left over.
    fn attempt_match_sequences(
        &self,
        phase: &mut Phase,
        pattern_it: PatternIterator,
        mut code_it: SyntaxElementChildren,
    ) -> Result<(), MatchFailed> {
        let mut pattern_it = pattern_it.peekable();
        loop {
            match phase.next_non_trivial(&mut code_it) {
                None => {
                    if let Some(p) = pattern_it.next() {
                        fail_match!("Part of the pattern was unmatched: {:?}", p);
                    }
                    return Ok(());
                }
                Some(SyntaxElement::Token(c)) => {
                    self.attempt_match_token(phase, &mut pattern_it, &c)?;
                }
                Some(SyntaxElement::Node(c)) => match pattern_it.next() {
                    Some(SyntaxElement::Node(p)) => {
                        self.attempt_match_node(phase, &p, &c)?;
                    }
                    Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()),
                    None => fail_match!("Pattern reached end, code has {}", c.text()),
                },
            }
        }
    }

    /// Matches one code token against the pattern, tolerating trailing-comma differences in
    /// either direction.
    fn attempt_match_token(
        &self,
        phase: &mut Phase,
        pattern: &mut Peekable<PatternIterator>,
        code: &ra_syntax::SyntaxToken,
    ) -> Result<(), MatchFailed> {
        phase.record_ignored_comments(code);
        // Ignore whitespace and comments.
        if code.kind().is_trivia() {
            return Ok(());
        }
        if let Some(SyntaxElement::Token(p)) = pattern.peek() {
            // If the code has a comma and the pattern is about to close something, then accept the
            // comma without advancing the pattern. i.e. ignore trailing commas.
            if code.kind() == SyntaxKind::COMMA && is_closing_token(p.kind()) {
                return Ok(());
            }
            // Conversely, if the pattern has a comma and the code doesn't, skip that part of the
            // pattern and continue to match the code.
            if p.kind() == SyntaxKind::COMMA && is_closing_token(code.kind()) {
                pattern.next();
            }
        }
        // Consume an element from the pattern and make sure it matches.
        match pattern.next() {
            Some(SyntaxElement::Token(p)) => {
                if p.kind() != code.kind() || p.text() != code.text() {
                    fail_match!(
                        "Pattern wanted token '{}' ({:?}), but code had token '{}' ({:?})",
                        p.text(),
                        p.kind(),
                        code.text(),
                        code.kind()
                    )
                }
            }
            Some(SyntaxElement::Node(p)) => {
                // Not sure if this is actually reachable.
                fail_match!(
                    "Pattern wanted {:?}, but code had token '{}' ({:?})",
                    p,
                    code.text(),
                    code.kind()
                );
            }
            None => {
                fail_match!("Pattern exhausted, while code remains: `{}`", code.text());
            }
        }
        Ok(())
    }

    /// Checks a placeholder constraint against the code node it bound to, failing the match if
    /// the constraint isn't satisfied. `Not` inverts the nested constraint.
    fn check_constraint(
        &self,
        constraint: &Constraint,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        match constraint {
            Constraint::Kind(kind) => {
                kind.matches(code)?;
            }
            Constraint::Not(sub) => {
                if self.check_constraint(&*sub, code).is_ok() {
                    fail_match!("Constraint {:?} failed for '{}'", constraint, code.text());
                }
            }
        }
        Ok(())
    }

    /// We want to allow the records to match in any order, so we have special matching logic for
    /// them.
    fn attempt_match_record_field_list(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        // Build a map keyed by field name.
        let mut fields_by_name = FxHashMap::default();
        for child in code.children() {
            if let Some(record) = ast::RecordField::cast(child.clone()) {
                if let Some(name) = record.field_name() {
                    fields_by_name.insert(name.text().clone(), child.clone());
                }
            }
        }
        for p in pattern.children_with_tokens() {
            if let SyntaxElement::Node(p) = p {
                if let Some(name_element) = p.first_child_or_token() {
                    if self.get_placeholder(&name_element).is_some() {
                        // If the pattern is using placeholders for field names then order
                        // independence doesn't make sense. Fall back to regular ordered
                        // matching.
                        return self.attempt_match_node_children(phase, pattern, code);
                    }
                    if let Some(ident) = only_ident(name_element) {
                        let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| {
                            match_error!(
                                "Placeholder has record field '{}', but code doesn't",
                                ident
                            )
                        })?;
                        self.attempt_match_node(phase, &p, &code_record)?;
                    }
                }
            }
        }
        // Any code fields left over were never mentioned by the pattern.
        if let Some(unmatched_fields) = fields_by_name.keys().next() {
            fail_match!(
                "{} field(s) of a record literal failed to match, starting with {}",
                fields_by_name.len(),
                unmatched_fields
            );
        }
        Ok(())
    }

    /// Outside of token trees, a placeholder can only match a single AST node, whereas in a token
    /// tree it can match a sequence of tokens. Note, that this code will only be used when the
    /// pattern matches the macro invocation. For matches within the macro call, we'll already have
    /// expanded the macro.
    fn attempt_match_token_tree(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &ra_syntax::SyntaxNode,
    ) -> Result<(), MatchFailed> {
        let mut pattern = PatternIterator::new(pattern).peekable();
        let mut children = code.children_with_tokens();
        while let Some(child) = children.next() {
            if let Some(placeholder) = pattern.peek().and_then(|p| self.get_placeholder(p)) {
                pattern.next();
                // Look ahead to the next pattern token (or a subtree's first token) — the
                // placeholder greedily consumes code tokens up to, but not including, it.
                let next_pattern_token = pattern
                    .peek()
                    .and_then(|p| match p {
                        SyntaxElement::Token(t) => Some(t.clone()),
                        SyntaxElement::Node(n) => n.first_token(),
                    })
                    .map(|p| p.text().to_string());
                let first_matched_token = child.clone();
                let mut last_matched_token = child;
                // Read code tokens until we reach one equal to the next token from our pattern
                // or we reach the end of the token tree.
                while let Some(next) = children.next() {
                    match &next {
                        SyntaxElement::Token(t) => {
                            if Some(t.to_string()) == next_pattern_token {
                                pattern.next();
                                break;
                            }
                        }
                        SyntaxElement::Node(n) => {
                            if let Some(first_token) = n.first_token() {
                                if Some(first_token.to_string()) == next_pattern_token {
                                    if let Some(SyntaxElement::Node(p)) = pattern.next() {
                                        // We have a subtree that starts with the next token in our pattern.
                                        self.attempt_match_token_tree(phase, &p, &n)?;
                                        break;
                                    }
                                }
                            }
                        }
                    };
                    last_matched_token = next;
                }
                if let Phase::Second(match_out) = phase {
                    // Token-tree placeholders record only a range, not a node — see
                    // `PlaceholderMatch::node`.
                    match_out.placeholder_values.insert(
                        Var(placeholder.ident.to_string()),
                        PlaceholderMatch::from_range(FileRange {
                            file_id: self.sema.original_range(code).file_id,
                            range: first_matched_token
                                .text_range()
                                .cover(last_matched_token.text_range()),
                        }),
                    );
                }
                continue;
            }
            // Match literal (non-placeholder) tokens.
            match child {
                SyntaxElement::Token(token) => {
                    self.attempt_match_token(phase, &mut pattern, &token)?;
                }
                SyntaxElement::Node(node) => match pattern.next() {
                    Some(SyntaxElement::Node(p)) => {
                        self.attempt_match_token_tree(phase, &p, &node)?;
                    }
                    Some(SyntaxElement::Token(p)) => fail_match!(
                        "Pattern has token '{}', code has subtree '{}'",
                        p.text(),
                        node.text()
                    ),
                    None => fail_match!("Pattern has nothing, code has '{}'", node.text()),
                },
            }
        }
        if let Some(p) = pattern.next() {
            fail_match!("Reached end of token tree in code, but pattern still has {:?}", p);
        }
        Ok(())
    }

    /// If `element` is an ident (per `only_ident`) whose text is one of our placeholder
    /// stand-ins, returns the corresponding placeholder.
    fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
        only_ident(element.clone())
            .and_then(|ident| self.rule.pattern.placeholders_by_stand_in.get(ident.text()))
    }
}
451 | |||
452 | impl Phase<'_> { | ||
453 | fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> { | ||
454 | loop { | ||
455 | let c = code_it.next(); | ||
456 | if let Some(SyntaxElement::Token(t)) = &c { | ||
457 | self.record_ignored_comments(t); | ||
458 | if t.kind().is_trivia() { | ||
459 | continue; | ||
460 | } | ||
461 | } | ||
462 | return c; | ||
463 | } | ||
464 | } | ||
465 | |||
466 | fn record_ignored_comments(&mut self, token: &SyntaxToken) { | ||
467 | if token.kind() == SyntaxKind::COMMENT { | ||
468 | if let Phase::Second(match_out) = self { | ||
469 | if let Some(comment) = ast::Comment::cast(token.clone()) { | ||
470 | match_out.ignored_comments.push(comment); | ||
471 | } | ||
472 | } | ||
473 | } | ||
474 | } | ||
475 | } | ||
476 | |||
477 | fn is_closing_token(kind: SyntaxKind) -> bool { | ||
478 | kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK | ||
479 | } | ||
480 | |||
481 | pub(crate) fn record_match_fails_reasons_scope<F, T>(debug_active: bool, f: F) -> T | ||
482 | where | ||
483 | F: Fn() -> T, | ||
484 | { | ||
485 | RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(debug_active)); | ||
486 | let res = f(); | ||
487 | RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(false)); | ||
488 | res | ||
489 | } | ||
490 | |||
491 | // For performance reasons, we don't want to record the reason why every match fails, only the bit | ||
492 | // of code that the user indicated they thought would match. We use a thread local to indicate when | ||
493 | // we are trying to match that bit of code. This saves us having to pass a boolean into all the bits | ||
494 | // of code that can make the decision to not match. | ||
thread_local! {
    /// Whether failure reasons should currently be recorded for this thread. Set via
    /// `record_match_fails_reasons_scope`; see the comment above for the rationale.
    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = Cell::new(false);
}
498 | |||
499 | fn recording_match_fail_reasons() -> bool { | ||
500 | RECORDING_MATCH_FAIL_REASONS.with(|c| c.get()) | ||
501 | } | ||
502 | |||
503 | impl PlaceholderMatch { | ||
504 | fn new(node: &SyntaxNode, range: FileRange) -> Self { | ||
505 | Self { node: Some(node.clone()), range, inner_matches: SsrMatches::default() } | ||
506 | } | ||
507 | |||
508 | fn from_range(range: FileRange) -> Self { | ||
509 | Self { node: None, range, inner_matches: SsrMatches::default() } | ||
510 | } | ||
511 | } | ||
512 | |||
impl SsrPattern {
    /// Returns the pre-parsed tree of this pattern appropriate for matching against a node of
    /// syntax kind `kind` (expression, type reference, item, path or pattern). Fails the match if
    /// `kind` is unsupported, or if the pattern didn't parse as that kind of thing.
    pub(crate) fn tree_for_kind(&self, kind: SyntaxKind) -> Result<&SyntaxNode, MatchFailed> {
        // Note: `fail_match!` returns early from this function.
        let (tree, kind_name) = if ast::Expr::can_cast(kind) {
            (&self.expr, "expression")
        } else if ast::TypeRef::can_cast(kind) {
            (&self.type_ref, "type reference")
        } else if ast::ModuleItem::can_cast(kind) {
            (&self.item, "item")
        } else if ast::Path::can_cast(kind) {
            (&self.path, "path")
        } else if ast::Pat::can_cast(kind) {
            (&self.pattern, "pattern")
        } else {
            fail_match!("Matching nodes of kind {:?} is not supported", kind);
        };
        match tree {
            Some(tree) => Ok(tree),
            // The pattern was tokenizable, but didn't parse as this particular kind of syntax.
            None => fail_match!("Pattern cannot be parsed as a {}", kind_name),
        }
    }
}
534 | |||
535 | impl NodeKind { | ||
536 | fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> { | ||
537 | let ok = match self { | ||
538 | Self::Literal => { | ||
539 | mark::hit!(literal_constraint); | ||
540 | ast::Literal::can_cast(node.kind()) | ||
541 | } | ||
542 | }; | ||
543 | if !ok { | ||
544 | fail_match!("Code '{}' isn't of kind {:?}", node.text(), self); | ||
545 | } | ||
546 | Ok(()) | ||
547 | } | ||
548 | } | ||
549 | |||
550 | // If `node` contains nothing but an ident then return it, otherwise return None. | ||
551 | fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> { | ||
552 | match element { | ||
553 | SyntaxElement::Token(t) => { | ||
554 | if t.kind() == SyntaxKind::IDENT { | ||
555 | return Some(t); | ||
556 | } | ||
557 | } | ||
558 | SyntaxElement::Node(n) => { | ||
559 | let mut children = n.children_with_tokens(); | ||
560 | if let (Some(only_child), None) = (children.next(), children.next()) { | ||
561 | return only_ident(only_child); | ||
562 | } | ||
563 | } | ||
564 | } | ||
565 | None | ||
566 | } | ||
567 | |||
/// Iterator over the children of a pattern node that skips trivia (whitespace and comments).
struct PatternIterator {
    // Underlying iterator over all children, including trivia.
    iter: SyntaxElementChildren,
}
571 | |||
572 | impl Iterator for PatternIterator { | ||
573 | type Item = SyntaxElement; | ||
574 | |||
575 | fn next(&mut self) -> Option<SyntaxElement> { | ||
576 | while let Some(element) = self.iter.next() { | ||
577 | if !element.kind().is_trivia() { | ||
578 | return Some(element); | ||
579 | } | ||
580 | } | ||
581 | None | ||
582 | } | ||
583 | } | ||
584 | |||
impl PatternIterator {
    /// Creates an iterator over the non-trivia children of `parent`.
    fn new(parent: &SyntaxNode) -> Self {
        Self { iter: parent.children_with_tokens() }
    }
}
590 | |||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{MatchFinder, SsrRule};

    /// End-to-end check over a single fixture file: parse a rule, find its one match, inspect
    /// the captured placeholder value, then render the replacement and verify the edited text.
    #[test]
    fn parse_match_replace() {
        let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
        let input = "fn foo() {} fn main() { foo(1+2); }";

        use ra_db::fixture::WithFixture;
        let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(input);
        let mut match_finder = MatchFinder::new(&db);
        match_finder.add_rule(rule);
        let matches = match_finder.find_matches_in_file(file_id);
        assert_eq!(matches.matches.len(), 1);
        assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
        assert_eq!(matches.matches[0].placeholder_values.len(), 1);
        // `$x` should have captured the whole argument expression.
        assert_eq!(
            matches.matches[0].placeholder_values[&Var("x".to_string())]
                .node
                .as_ref()
                .unwrap()
                .text(),
            "1+2"
        );

        // Render the replacement and apply the resulting edit to the original source.
        let edit = crate::replacing::matches_to_edit(&matches, input);
        let mut after = input.to_string();
        edit.apply(&mut after);
        assert_eq!(after, "fn foo() {} fn main() { bar(1+2); }");
    }
}
diff --git a/crates/ra_ssr/src/parsing.rs b/crates/ra_ssr/src/parsing.rs new file mode 100644 index 000000000..4aee97bb2 --- /dev/null +++ b/crates/ra_ssr/src/parsing.rs | |||
@@ -0,0 +1,343 @@ | |||
1 | //! This file contains code for parsing SSR rules, which look something like `foo($a) ==>> bar($b)`. | ||
2 | //! We first split everything before and after the separator `==>>`. Next, both the search pattern | ||
3 | //! and the replacement template get tokenized by the Rust tokenizer. Tokens are then searched for | ||
4 | //! placeholders, which start with `$`. For replacement templates, this is the final form. For | ||
5 | //! search patterns, we go further and parse the pattern as each kind of thing that we can match. | ||
6 | //! e.g. expressions, type references etc. | ||
7 | |||
8 | use crate::errors::bail; | ||
9 | use crate::{SsrError, SsrPattern, SsrRule}; | ||
10 | use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, T}; | ||
11 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
12 | use std::str::FromStr; | ||
13 | |||
/// A replacement template: the tokenized right-hand side of a rule, containing literal tokens
/// and placeholders to be substituted with matched code.
#[derive(Clone, Debug)]
pub(crate) struct SsrTemplate {
    pub(crate) tokens: Vec<PatternElement>,
}
18 | |||
/// A search pattern as the user wrote it: a flat sequence of tokens and placeholders, before
/// being parsed as any particular kind of Rust syntax.
#[derive(Debug)]
pub(crate) struct RawSearchPattern {
    tokens: Vec<PatternElement>,
}
23 | |||
// Part of a search or replace pattern.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum PatternElement {
    /// A literal token from the pattern text.
    Token(Token),
    /// A placeholder, e.g. `$a`.
    Placeholder(Placeholder),
}
30 | |||
/// A named placeholder within a pattern, e.g. `$a` or `${a:kind(literal)}`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Placeholder {
    /// The name of this placeholder. e.g. for "$a", this would be "a"
    pub(crate) ident: SmolStr,
    /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
    stand_in_name: String,
    /// Constraints that code matched by this placeholder must satisfy.
    pub(crate) constraints: Vec<Constraint>,
}
39 | |||
/// A restriction on what a placeholder is permitted to match.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum Constraint {
    /// The matched node must be of the given kind.
    Kind(NodeKind),
    /// The wrapped constraint must not hold.
    Not(Box<Constraint>),
}
45 | |||
/// Node kinds that a `kind(...)` constraint can name.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum NodeKind {
    Literal,
}
50 | |||
/// A single token of a pattern, as produced by `tokenize`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Token {
    // The syntax kind assigned by the Rust tokenizer.
    kind: SyntaxKind,
    pub(crate) text: SmolStr,
}
56 | |||
57 | impl FromStr for SsrRule { | ||
58 | type Err = SsrError; | ||
59 | |||
60 | fn from_str(query: &str) -> Result<SsrRule, SsrError> { | ||
61 | let mut it = query.split("==>>"); | ||
62 | let pattern = it.next().expect("at least empty string").trim(); | ||
63 | let template = it | ||
64 | .next() | ||
65 | .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))? | ||
66 | .trim() | ||
67 | .to_string(); | ||
68 | if it.next().is_some() { | ||
69 | return Err(SsrError("More than one delimiter found".into())); | ||
70 | } | ||
71 | let rule = SsrRule { pattern: pattern.parse()?, template: template.parse()? }; | ||
72 | validate_rule(&rule)?; | ||
73 | Ok(rule) | ||
74 | } | ||
75 | } | ||
76 | |||
77 | impl FromStr for RawSearchPattern { | ||
78 | type Err = SsrError; | ||
79 | |||
80 | fn from_str(pattern_str: &str) -> Result<RawSearchPattern, SsrError> { | ||
81 | Ok(RawSearchPattern { tokens: parse_pattern(pattern_str)? }) | ||
82 | } | ||
83 | } | ||
84 | |||
85 | impl RawSearchPattern { | ||
86 | /// Returns this search pattern as Rust source code that we can feed to the Rust parser. | ||
87 | fn as_rust_code(&self) -> String { | ||
88 | let mut res = String::new(); | ||
89 | for t in &self.tokens { | ||
90 | res.push_str(match t { | ||
91 | PatternElement::Token(token) => token.text.as_str(), | ||
92 | PatternElement::Placeholder(placeholder) => placeholder.stand_in_name.as_str(), | ||
93 | }); | ||
94 | } | ||
95 | res | ||
96 | } | ||
97 | |||
98 | fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> { | ||
99 | let mut res = FxHashMap::default(); | ||
100 | for t in &self.tokens { | ||
101 | if let PatternElement::Placeholder(placeholder) = t { | ||
102 | res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone()); | ||
103 | } | ||
104 | } | ||
105 | res | ||
106 | } | ||
107 | } | ||
108 | |||
109 | impl FromStr for SsrPattern { | ||
110 | type Err = SsrError; | ||
111 | |||
112 | fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> { | ||
113 | let raw: RawSearchPattern = pattern_str.parse()?; | ||
114 | let raw_str = raw.as_rust_code(); | ||
115 | let res = SsrPattern { | ||
116 | expr: ast::Expr::parse(&raw_str).ok().map(|n| n.syntax().clone()), | ||
117 | type_ref: ast::TypeRef::parse(&raw_str).ok().map(|n| n.syntax().clone()), | ||
118 | item: ast::ModuleItem::parse(&raw_str).ok().map(|n| n.syntax().clone()), | ||
119 | path: ast::Path::parse(&raw_str).ok().map(|n| n.syntax().clone()), | ||
120 | pattern: ast::Pat::parse(&raw_str).ok().map(|n| n.syntax().clone()), | ||
121 | placeholders_by_stand_in: raw.placeholders_by_stand_in(), | ||
122 | raw, | ||
123 | }; | ||
124 | if res.expr.is_none() | ||
125 | && res.type_ref.is_none() | ||
126 | && res.item.is_none() | ||
127 | && res.path.is_none() | ||
128 | && res.pattern.is_none() | ||
129 | { | ||
130 | bail!("Pattern is not a valid Rust expression, type, item, path or pattern"); | ||
131 | } | ||
132 | Ok(res) | ||
133 | } | ||
134 | } | ||
135 | |||
impl FromStr for SsrTemplate {
    type Err = SsrError;

    fn from_str(pattern_str: &str) -> Result<SsrTemplate, SsrError> {
        let tokens = parse_pattern(pattern_str)?;
        // Validate that the template is a valid fragment of Rust code. We reuse the validation
        // logic for search patterns since the only thing that differs is the error message.
        if SsrPattern::from_str(pattern_str).is_err() {
            bail!("Replacement is not a valid Rust expression, type, item, path or pattern");
        }
        // `tokens` retains whatever whitespace the user wrote, so it can serve directly as the
        // template; the SsrPattern parse above was for validation only.
        Ok(SsrTemplate { tokens })
    }
}
150 | |||
/// Returns `pattern_str`, parsed as a search or replace pattern: a flat sequence of literal
/// tokens and placeholders (`$name` or `${name:constraints}`). All tokens, including whitespace,
/// are kept as-is. Fails if the same placeholder name is bound more than once.
fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
    let mut res = Vec::new();
    // Tracks names already seen, so duplicate placeholder bindings can be rejected.
    let mut placeholder_names = FxHashSet::default();
    let mut tokens = tokenize(pattern_str)?.into_iter();
    while let Some(token) = tokens.next() {
        if token.kind == T![$] {
            // `$` introduces a placeholder; `parse_placeholder` consumes its remaining tokens.
            let placeholder = parse_placeholder(&mut tokens)?;
            if !placeholder_names.insert(placeholder.ident.clone()) {
                bail!("Name `{}` repeats more than once", placeholder.ident);
            }
            res.push(PatternElement::Placeholder(placeholder));
        } else {
            res.push(PatternElement::Token(token));
        }
    }
    Ok(res)
}
171 | |||
172 | /// Checks for errors in a rule. e.g. the replace pattern referencing placeholders that the search | ||
173 | /// pattern didn't define. | ||
174 | fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> { | ||
175 | let mut defined_placeholders = FxHashSet::default(); | ||
176 | for p in &rule.pattern.raw.tokens { | ||
177 | if let PatternElement::Placeholder(placeholder) = p { | ||
178 | defined_placeholders.insert(&placeholder.ident); | ||
179 | } | ||
180 | } | ||
181 | let mut undefined = Vec::new(); | ||
182 | for p in &rule.template.tokens { | ||
183 | if let PatternElement::Placeholder(placeholder) = p { | ||
184 | if !defined_placeholders.contains(&placeholder.ident) { | ||
185 | undefined.push(format!("${}", placeholder.ident)); | ||
186 | } | ||
187 | if !placeholder.constraints.is_empty() { | ||
188 | bail!("Replacement placeholders cannot have constraints"); | ||
189 | } | ||
190 | } | ||
191 | } | ||
192 | if !undefined.is_empty() { | ||
193 | bail!("Replacement contains undefined placeholders: {}", undefined.join(", ")); | ||
194 | } | ||
195 | Ok(()) | ||
196 | } | ||
197 | |||
198 | fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> { | ||
199 | let mut start = 0; | ||
200 | let (raw_tokens, errors) = ra_syntax::tokenize(source); | ||
201 | if let Some(first_error) = errors.first() { | ||
202 | bail!("Failed to parse pattern: {}", first_error); | ||
203 | } | ||
204 | let mut tokens: Vec<Token> = Vec::new(); | ||
205 | for raw_token in raw_tokens { | ||
206 | let token_len = usize::from(raw_token.len); | ||
207 | tokens.push(Token { | ||
208 | kind: raw_token.kind, | ||
209 | text: SmolStr::new(&source[start..start + token_len]), | ||
210 | }); | ||
211 | start += token_len; | ||
212 | } | ||
213 | Ok(tokens) | ||
214 | } | ||
215 | |||
/// Parses the remainder of a placeholder after its introducing `$` has been consumed: either a
/// bare identifier (`$name`) or a braced form with optional constraints
/// (`${name:constraint:...}`).
fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
    let mut name = None;
    let mut constraints = Vec::new();
    if let Some(token) = tokens.next() {
        match token.kind {
            SyntaxKind::IDENT => {
                // Bare form: `$name`.
                name = Some(token.text);
            }
            T!['{'] => {
                // Braced form: first an optional identifier naming the placeholder...
                let token =
                    tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
                if token.kind == SyntaxKind::IDENT {
                    name = Some(token.text);
                }
                // ...then any number of `:constraint` clauses up to the closing brace.
                loop {
                    let token = tokens
                        .next()
                        .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
                    match token.kind {
                        T![:] => {
                            constraints.push(parse_constraint(tokens)?);
                        }
                        T!['}'] => break,
                        _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
                    }
                }
            }
            _ => {
                bail!("Placeholders should either be $name or ${{name:constraints}}");
            }
        }
    }
    // Both forms require a name; `${}` or a trailing bare `$` is an error.
    let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
    Ok(Placeholder::new(name, constraints))
}
251 | |||
/// Parses a single placeholder constraint, e.g. `kind(literal)` or `not(kind(literal))`.
/// Consumes tokens up to and including the constraint's closing parenthesis.
fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> {
    let constraint_type = tokens
        .next()
        .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))?
        .text
        .to_string();
    match constraint_type.as_str() {
        "kind" => {
            // `kind(<ident>)` - the identifier names a NodeKind.
            expect_token(tokens, "(")?;
            let t = tokens.next().ok_or_else(|| {
                SsrError::new("Unexpected end of constraint while looking for kind")
            })?;
            if t.kind != SyntaxKind::IDENT {
                bail!("Expected ident, found {:?} while parsing kind constraint", t.kind);
            }
            expect_token(tokens, ")")?;
            Ok(Constraint::Kind(NodeKind::from(&t.text)?))
        }
        "not" => {
            // `not(<constraint>)` - recursively parse and invert the inner constraint.
            expect_token(tokens, "(")?;
            let sub = parse_constraint(tokens)?;
            expect_token(tokens, ")")?;
            Ok(Constraint::Not(Box::new(sub)))
        }
        x => bail!("Unsupported constraint type '{}'", x),
    }
}
279 | |||
280 | fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> { | ||
281 | if let Some(t) = tokens.next() { | ||
282 | if t.text == expected { | ||
283 | return Ok(()); | ||
284 | } | ||
285 | bail!("Expected {} found {}", expected, t.text); | ||
286 | } | ||
287 | bail!("Expected {} found end of stream", expected); | ||
288 | } | ||
289 | |||
290 | impl NodeKind { | ||
291 | fn from(name: &SmolStr) -> Result<NodeKind, SsrError> { | ||
292 | Ok(match name.as_str() { | ||
293 | "literal" => NodeKind::Literal, | ||
294 | _ => bail!("Unknown node kind '{}'", name), | ||
295 | }) | ||
296 | } | ||
297 | } | ||
298 | |||
impl Placeholder {
    /// Creates a placeholder named `name`, deriving the unique identifier that stands in for it
    /// when the pattern is parsed as Rust code.
    fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
        Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name }
    }
}
304 | |||
#[cfg(test)]
mod tests {
    use super::*;

    /// Checks that a well-formed rule parses into the expected token/placeholder sequences for
    /// both the search pattern and the replacement template (whitespace tokens included).
    #[test]
    fn parser_happy_case() {
        // Helper for building an expected literal token.
        fn token(kind: SyntaxKind, text: &str) -> PatternElement {
            PatternElement::Token(Token { kind, text: SmolStr::new(text) })
        }
        // Helper for building an expected constraint-free placeholder.
        fn placeholder(name: &str) -> PatternElement {
            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
        }
        let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
        assert_eq!(
            result.pattern.raw.tokens,
            vec![
                token(SyntaxKind::IDENT, "foo"),
                token(T!['('], "("),
                placeholder("a"),
                token(T![,], ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("b"),
                token(T![')'], ")"),
            ]
        );
        assert_eq!(
            result.template.tokens,
            vec![
                token(SyntaxKind::IDENT, "bar"),
                token(T!['('], "("),
                placeholder("b"),
                token(T![,], ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("a"),
                token(T![')'], ")"),
            ]
        );
    }
}
diff --git a/crates/ra_ssr/src/replacing.rs b/crates/ra_ssr/src/replacing.rs new file mode 100644 index 000000000..e43cc5167 --- /dev/null +++ b/crates/ra_ssr/src/replacing.rs | |||
@@ -0,0 +1,66 @@ | |||
1 | //! Code for applying replacement templates for matches that have previously been found. | ||
2 | |||
3 | use crate::matching::Var; | ||
4 | use crate::parsing::PatternElement; | ||
5 | use crate::{Match, SsrMatches}; | ||
6 | use ra_syntax::ast::AstToken; | ||
7 | use ra_syntax::TextSize; | ||
8 | use ra_text_edit::TextEdit; | ||
9 | |||
10 | /// Returns a text edit that will replace each match in `matches` with its corresponding replacement | ||
11 | /// template. Placeholders in the template will have been substituted with whatever they matched to | ||
12 | /// in the original code. | ||
13 | pub(crate) fn matches_to_edit(matches: &SsrMatches, file_src: &str) -> TextEdit { | ||
14 | matches_to_edit_at_offset(matches, file_src, 0.into()) | ||
15 | } | ||
16 | |||
17 | fn matches_to_edit_at_offset( | ||
18 | matches: &SsrMatches, | ||
19 | file_src: &str, | ||
20 | relative_start: TextSize, | ||
21 | ) -> TextEdit { | ||
22 | let mut edit_builder = ra_text_edit::TextEditBuilder::default(); | ||
23 | for m in &matches.matches { | ||
24 | edit_builder.replace( | ||
25 | m.range.range.checked_sub(relative_start).unwrap(), | ||
26 | render_replace(m, file_src), | ||
27 | ); | ||
28 | } | ||
29 | edit_builder.finish() | ||
30 | } | ||
31 | |||
32 | fn render_replace(match_info: &Match, file_src: &str) -> String { | ||
33 | let mut out = String::new(); | ||
34 | for r in &match_info.template.tokens { | ||
35 | match r { | ||
36 | PatternElement::Token(t) => out.push_str(t.text.as_str()), | ||
37 | PatternElement::Placeholder(p) => { | ||
38 | if let Some(placeholder_value) = | ||
39 | match_info.placeholder_values.get(&Var(p.ident.to_string())) | ||
40 | { | ||
41 | let range = &placeholder_value.range.range; | ||
42 | let mut matched_text = | ||
43 | file_src[usize::from(range.start())..usize::from(range.end())].to_owned(); | ||
44 | let edit = matches_to_edit_at_offset( | ||
45 | &placeholder_value.inner_matches, | ||
46 | file_src, | ||
47 | range.start(), | ||
48 | ); | ||
49 | edit.apply(&mut matched_text); | ||
50 | out.push_str(&matched_text); | ||
51 | } else { | ||
52 | // We validated that all placeholder references were valid before we | ||
53 | // started, so this shouldn't happen. | ||
54 | panic!( | ||
55 | "Internal error: replacement referenced unknown placeholder {}", | ||
56 | p.ident | ||
57 | ); | ||
58 | } | ||
59 | } | ||
60 | } | ||
61 | } | ||
62 | for comment in &match_info.ignored_comments { | ||
63 | out.push_str(&comment.syntax().to_string()); | ||
64 | } | ||
65 | out | ||
66 | } | ||
diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs new file mode 100644 index 000000000..f20ae2cdf --- /dev/null +++ b/crates/ra_ssr/src/tests.rs | |||
@@ -0,0 +1,582 @@ | |||
1 | use crate::{MatchFinder, SsrRule}; | ||
2 | use ra_db::{FileId, SourceDatabaseExt}; | ||
3 | use test_utils::mark; | ||
4 | |||
5 | fn parse_error_text(query: &str) -> String { | ||
6 | format!("{}", query.parse::<SsrRule>().unwrap_err()) | ||
7 | } | ||
8 | |||
// Each of the following tests checks the exact error message produced for a malformed rule.

#[test]
fn parser_empty_query() {
    assert_eq!(parse_error_text(""), "Parse error: Cannot find delimiter `==>>`");
}

#[test]
fn parser_no_delimiter() {
    assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delimiter `==>>`");
}

#[test]
fn parser_two_delimiters() {
    assert_eq!(
        parse_error_text("foo() ==>> a ==>> b "),
        "Parse error: More than one delimiter found"
    );
}

#[test]
fn parser_repeated_name() {
    assert_eq!(
        parse_error_text("foo($a, $a) ==>>"),
        "Parse error: Name `a` repeats more than once"
    );
}

#[test]
fn parser_invalid_pattern() {
    assert_eq!(
        parse_error_text(" ==>> ()"),
        "Parse error: Pattern is not a valid Rust expression, type, item, path or pattern"
    );
}

#[test]
fn parser_invalid_template() {
    assert_eq!(
        parse_error_text("() ==>> )"),
        "Parse error: Replacement is not a valid Rust expression, type, item, path or pattern"
    );
}

#[test]
fn parser_undefined_placeholder_in_replacement() {
    assert_eq!(
        parse_error_text("42 ==>> $a"),
        "Parse error: Replacement contains undefined placeholders: $a"
    );
}
58 | |||
/// Builds a test database containing a single file with the given content.
fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FileId) {
    use ra_db::fixture::WithFixture;
    ra_ide_db::RootDatabase::with_single_file(code)
}
63 | |||
/// Applies a single rule to `input` and checks the transformed result. See
/// `assert_ssr_transforms`.
fn assert_ssr_transform(rule: &str, input: &str, result: &str) {
    assert_ssr_transforms(&[rule], input, result);
}
67 | |||
/// Runs `code` through fixture parsing and returns the stored file text, so it can be compared
/// against other text that has been through the same transformation.
fn normalize_code(code: &str) -> String {
    let (db, file_id) = single_file(code);
    db.file_text(file_id).to_string()
}
72 | |||
73 | fn assert_ssr_transforms(rules: &[&str], input: &str, result: &str) { | ||
74 | let (db, file_id) = single_file(input); | ||
75 | let mut match_finder = MatchFinder::new(&db); | ||
76 | for rule in rules { | ||
77 | let rule: SsrRule = rule.parse().unwrap(); | ||
78 | match_finder.add_rule(rule); | ||
79 | } | ||
80 | if let Some(edits) = match_finder.edits_for_file(file_id) { | ||
81 | // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters | ||
82 | // stuff. | ||
83 | let mut after = db.file_text(file_id).to_string(); | ||
84 | edits.apply(&mut after); | ||
85 | // Likewise, we need to make sure that whatever transformations fixture parsing applies, | ||
86 | // also get applied to our expected result. | ||
87 | let result = normalize_code(result); | ||
88 | assert_eq!(after, result); | ||
89 | } else { | ||
90 | panic!("No edits were made"); | ||
91 | } | ||
92 | } | ||
93 | |||
/// Prints, for debugging a failed assertion, every node in `file_id` whose text is exactly
/// `snippet`, together with its match debug information.
fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet: &str) {
    let debug_info = match_finder.debug_where_text_equal(file_id, snippet);
    println!(
        "Match debug info: {} nodes had text exactly equal to '{}'",
        debug_info.len(),
        snippet
    );
    for (index, d) in debug_info.iter().enumerate() {
        println!("Node #{}\n{:#?}\n", index, d);
    }
}
105 | |||
/// Asserts that `pattern` matches exactly the snippets listed in `expected` within `code`.
/// On mismatch, prints debug info for the first expected snippet before failing.
fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
    let (db, file_id) = single_file(code);
    let mut match_finder = MatchFinder::new(&db);
    match_finder.add_search_pattern(pattern.parse().unwrap());
    let matched_strings: Vec<String> = match_finder
        .find_matches_in_file(file_id)
        .flattened()
        .matches
        .iter()
        .map(|m| m.matched_text())
        .collect();
    // Dump debug info up front - the assert below will then show the actual/expected diff.
    if matched_strings != expected && !expected.is_empty() {
        print_match_debug_info(&match_finder, file_id, &expected[0]);
    }
    assert_eq!(matched_strings, expected);
}
122 | |||
/// Asserts that `pattern` produces no matches at all within `code`, printing debug info for the
/// first unexpected match if any are found.
fn assert_no_match(pattern: &str, code: &str) {
    let (db, file_id) = single_file(code);
    let mut match_finder = MatchFinder::new(&db);
    match_finder.add_search_pattern(pattern.parse().unwrap());
    let matches = match_finder.find_matches_in_file(file_id).flattened().matches;
    if !matches.is_empty() {
        print_match_debug_info(&match_finder, file_id, &matches[0].matched_text());
        panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches);
    }
}
133 | |||
/// Asserts that matching `pattern` against the node(s) whose text equals `snippet` within `code`
/// fails with exactly `expected_reason`.
fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
    let (db, file_id) = single_file(code);
    let mut match_finder = MatchFinder::new(&db);
    match_finder.add_search_pattern(pattern.parse().unwrap());
    let mut reasons = Vec::new();
    for d in match_finder.debug_where_text_equal(file_id, snippet) {
        if let Some(reason) = d.match_failure_reason() {
            reasons.push(reason.to_owned());
        }
    }
    assert_eq!(reasons, vec![expected_reason]);
}
146 | |||
// End-to-end transformation tests: each parses a rule, applies it to the input and checks the
// resulting source text.

#[test]
fn ssr_function_to_method() {
    assert_ssr_transform(
        "my_function($a, $b) ==>> ($a).my_method($b)",
        "fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }",
        "fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }",
    )
}

#[test]
fn ssr_nested_function() {
    assert_ssr_transform(
        "foo($a, $b, $c) ==>> bar($c, baz($a, $b))",
        "fn foo() {} fn main { foo (x + value.method(b), x+y-z, true && false) }",
        "fn foo() {} fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }",
    )
}

#[test]
fn ssr_expected_spacing() {
    assert_ssr_transform(
        "foo($x) + bar() ==>> bar($x)",
        "fn foo() {} fn bar() {} fn main() { foo(5) + bar() }",
        "fn foo() {} fn bar() {} fn main() { bar(5) }",
    );
}

// Pattern and code spacing differ; the match should still be found.
#[test]
fn ssr_with_extra_space() {
    assert_ssr_transform(
        "foo($x ) + bar() ==>> bar($x)",
        "fn foo() {} fn bar() {} fn main() { foo( 5 ) +bar( ) }",
        "fn foo() {} fn bar() {} fn main() { bar(5) }",
    );
}

// A comment inside code captured by a placeholder survives the transform in place.
#[test]
fn ssr_keeps_nested_comment() {
    assert_ssr_transform(
        "foo($x) ==>> bar($x)",
        "fn foo() {} fn main() { foo(other(5 /* using 5 */)) }",
        "fn foo() {} fn main() { bar(other(5 /* using 5 */)) }",
    )
}

// A comment skipped over by the pattern is re-emitted after the replacement.
#[test]
fn ssr_keeps_comment() {
    assert_ssr_transform(
        "foo($x) ==>> bar($x)",
        "fn foo() {} fn main() { foo(5 /* using 5 */) }",
        "fn foo() {} fn main() { bar(5)/* using 5 */ }",
    )
}

// Struct literal fields match by name, regardless of their order in the code.
#[test]
fn ssr_struct_lit() {
    assert_ssr_transform(
        "foo{a: $a, b: $b} ==>> foo::new($a, $b)",
        "fn foo() {} fn main() { foo{b:2, a:1} }",
        "fn foo() {} fn main() { foo::new(1, 2) }",
    )
}
209 | |||
210 | #[test] | ||
211 | fn ignores_whitespace() { | ||
212 | assert_matches("1+2", "fn f() -> i32 {1 + 2}", &["1 + 2"]); | ||
213 | assert_matches("1 + 2", "fn f() -> i32 {1+2}", &["1+2"]); | ||
214 | } | ||
215 | |||
216 | #[test] | ||
217 | fn no_match() { | ||
218 | assert_no_match("1 + 3", "fn f() -> i32 {1 + 2}"); | ||
219 | } | ||
220 | |||
221 | #[test] | ||
222 | fn match_fn_definition() { | ||
223 | assert_matches("fn $a($b: $t) {$c}", "fn f(a: i32) {bar()}", &["fn f(a: i32) {bar()}"]); | ||
224 | } | ||
225 | |||
226 | #[test] | ||
227 | fn match_struct_definition() { | ||
228 | let code = r#" | ||
229 | struct Option<T> {} | ||
230 | struct Bar {} | ||
231 | struct Foo {name: Option<String>}"#; | ||
232 | assert_matches("struct $n {$f: Option<String>}", code, &["struct Foo {name: Option<String>}"]); | ||
233 | } | ||
234 | |||
235 | #[test] | ||
236 | fn match_expr() { | ||
237 | let code = r#" | ||
238 | fn foo() {} | ||
239 | fn f() -> i32 {foo(40 + 2, 42)}"#; | ||
240 | assert_matches("foo($a, $b)", code, &["foo(40 + 2, 42)"]); | ||
241 | assert_no_match("foo($a, $b, $c)", code); | ||
242 | assert_no_match("foo($a)", code); | ||
243 | } | ||
244 | |||
245 | #[test] | ||
246 | fn match_nested_method_calls() { | ||
247 | assert_matches( | ||
248 | "$a.z().z().z()", | ||
249 | "fn f() {h().i().j().z().z().z().d().e()}", | ||
250 | &["h().i().j().z().z().z()"], | ||
251 | ); | ||
252 | } | ||
253 | |||
254 | // Make sure that our node matching semantics don't differ within macro calls. | ||
255 | #[test] | ||
256 | fn match_nested_method_calls_with_macro_call() { | ||
257 | assert_matches( | ||
258 | "$a.z().z().z()", | ||
259 | r#" | ||
260 | macro_rules! m1 { ($a:expr) => {$a}; } | ||
261 | fn f() {m1!(h().i().j().z().z().z().d().e())}"#, | ||
262 | &["h().i().j().z().z().z()"], | ||
263 | ); | ||
264 | } | ||
265 | |||
// A placeholder can bind an arbitrarily complex sub-expression (here a nested call),
// and the same code can be matched by patterns rooted at different call sites.
#[test]
fn match_complex_expr() {
    let code = r#"
fn foo() {} fn bar() {}
fn f() -> i32 {foo(bar(40, 2), 42)}"#;
    assert_matches("foo($a, $b)", code, &["foo(bar(40, 2), 42)"]);
    assert_no_match("foo($a, $b, $c)", code);
    assert_no_match("foo($a)", code);
    assert_matches("bar($a, $b)", code, &["bar(40, 2)"]);
}
276 | |||
// Trailing commas in the code should be ignored - and, symmetrically, a trailing comma
// in the pattern must not prevent a match against comma-less code.
#[test]
fn match_with_trailing_commas() {
    let cases = &[
        // Code has comma, pattern doesn't.
        ("foo($a, $b)", "fn foo() {} fn f() {foo(1, 2,);}", "foo(1, 2,)"),
        ("Foo{$a, $b}", "struct Foo {} fn f() {Foo{1, 2,};}", "Foo{1, 2,}"),
        // Pattern has comma, code doesn't.
        ("foo($a, $b,)", "fn foo() {} fn f() {foo(1, 2);}", "foo(1, 2)"),
        ("Foo{$a, $b,}", "struct Foo {} fn f() {Foo{1, 2};}", "Foo{1, 2}"),
    ];
    for &(pattern, code, expected) in cases {
        assert_matches(pattern, code, &[expected]);
    }
}
288 | |||
// Type patterns: a bare type matches itself and a placeholder can bind a generic
// argument, but the outer type's path still has to match exactly.
#[test]
fn match_type() {
    assert_matches("i32", "fn f() -> i32 {1 + 2}", &["i32"]);
    let option_code = "struct Option<T> {} fn f() -> Option<i32> {42}";
    assert_matches("Option<$a>", option_code, &["Option<i32>"]);
    // `Option<$a>` must not match a different generic type such as `Result<i32, ()>`.
    let result_code =
        "struct Option<T> {} struct Result<T, E> {} fn f() -> Result<i32, ()> {42}";
    assert_no_match("Option<$a>", result_code);
}
302 | |||
// Struct literals can be matched concretely, with placeholders for both field names and
// values, or with an empty field list.
#[test]
fn match_struct_instantiation() {
    let code = r#"
struct Foo {bar: i32, baz: i32}
fn f() {Foo {bar: 1, baz: 2}}"#;
    assert_matches("Foo {bar: 1, baz: 2}", code, &["Foo {bar: 1, baz: 2}"]);
    // Now with placeholders for all parts of the struct.
    assert_matches("Foo {$a: $b, $c: $d}", code, &["Foo {bar: 1, baz: 2}"]);
    assert_matches("Foo {}", "struct Foo {} fn f() {Foo {}}", &["Foo {}"]);
}
313 | |||
// Paths are matched segment by segment; a placeholder can replace either the qualifier
// or the final segment.
#[test]
fn match_path() {
    let code = r#"
mod foo {
fn bar() {}
}
fn f() {foo::bar(42)}"#;
    assert_matches("foo::bar", code, &["foo::bar"]);
    assert_matches("$a::bar", code, &["foo::bar"]);
    assert_matches("foo::$b", code, &["foo::bar"]);
}
325 | |||
// Patterns (here the tuple-struct pattern inside an `if let`) can be matched, not just
// expressions.
#[test]
fn match_pattern() {
    let code = "struct Some(); fn f() {if let Some(x) = foo() {}}";
    assert_matches("Some($a)", code, &["Some(x)"]);
}
330 | |||
// The `kind(literal)` constraint restricts a placeholder to literal expressions, and
// `not(...)` inverts a constraint.
#[test]
fn literal_constraint() {
    mark::check!(literal_constraint);
    let code = r#"
enum Option<T> { Some(T), None }
use Option::Some;
fn f1() {
let x1 = Some(42);
let x2 = Some("foo");
let x3 = Some(x1);
let x4 = Some(40 + 2);
let x5 = Some(true);
}
"#;
    // Integer, string and bool literals qualify...
    assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]);
    // ...while a variable reference and a binary expression do not.
    assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]);
}
348 | |||
// Struct-literal fields may appear in any order, but the field-name sets (and the bound
// values) in pattern and code must agree exactly.
#[test]
fn match_reordered_struct_instantiation() {
    assert_matches(
        "Foo {aa: 1, b: 2, ccc: 3}",
        "struct Foo {} fn f() {Foo {b: 2, ccc: 3, aa: 1}}",
        &["Foo {b: 2, ccc: 3, aa: 1}"],
    );
    // Mismatched field names, mismatched values, and sub/supersets of fields all fail.
    let no_match_cases = &[
        ("Foo {a: 1}", "struct Foo {} fn f() {Foo {b: 1}}"),
        ("Foo {a: 1}", "struct Foo {} fn f() {Foo {a: 2}}"),
        ("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {a: 1}}"),
        ("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {b: 2}}"),
        ("Foo {a: 1, }", "struct Foo {} fn f() {Foo {a: 1, b: 2}}"),
        ("Foo {a: 1, z: 9}", "struct Foo {} fn f() {Foo {a: 1}}"),
    ];
    for &(pattern, code) in no_match_cases {
        assert_no_match(pattern, code);
    }
}
363 | |||
// Macro invocations match when the macro path matches and the token trees match;
// placeholders can bind sub-expressions of the macro's arguments.
//
// NOTE(review): the fixtures previously used `fn() {...}` (a nameless, unparseable
// function item) and the two no-match fixtures were missing a closing paren
// (`foo!(41, 42, 43}`), so those assertions passed vacuously against code that didn't
// parse. Fixed so each assertion exercises what it claims.
#[test]
fn match_macro_invocation() {
    assert_matches(
        "foo!($a)",
        "macro_rules! foo {() => {}} fn f() {foo(foo!(foo()))}",
        &["foo!(foo())"],
    );
    assert_matches(
        "foo!(41, $a, 43)",
        "macro_rules! foo {() => {}} fn f() {foo!(41, 42, 43)}",
        &["foo!(41, 42, 43)"],
    );
    // A concrete token in the pattern (50) that differs from the code (41/43) is a non-match.
    assert_no_match("foo!(50, $a, 43)", "macro_rules! foo {() => {}} fn f() {foo!(41, 42, 43)}");
    assert_no_match("foo!(41, $a, 50)", "macro_rules! foo {() => {}} fn f() {foo!(41, 42, 43)}");
    assert_matches(
        "foo!($a())",
        "macro_rules! foo {() => {}} fn f() {foo!(bar())}",
        &["foo!(bar())"],
    );
}
384 | |||
// When matching within a macro expansion, we only allow matches of nodes that originated from
// the macro call, not from the macro definition.
#[test]
fn no_match_expression_from_macro() {
    // `42.clone()` exists only in the expansion; `.clone()` comes entirely from the macro
    // definition, so nothing may be reported as a match.
    assert_no_match(
        "$a.clone()",
        r#"
macro_rules! m1 {
() => {42.clone()}
}
fn f1() {m1!()}
"#,
    );
}
399 | |||
// We definitely don't want to allow matching of an expression that part originates from the
// macro call `42` and part from the macro definition `.clone()`.
#[test]
fn no_match_split_expression() {
    assert_no_match(
        "$a.clone()",
        r#"
macro_rules! m1 {
($x:expr) => {$x.clone()}
}
fn f1() {m1!(42)}
"#,
    );
}
414 | |||
// Replacement rewrites every occurrence of the pattern, not just the first.
#[test]
fn replace_function_call() {
    let rule = "foo() ==>> bar()";
    let before = "fn foo() {} fn f1() {foo(); foo();}";
    let after = "fn foo() {} fn f1() {bar(); bar();}";
    assert_ssr_transform(rule, before, after);
}
423 | |||
// Placeholder bindings can be emitted in a different order on the replacement side.
#[test]
fn replace_function_call_with_placeholders() {
    let rule = "foo($a, $b) ==>> bar($b, $a)";
    let before = "fn foo() {} fn f1() {foo(5, 42)}";
    let after = "fn foo() {} fn f1() {bar(42, 5)}";
    assert_ssr_transform(rule, before, after);
}
432 | |||
// When matches nest, both the outer and the inner occurrence are rewritten.
#[test]
fn replace_nested_function_calls() {
    let rule = "foo($a) ==>> bar($a)";
    let before = "fn foo() {} fn f1() {foo(foo(42))}";
    let after = "fn foo() {} fn f1() {bar(bar(42))}";
    assert_ssr_transform(rule, before, after);
}
441 | |||
// Rules can rewrite types, carrying a generic argument across via a placeholder.
#[test]
fn replace_type() {
    let rule = "Result<(), $a> ==>> Option<$a>";
    let before = "struct Result<T, E> {} fn f1() -> Result<(), Vec<Error>> {foo()}";
    let after = "struct Result<T, E> {} fn f1() -> Option<Vec<Error>> {foo()}";
    assert_ssr_transform(rule, before, after);
}
450 | |||
// Struct-literal fields are matched by name regardless of order in the code, and the
// bound values are substituted in the order the replacement names them.
#[test]
fn replace_struct_init() {
    let rule = "Foo {a: $a, b: $b} ==>> Foo::new($a, $b)";
    let before = "struct Foo {} fn f1() {Foo{b: 1, a: 2}}";
    let after = "struct Foo {} fn f1() {Foo::new(2, 1)}";
    assert_ssr_transform(rule, before, after);
}
459 | |||
// Macro invocations can appear on either side of a rule: as the pattern being removed
// (`try!($a)`) or with placeholders bound inside the macro's token tree.
#[test]
fn replace_macro_invocations() {
    let cases = &[
        (
            "try!($a) ==>> $a?",
            "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}",
            "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}",
        ),
        (
            "foo!($a($b)) ==>> foo($b, $a)",
            "macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}",
            "macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}",
        ),
    ];
    for &(rule, before, after) in cases {
        assert_ssr_transform(rule, before, after);
    }
}
473 | |||
// Binary-operator rewrites: operands bind whole sub-expressions (`2 * 3`), and in a
// left-associative chain of `+` the swap applies at every level.
#[test]
fn replace_binary_op() {
    let rule = "$a + $b ==>> $b + $a";
    assert_ssr_transform(rule, "fn f() {2 * 3 + 4 * 5}", "fn f() {4 * 5 + 2 * 3}");
    assert_ssr_transform(rule, "fn f() {1 + 2 + 3 + 4}", "fn f() {4 + 3 + 2 + 1}");
}
487 | |||
// `$a + $b` matches at every level of a left-associative chain of additions.
#[test]
fn match_binary_op() {
    let code = "fn f() {1 + 2 + 3 + 4}";
    assert_matches("$a + $b", code, &["1 + 2", "1 + 2 + 3", "1 + 2 + 3 + 4"]);
}
492 | |||
// With several rules, whichever rule applies at a site is used: the outer `+ 1` becomes
// `add_one` while the inner addition is handled by the general `add` rule.
#[test]
fn multiple_rules() {
    let rules = &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"];
    assert_ssr_transforms(
        rules,
        "fn f() -> i32 {3 + 2 + 1}",
        "fn f() -> i32 {add_one(add(3, 2))}",
    )
}
501 | |||
// Matching finds code inside a macro invocation's token tree. The macro here discards its
// first argument; `aaa.bar()` survives into the expansion and is matched there.
#[test]
fn match_within_macro_invocation() {
    let code = r#"
macro_rules! foo {
($a:stmt; $b:expr) => {
$b
};
}
struct A {}
impl A {
fn bar() {}
}
fn f1() {
let aaa = A {};
foo!(macro_ignores_this(); aaa.bar());
}
"#;
    assert_matches("$a.bar()", code, &["aaa.bar()"]);
}
521 | |||
// A replacement applied to code inside a macro call is written back at the call site,
// with the rest of the method chain (`.o2()`) preserved around it.
#[test]
fn replace_within_macro_expansion() {
    assert_ssr_transform(
        "$a.foo() ==>> bar($a)",
        r#"
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {macro1!(5.x().foo().o2())}"#,
        r#"
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {macro1!(bar(5.x()).o2())}"#,
    )
}
538 | |||
// Whitespace around the matched operands inside the macro call should survive the rewrite.
// NOTE(review): both fixture strings appear to be missing the closing paren of the
// `macro1!(...)` call (`... + 4}` / `... 1 * 2}`) - presumably relying on parser error
// recovery; confirm this is intentional. Also verify the operand spacing: as rendered
// the strings use single spaces, which would make the test's name vacuous.
#[test]
fn preserves_whitespace_within_macro_expansion() {
    assert_ssr_transform(
        "$a + $b ==>> $b - $a",
        r#"
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {macro1!(1 * 2 + 3 + 4}"#,
        r#"
macro_rules! macro1 {
($a:expr) => {$a}
}
fn f() {macro1!(4 - 3 - 1 * 2}"#,
    )
}
555 | |||
// When asked why a particular piece of code didn't match, the reported reason pinpoints
// the first diverging token - both in plain code and for code inside a macro expansion.
#[test]
fn match_failure_reasons() {
    let code = r#"
fn bar() {}
macro_rules! foo {
($a:expr) => {
1 + $a + 2
};
}
fn f1() {
bar(1, 2);
foo!(5 + 43.to_string() + 5);
}
"#;
    assert_match_failure_reason(
        "bar($a, 3)",
        code,
        "bar(1, 2)",
        r#"Pattern wanted token '3' (INT_NUMBER), but code had token '2' (INT_NUMBER)"#,
    );
    // Same reporting for code that only exists inside a macro invocation's token tree.
    assert_match_failure_reason(
        "42.to_string()",
        code,
        "43.to_string()",
        r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#,
    );
}