Diffstat (limited to 'crates/ra_ssr/src')
-rw-r--r--  crates/ra_ssr/src/lib.rs       | 162
-rw-r--r--  crates/ra_ssr/src/matching.rs  |  51
-rw-r--r--  crates/ra_ssr/src/parsing.rs   | 110
-rw-r--r--  crates/ra_ssr/src/replacing.rs |   6
-rw-r--r--  crates/ra_ssr/src/tests.rs     | 212
5 files changed, 362 insertions, 179 deletions
diff --git a/crates/ra_ssr/src/lib.rs b/crates/ra_ssr/src/lib.rs
index e148f4564..422e15ee6 100644
--- a/crates/ra_ssr/src/lib.rs
+++ b/crates/ra_ssr/src/lib.rs
@@ -9,10 +9,11 @@ mod replacing;
 #[cfg(test)]
 mod tests;
 
-use crate::matching::Match;
+pub use crate::matching::Match;
+use crate::matching::{record_match_fails_reasons_scope, MatchFailureReason};
 use hir::Semantics;
 use ra_db::{FileId, FileRange};
-use ra_syntax::{ast, AstNode, SmolStr, SyntaxNode};
+use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange};
 use ra_text_edit::TextEdit;
 use rustc_hash::FxHashMap;
 
@@ -26,7 +27,7 @@ pub struct SsrRule {
 }
 
 #[derive(Debug)]
-struct SsrPattern {
+pub struct SsrPattern {
     raw: parsing::RawSearchPattern,
     /// Placeholders keyed by the stand-in ident that we use in Rust source code.
     placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
@@ -45,7 +46,7 @@ pub struct SsrError(String);
 
 #[derive(Debug, Default)]
 pub struct SsrMatches {
-    matches: Vec<Match>,
+    pub matches: Vec<Match>,
 }
 
 /// Searches a crate for pattern matches and possibly replaces them with something else.
@@ -64,6 +65,12 @@ impl<'db> MatchFinder<'db> {
         self.rules.push(rule);
     }
 
+    /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
+    /// intend to do replacement, use `add_rule` instead.
+    pub fn add_search_pattern(&mut self, pattern: SsrPattern) {
+        self.add_rule(SsrRule { pattern, template: "()".parse().unwrap() })
+    }
+
     pub fn edits_for_file(&self, file_id: FileId) -> Option<TextEdit> {
         let matches = self.find_matches_in_file(file_id);
         if matches.matches.is_empty() {
@@ -74,7 +81,7 @@ impl<'db> MatchFinder<'db> {
         }
     }
 
-    fn find_matches_in_file(&self, file_id: FileId) -> SsrMatches {
+    pub fn find_matches_in_file(&self, file_id: FileId) -> SsrMatches {
         let file = self.sema.parse(file_id);
         let code = file.syntax();
         let mut matches = SsrMatches::default();
@@ -82,6 +89,32 @@ impl<'db> MatchFinder<'db> {
         matches
     }
 
+    /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
+    /// them, while recording reasons why they don't match. This API is useful for command
+    /// line-based debugging where providing a range is difficult.
+    pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
+        use ra_db::SourceDatabaseExt;
+        let file = self.sema.parse(file_id);
+        let mut res = Vec::new();
+        let file_text = self.sema.db.file_text(file_id);
+        let mut remaining_text = file_text.as_str();
+        let mut base = 0;
+        let len = snippet.len() as u32;
+        while let Some(offset) = remaining_text.find(snippet) {
+            let start = base + offset as u32;
+            let end = start + len;
+            self.output_debug_for_nodes_at_range(
+                file.syntax(),
+                FileRange { file_id, range: TextRange::new(start.into(), end.into()) },
+                &None,
+                &mut res,
+            );
+            remaining_text = &remaining_text[offset + snippet.len()..];
+            base = end;
+        }
+        res
+    }
+
     fn find_matches(
         &self,
         code: &SyntaxNode,
@@ -128,6 +161,59 @@ impl<'db> MatchFinder<'db> {
             self.find_matches(&child, restrict_range, matches_out);
         }
     }
+
+    fn output_debug_for_nodes_at_range(
+        &self,
+        node: &SyntaxNode,
+        range: FileRange,
+        restrict_range: &Option<FileRange>,
+        out: &mut Vec<MatchDebugInfo>,
+    ) {
+        for node in node.children() {
+            let node_range = self.sema.original_range(&node);
+            if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range)
+            {
+                continue;
+            }
+            if node_range.range == range.range {
+                for rule in &self.rules {
+                    let pattern =
+                        rule.pattern.tree_for_kind_with_reason(node.kind()).map(|p| p.clone());
+                    out.push(MatchDebugInfo {
+                        matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
+                            .map_err(|e| MatchFailureReason {
+                                reason: e.reason.unwrap_or_else(|| {
+                                    "Match failed, but no reason was given".to_owned()
+                                }),
+                            }),
+                        pattern,
+                        node: node.clone(),
+                    });
+                }
+            } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
+                if let Some(expanded) = self.sema.expand(&macro_call) {
+                    if let Some(tt) = macro_call.token_tree() {
+                        self.output_debug_for_nodes_at_range(
+                            &expanded,
+                            range,
+                            &Some(self.sema.original_range(tt.syntax())),
+                            out,
+                        );
+                    }
+                }
+            } else {
+                self.output_debug_for_nodes_at_range(&node, range, restrict_range, out);
+            }
+        }
+    }
+}
+
+pub struct MatchDebugInfo {
+    node: SyntaxNode,
+    /// Our search pattern parsed as the same kind of syntax node as `node`. e.g. expression, item,
+    /// etc. Will be absent if the pattern can't be parsed as that kind.
+    pattern: Result<SyntaxNode, MatchFailureReason>,
+    matched: Result<Match, MatchFailureReason>,
 }
 
 impl std::fmt::Display for SsrError {
@@ -136,4 +222,70 @@ impl std::fmt::Display for SsrError {
     }
 }
 
+impl std::fmt::Debug for MatchDebugInfo {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "========= PATTERN ==========\n")?;
+        match &self.pattern {
+            Ok(pattern) => {
+                write!(f, "{:#?}", pattern)?;
+            }
+            Err(err) => {
+                write!(f, "{}", err.reason)?;
+            }
+        }
+        write!(
+            f,
+            "\n============ AST ===========\n\
+             {:#?}\n============================\n",
+            self.node
+        )?;
+        match &self.matched {
+            Ok(_) => write!(f, "Node matched")?,
+            Err(reason) => write!(f, "Node failed to match because: {}", reason.reason)?,
+        }
+        Ok(())
+    }
+}
+
+impl SsrPattern {
+    fn tree_for_kind_with_reason(
+        &self,
+        kind: SyntaxKind,
+    ) -> Result<&SyntaxNode, MatchFailureReason> {
+        record_match_fails_reasons_scope(true, || self.tree_for_kind(kind))
+            .map_err(|e| MatchFailureReason { reason: e.reason.unwrap() })
+    }
+}
+
+impl SsrMatches {
+    /// Returns `self` with any nested matches removed and made into top-level matches.
+    pub fn flattened(self) -> SsrMatches {
+        let mut out = SsrMatches::default();
+        self.flatten_into(&mut out);
+        out
+    }
+
+    fn flatten_into(self, out: &mut SsrMatches) {
+        for mut m in self.matches {
+            for p in m.placeholder_values.values_mut() {
+                std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out);
+            }
+            out.matches.push(m);
+        }
+    }
+}
+
+impl Match {
+    pub fn matched_text(&self) -> String {
+        self.matched_node.text().to_string()
+    }
+}
+
 impl std::error::Error for SsrError {}
+
+#[cfg(test)]
+impl MatchDebugInfo {
+    pub(crate) fn match_failure_reason(&self) -> Option<&str> {
+        self.matched.as_ref().err().map(|r| r.reason.as_str())
+    }
+}
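
The lib.rs changes above make the search-only workflow reachable from outside the crate: `add_search_pattern`, `find_matches_in_file` and `debug_where_text_equal` become public, and `SsrMatches::matches`, `SsrMatches::flattened` and `Match::matched_text` are exposed. A minimal sketch of how a caller might drive this API, assuming a `db` and `file_id` obtained elsewhere (the tests build theirs with the `single_file` helper); this is an illustration, not code from the commit:

    // Hypothetical caller; `db` and `file_id` are assumed to already exist.
    let mut match_finder = MatchFinder::new(&db);
    // Search-only: no replacement template is required.
    match_finder.add_search_pattern("Some(${a:kind(literal)})".parse().unwrap());
    // All matches in the file, with nested matches hoisted to the top level.
    for m in match_finder.find_matches_in_file(file_id).flattened().matches {
        println!("matched: {}", m.matched_text());
    }
    // If a snippet unexpectedly fails to match, ask why; MatchDebugInfo's Debug
    // impl prints the parsed pattern, the AST and the failure reason.
    for info in match_finder.debug_where_text_equal(file_id, "Some(x)") {
        println!("{:?}", info);
    }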
diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs
index bb87bda43..ce53d46d2 100644
--- a/crates/ra_ssr/src/matching.rs
+++ b/crates/ra_ssr/src/matching.rs
@@ -2,17 +2,16 @@
 //! process of matching, placeholder values are recorded.
 
 use crate::{
-    parsing::{Placeholder, SsrTemplate},
+    parsing::{Constraint, NodeKind, Placeholder, SsrTemplate},
     SsrMatches, SsrPattern, SsrRule,
 };
 use hir::Semantics;
 use ra_db::FileRange;
 use ra_syntax::ast::{AstNode, AstToken};
-use ra_syntax::{
-    ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
-};
+use ra_syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
 use rustc_hash::FxHashMap;
 use std::{cell::Cell, iter::Peekable};
+use test_utils::mark;
 
 // Creates a match error. If we're currently attempting to match some code that we thought we were
 // going to match, as indicated by the --debug-snippet flag, then populate the reason field.
@@ -44,8 +43,8 @@ macro_rules! fail_match {
 
 /// Information about a match that was found.
 #[derive(Debug)]
-pub(crate) struct Match {
-    pub(crate) range: TextRange,
+pub struct Match {
+    pub(crate) range: FileRange,
     pub(crate) matched_node: SyntaxNode,
     pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>,
     pub(crate) ignored_comments: Vec<ast::Comment>,
@@ -135,7 +134,7 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
         match_state.attempt_match_node(&match_inputs, &pattern_tree, code)?;
         match_state.validate_range(&sema.original_range(code))?;
         match_state.match_out = Some(Match {
-            range: sema.original_range(code).range,
+            range: sema.original_range(code),
             matched_node: code.clone(),
             placeholder_values: FxHashMap::default(),
             ignored_comments: Vec::new(),
@@ -171,6 +170,9 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
         if let Some(placeholder) =
             match_inputs.get_placeholder(&SyntaxElement::Node(pattern.clone()))
         {
+            for constraint in &placeholder.constraints {
+                self.check_constraint(constraint, code)?;
+            }
             if self.match_out.is_none() {
                 return Ok(());
             }
@@ -225,7 +227,7 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
             match self.next_non_trivial(&mut code_it) {
                 None => {
                     if let Some(p) = pattern_it.next() {
-                        fail_match!("Part of the pattern was unmached: {:?}", p);
+                        fail_match!("Part of the pattern was unmatched: {:?}", p);
                     }
                     return Ok(());
                 }
@@ -294,6 +296,24 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
         Ok(())
     }
 
+    fn check_constraint(
+        &self,
+        constraint: &Constraint,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        match constraint {
+            Constraint::Kind(kind) => {
+                kind.matches(code)?;
+            }
+            Constraint::Not(sub) => {
+                if self.check_constraint(&*sub, code).is_ok() {
+                    fail_match!("Constraint {:?} failed for '{}'", constraint, code.text());
+                }
+            }
+        }
+        Ok(())
+    }
+
     /// We want to allow the records to match in any order, so we have special matching logic for
     /// them.
     fn attempt_match_record_field_list(
@@ -517,6 +537,21 @@ impl SsrPattern {
     }
 }
 
+impl NodeKind {
+    fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
+        let ok = match self {
+            Self::Literal => {
+                mark::hit!(literal_constraint);
+                ast::Literal::can_cast(node.kind())
+            }
+        };
+        if !ok {
+            fail_match!("Code '{}' isn't of kind {:?}", node.text(), self);
+        }
+        Ok(())
+    }
+}
+
 // If `node` contains nothing but an ident then return it, otherwise return None.
 fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> {
     match element {
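
For orientation, the constraint checking added above walks a small tree that parsing.rs builds from the placeholder text. A sketch of the crate-internal values involved (not code from the commit):

    // `${a:kind(literal)}` parses to this constraint; check_constraint accepts a
    // node only when ast::Literal::can_cast(node.kind()) holds.
    let only_literals = Constraint::Kind(NodeKind::Literal);

    // `${a:not(kind(literal))}` wraps the same check in Not, which succeeds exactly
    // when the inner constraint fails, so `x1` or `40 + 2` match but `42` does not.
    let no_literals = Constraint::Not(Box::new(Constraint::Kind(NodeKind::Literal)));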
diff --git a/crates/ra_ssr/src/parsing.rs b/crates/ra_ssr/src/parsing.rs
index 1ae166d19..5ea125616 100644
--- a/crates/ra_ssr/src/parsing.rs
+++ b/crates/ra_ssr/src/parsing.rs
@@ -6,7 +6,7 @@
 //! e.g. expressions, type references etc.
 
 use crate::{SsrError, SsrPattern, SsrRule};
-use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind};
+use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, T};
 use rustc_hash::{FxHashMap, FxHashSet};
 use std::str::FromStr;
 
@@ -39,6 +39,18 @@ pub(crate) struct Placeholder {
     pub(crate) ident: SmolStr,
     /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
     stand_in_name: String,
+    pub(crate) constraints: Vec<Constraint>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum Constraint {
+    Kind(NodeKind),
+    Not(Box<Constraint>),
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum NodeKind {
+    Literal,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -55,7 +67,7 @@ impl FromStr for SsrRule {
         let pattern = it.next().expect("at least empty string").trim();
         let template = it
             .next()
-            .ok_or_else(|| SsrError("Cannot find delemiter `==>>`".into()))?
+            .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))?
             .trim()
             .to_string();
         if it.next().is_some() {
@@ -149,7 +161,7 @@ fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
     let mut placeholder_names = FxHashSet::default();
    let mut tokens = tokenize(pattern_str)?.into_iter();
     while let Some(token) = tokens.next() {
-        if token.kind == SyntaxKind::DOLLAR {
+        if token.kind == T![$] {
             let placeholder = parse_placeholder(&mut tokens)?;
             if !placeholder_names.insert(placeholder.ident.clone()) {
                 bail!("Name `{}` repeats more than once", placeholder.ident);
@@ -177,6 +189,9 @@ fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
             if !defined_placeholders.contains(&placeholder.ident) {
                 undefined.push(format!("${}", placeholder.ident));
             }
+            if !placeholder.constraints.is_empty() {
+                bail!("Replacement placeholders cannot have constraints");
+            }
         }
     }
     if !undefined.is_empty() {
@@ -205,23 +220,90 @@ fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
 
 fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
     let mut name = None;
+    let mut constraints = Vec::new();
     if let Some(token) = tokens.next() {
         match token.kind {
             SyntaxKind::IDENT => {
                 name = Some(token.text);
             }
+            T!['{'] => {
+                let token =
+                    tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
+                if token.kind == SyntaxKind::IDENT {
+                    name = Some(token.text);
+                }
+                loop {
+                    let token = tokens
+                        .next()
+                        .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
+                    match token.kind {
+                        T![:] => {
+                            constraints.push(parse_constraint(tokens)?);
+                        }
+                        T!['}'] => break,
+                        _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
+                    }
+                }
+            }
             _ => {
-                bail!("Placeholders should be $name");
+                bail!("Placeholders should either be $name or ${name:constraints}");
             }
         }
     }
     let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
-    Ok(Placeholder::new(name))
+    Ok(Placeholder::new(name, constraints))
+}
+
+fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> {
+    let constraint_type = tokens
+        .next()
+        .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))?
+        .text
+        .to_string();
+    match constraint_type.as_str() {
+        "kind" => {
+            expect_token(tokens, "(")?;
+            let t = tokens.next().ok_or_else(|| {
+                SsrError::new("Unexpected end of constraint while looking for kind")
+            })?;
+            if t.kind != SyntaxKind::IDENT {
+                bail!("Expected ident, found {:?} while parsing kind constraint", t.kind);
+            }
+            expect_token(tokens, ")")?;
+            Ok(Constraint::Kind(NodeKind::from(&t.text)?))
+        }
+        "not" => {
+            expect_token(tokens, "(")?;
+            let sub = parse_constraint(tokens)?;
+            expect_token(tokens, ")")?;
+            Ok(Constraint::Not(Box::new(sub)))
+        }
+        x => bail!("Unsupported constraint type '{}'", x),
+    }
+}
+
+fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> {
+    if let Some(t) = tokens.next() {
+        if t.text == expected {
+            return Ok(());
+        }
+        bail!("Expected {} found {}", expected, t.text);
+    }
+    bail!("Expected {} found end of stream");
+}
+
+impl NodeKind {
+    fn from(name: &SmolStr) -> Result<NodeKind, SsrError> {
+        Ok(match name.as_str() {
+            "literal" => NodeKind::Literal,
+            _ => bail!("Unknown node kind '{}'", name),
+        })
+    }
 }
 
 impl Placeholder {
-    fn new(name: SmolStr) -> Self {
-        Self { stand_in_name: format!("__placeholder_{}", name), ident: name }
+    fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
+        Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name }
     }
 }
 
@@ -241,31 +323,31 @@ mod tests {
             PatternElement::Token(Token { kind, text: SmolStr::new(text) })
         }
         fn placeholder(name: &str) -> PatternElement {
-            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name)))
+            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
         }
         let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
         assert_eq!(
             result.pattern.raw.tokens,
             vec![
                 token(SyntaxKind::IDENT, "foo"),
-                token(SyntaxKind::L_PAREN, "("),
+                token(T!['('], "("),
                 placeholder("a"),
-                token(SyntaxKind::COMMA, ","),
+                token(T![,], ","),
                 token(SyntaxKind::WHITESPACE, " "),
                 placeholder("b"),
-                token(SyntaxKind::R_PAREN, ")"),
+                token(T![')'], ")"),
             ]
         );
         assert_eq!(
             result.template.tokens,
             vec![
                 token(SyntaxKind::IDENT, "bar"),
-                token(SyntaxKind::L_PAREN, "("),
+                token(T!['('], "("),
                 placeholder("b"),
-                token(SyntaxKind::COMMA, ","),
+                token(T![,], ","),
                 token(SyntaxKind::WHITESPACE, " "),
                 placeholder("a"),
-                token(SyntaxKind::R_PAREN, ")"),
+                token(T![')'], ")"),
             ]
         );
     }
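
The grammar the new parsing code accepts is `$name` or `${name:constraint}`, with `kind(literal)` and `not(...)` as the constraints so far, and constraints allowed only on the search side of a rule. A small illustration (not from the commit) of what parses and what is rejected:

    // A search-only pattern with a constrained placeholder.
    let pattern: SsrPattern = "Some(${x:kind(literal)})".parse().unwrap();

    // A full rule: the replacement side refers back to the placeholder with plain `$x`.
    let rule: SsrRule = "foo(${x:not(kind(literal))}) ==>> bar($x)".parse().unwrap();

    // validate_rule rejects constraints on replacement placeholders.
    assert!("foo($x) ==>> bar(${x:kind(literal)})".parse::<SsrRule>().is_err());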
diff --git a/crates/ra_ssr/src/replacing.rs b/crates/ra_ssr/src/replacing.rs
index 70ce1c185..e43cc5167 100644
--- a/crates/ra_ssr/src/replacing.rs
+++ b/crates/ra_ssr/src/replacing.rs
@@ -21,8 +21,10 @@ fn matches_to_edit_at_offset(
 ) -> TextEdit {
     let mut edit_builder = ra_text_edit::TextEditBuilder::default();
     for m in &matches.matches {
-        edit_builder
-            .replace(m.range.checked_sub(relative_start).unwrap(), render_replace(m, file_src));
+        edit_builder.replace(
+            m.range.range.checked_sub(relative_start).unwrap(),
+            render_replace(m, file_src),
+        );
     }
     edit_builder.finish()
 }
diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs
index 8be60c293..9568d4432 100644
--- a/crates/ra_ssr/src/tests.rs
+++ b/crates/ra_ssr/src/tests.rs
@@ -1,150 +1,6 @@
-use crate::matching::MatchFailureReason;
-use crate::{matching, Match, MatchFinder, SsrMatches, SsrPattern, SsrRule};
-use matching::record_match_fails_reasons_scope;
-use ra_db::{FileId, FileRange, SourceDatabaseExt};
-use ra_syntax::ast::AstNode;
-use ra_syntax::{ast, SyntaxKind, SyntaxNode, TextRange};
-
-struct MatchDebugInfo {
-    node: SyntaxNode,
-    /// Our search pattern parsed as the same kind of syntax node as `node`. e.g. expression, item,
-    /// etc. Will be absent if the pattern can't be parsed as that kind.
-    pattern: Result<SyntaxNode, MatchFailureReason>,
-    matched: Result<Match, MatchFailureReason>,
-}
-
-impl SsrPattern {
-    pub(crate) fn tree_for_kind_with_reason(
-        &self,
-        kind: SyntaxKind,
-    ) -> Result<&SyntaxNode, MatchFailureReason> {
-        record_match_fails_reasons_scope(true, || self.tree_for_kind(kind))
-            .map_err(|e| MatchFailureReason { reason: e.reason.unwrap() })
-    }
-}
-
-impl std::fmt::Debug for MatchDebugInfo {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "========= PATTERN ==========\n")?;
-        match &self.pattern {
-            Ok(pattern) => {
-                write!(f, "{:#?}", pattern)?;
-            }
-            Err(err) => {
-                write!(f, "{}", err.reason)?;
-            }
-        }
-        write!(
-            f,
-            "\n============ AST ===========\n\
-             {:#?}\n============================",
-            self.node
-        )?;
-        match &self.matched {
-            Ok(_) => write!(f, "Node matched")?,
-            Err(reason) => write!(f, "Node failed to match because: {}", reason.reason)?,
-        }
-        Ok(())
-    }
-}
-
-impl SsrMatches {
-    /// Returns `self` with any nested matches removed and made into top-level matches.
-    pub(crate) fn flattened(self) -> SsrMatches {
-        let mut out = SsrMatches::default();
-        self.flatten_into(&mut out);
-        out
-    }
-
-    fn flatten_into(self, out: &mut SsrMatches) {
-        for mut m in self.matches {
-            for p in m.placeholder_values.values_mut() {
-                std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out);
-            }
-            out.matches.push(m);
-        }
-    }
-}
-
-impl Match {
-    pub(crate) fn matched_text(&self) -> String {
-        self.matched_node.text().to_string()
-    }
-}
-
-impl<'db> MatchFinder<'db> {
-    /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
-    /// intend to do replacement, use `add_rule` instead.
-    fn add_search_pattern(&mut self, pattern: SsrPattern) {
-        self.add_rule(SsrRule { pattern, template: "()".parse().unwrap() })
-    }
-
-    /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
-    /// them, while recording reasons why they don't match. This API is useful for command
-    /// line-based debugging where providing a range is difficult.
-    fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
-        let file = self.sema.parse(file_id);
-        let mut res = Vec::new();
-        let file_text = self.sema.db.file_text(file_id);
-        let mut remaining_text = file_text.as_str();
-        let mut base = 0;
-        let len = snippet.len() as u32;
-        while let Some(offset) = remaining_text.find(snippet) {
-            let start = base + offset as u32;
-            let end = start + len;
-            self.output_debug_for_nodes_at_range(
-                file.syntax(),
-                TextRange::new(start.into(), end.into()),
-                &None,
-                &mut res,
-            );
-            remaining_text = &remaining_text[offset + snippet.len()..];
-            base = end;
-        }
-        res
-    }
-
-    fn output_debug_for_nodes_at_range(
-        &self,
-        node: &SyntaxNode,
-        range: TextRange,
-        restrict_range: &Option<FileRange>,
-        out: &mut Vec<MatchDebugInfo>,
-    ) {
-        for node in node.children() {
-            if !node.text_range().contains_range(range) {
-                continue;
-            }
-            if node.text_range() == range {
-                for rule in &self.rules {
-                    let pattern =
-                        rule.pattern.tree_for_kind_with_reason(node.kind()).map(|p| p.clone());
-                    out.push(MatchDebugInfo {
-                        matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
-                            .map_err(|e| MatchFailureReason {
-                                reason: e.reason.unwrap_or_else(|| {
-                                    "Match failed, but no reason was given".to_owned()
-                                }),
-                            }),
-                        pattern,
-                        node: node.clone(),
-                    });
-                }
-            } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
-                if let Some(expanded) = self.sema.expand(&macro_call) {
-                    if let Some(tt) = macro_call.token_tree() {
-                        self.output_debug_for_nodes_at_range(
-                            &expanded,
-                            range,
-                            &Some(self.sema.original_range(tt.syntax())),
-                            out,
-                        );
-                    }
-                }
-            }
-        }
-    }
-}
+use crate::{MatchFinder, SsrRule};
+use ra_db::{FileId, SourceDatabaseExt};
+use test_utils::mark;
 
 fn parse_error_text(query: &str) -> String {
     format!("{}", query.parse::<SsrRule>().unwrap_err())
@@ -152,12 +8,12 @@ fn parse_error_text(query: &str) -> String {
 
 #[test]
 fn parser_empty_query() {
-    assert_eq!(parse_error_text(""), "Parse error: Cannot find delemiter `==>>`");
+    assert_eq!(parse_error_text(""), "Parse error: Cannot find delimiter `==>>`");
 }
 
 #[test]
 fn parser_no_delimiter() {
-    assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delemiter `==>>`");
+    assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delimiter `==>>`");
 }
 
 #[test]
@@ -227,7 +83,7 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, result: &str) {
         let mut after = db.file_text(file_id).to_string();
         edits.apply(&mut after);
         // Likewise, we need to make sure that whatever transformations fixture parsing applies,
-        // also get appplied to our expected result.
+        // also get applied to our expected result.
         let result = normalize_code(result);
         assert_eq!(after, result);
     } else {
@@ -260,6 +116,19 @@ fn assert_no_match(pattern: &str, code: &str) {
     assert_matches(pattern, code, &[]);
 }
 
+fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
+    let (db, file_id) = single_file(code);
+    let mut match_finder = MatchFinder::new(&db);
+    match_finder.add_search_pattern(pattern.parse().unwrap());
+    let mut reasons = Vec::new();
+    for d in match_finder.debug_where_text_equal(file_id, snippet) {
+        if let Some(reason) = d.match_failure_reason() {
+            reasons.push(reason.to_owned());
+        }
+    }
+    assert_eq!(reasons, vec![expected_reason]);
+}
+
 #[test]
 fn ssr_function_to_method() {
     assert_ssr_transform(
@@ -434,6 +303,22 @@ fn match_pattern() {
 }
 
 #[test]
+fn literal_constraint() {
+    mark::check!(literal_constraint);
+    let code = r#"
+        fn f1() {
+            let x1 = Some(42);
+            let x2 = Some("foo");
+            let x3 = Some(x1);
+            let x4 = Some(40 + 2);
+            let x5 = Some(true);
+        }
+        "#;
+    assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]);
+    assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]);
+}
+
+#[test]
 fn match_reordered_struct_instantiation() {
     assert_matches(
         "Foo {aa: 1, b: 2, ccc: 3}",
@@ -623,3 +508,30 @@ fn preserves_whitespace_within_macro_expansion() {
             fn f() {macro1!(4 - 3 - 1 * 2}"#,
     )
 }
+
+#[test]
+fn match_failure_reasons() {
+    let code = r#"
+        macro_rules! foo {
+            ($a:expr) => {
+                1 + $a + 2
+            };
+        }
+        fn f1() {
+            bar(1, 2);
+            foo!(5 + 43.to_string() + 5);
+        }
+        "#;
+    assert_match_failure_reason(
+        "bar($a, 3)",
+        code,
+        "bar(1, 2)",
+        r#"Pattern wanted token '3' (INT_NUMBER), but code had token '2' (INT_NUMBER)"#,
+    );
+    assert_match_failure_reason(
+        "42.to_string()",
+        code,
+        "43.to_string()",
+        r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#,
+    );
+}