Diffstat (limited to 'crates/ra_ssr')
-rw-r--r--  crates/ra_ssr/Cargo.toml       |   4
-rw-r--r--  crates/ra_ssr/src/errors.rs    |  29
-rw-r--r--  crates/ra_ssr/src/lib.rs       | 308
-rw-r--r--  crates/ra_ssr/src/matching.rs  | 381
-rw-r--r--  crates/ra_ssr/src/nester.rs    |  98
-rw-r--r--  crates/ra_ssr/src/parsing.rs   | 259
-rw-r--r--  crates/ra_ssr/src/replacing.rs | 204
-rw-r--r--  crates/ra_ssr/src/resolving.rs | 228
-rw-r--r--  crates/ra_ssr/src/search.rs    | 273
-rw-r--r--  crates/ra_ssr/src/tests.rs     | 963
10 files changed, 2169 insertions, 578 deletions
diff --git a/crates/ra_ssr/Cargo.toml b/crates/ra_ssr/Cargo.toml
index 3c2f15a83..84e4b171e 100644
--- a/crates/ra_ssr/Cargo.toml
+++ b/crates/ra_ssr/Cargo.toml
@@ -17,3 +17,7 @@ ra_db = { path = "../ra_db" }
 ra_ide_db = { path = "../ra_ide_db" }
 hir = { path = "../ra_hir", package = "ra_hir" }
 rustc-hash = "1.1.0"
+test_utils = { path = "../test_utils" }
+
+[dev-dependencies]
+expect = { path = "../expect" }
diff --git a/crates/ra_ssr/src/errors.rs b/crates/ra_ssr/src/errors.rs
new file mode 100644
index 000000000..c02bacae6
--- /dev/null
+++ b/crates/ra_ssr/src/errors.rs
@@ -0,0 +1,29 @@
+//! Code relating to errors produced by SSR.
+
+/// Constructs an SsrError taking arguments like the format macro.
+macro_rules! _error {
+    ($fmt:expr) => {$crate::SsrError::new(format!($fmt))};
+    ($fmt:expr, $($arg:tt)+) => {$crate::SsrError::new(format!($fmt, $($arg)+))}
+}
+pub(crate) use _error as error;
+
+/// Returns from the current function with an error, supplied by arguments as for format!
+macro_rules! _bail {
+    ($($tokens:tt)*) => {return Err(crate::errors::error!($($tokens)*))}
+}
+pub(crate) use _bail as bail;
+
+#[derive(Debug, PartialEq)]
+pub struct SsrError(pub(crate) String);
+
+impl std::fmt::Display for SsrError {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(f, "Parse error: {}", self.0)
+    }
+}
+
+impl SsrError {
+    pub(crate) fn new(message: impl Into<String>) -> SsrError {
+        SsrError(message.into())
+    }
+}
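
To see how these macros are meant to be used, here is a minimal sketch of a caller inside the crate; the `require_delimiter` function and its message are invented for illustration and are not part of this change.

use crate::errors::bail;
use crate::SsrError;

fn require_delimiter(rule_text: &str) -> Result<(), SsrError> {
    if !rule_text.contains("==>>") {
        // `bail!` builds an SsrError with format!-style arguments and returns early.
        bail!("Cannot find delimiter `==>>` in `{}`", rule_text);
    }
    Ok(())
}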
diff --git a/crates/ra_ssr/src/lib.rs b/crates/ra_ssr/src/lib.rs
index e148f4564..73abfecb2 100644
--- a/crates/ra_ssr/src/lib.rs
+++ b/crates/ra_ssr/src/lib.rs
@@ -4,136 +4,288 @@
4//! based on a template. 4//! based on a template.
5 5
6mod matching; 6mod matching;
7mod nester;
7mod parsing; 8mod parsing;
8mod replacing; 9mod replacing;
10mod resolving;
11mod search;
12#[macro_use]
13mod errors;
9#[cfg(test)] 14#[cfg(test)]
10mod tests; 15mod tests;
11 16
12use crate::matching::Match; 17use crate::errors::bail;
18pub use crate::errors::SsrError;
19pub use crate::matching::Match;
20use crate::matching::MatchFailureReason;
13use hir::Semantics; 21use hir::Semantics;
14use ra_db::{FileId, FileRange}; 22use ra_db::{FileId, FilePosition, FileRange};
15use ra_syntax::{ast, AstNode, SmolStr, SyntaxNode}; 23use ra_ide_db::source_change::SourceFileEdit;
16use ra_text_edit::TextEdit; 24use ra_syntax::{ast, AstNode, SyntaxNode, TextRange};
25use resolving::ResolvedRule;
17use rustc_hash::FxHashMap; 26use rustc_hash::FxHashMap;
18 27
19// A structured search replace rule. Create by calling `parse` on a str. 28// A structured search replace rule. Create by calling `parse` on a str.
20#[derive(Debug)] 29#[derive(Debug)]
21pub struct SsrRule { 30pub struct SsrRule {
22 /// A structured pattern that we're searching for. 31 /// A structured pattern that we're searching for.
23 pattern: SsrPattern, 32 pattern: parsing::RawPattern,
24 /// What we'll replace it with. 33 /// What we'll replace it with.
25 template: parsing::SsrTemplate, 34 template: parsing::RawPattern,
35 parsed_rules: Vec<parsing::ParsedRule>,
26} 36}
27 37
28#[derive(Debug)] 38#[derive(Debug)]
29struct SsrPattern { 39pub struct SsrPattern {
30 raw: parsing::RawSearchPattern, 40 raw: parsing::RawPattern,
31 /// Placeholders keyed by the stand-in ident that we use in Rust source code. 41 parsed_rules: Vec<parsing::ParsedRule>,
32 placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
33 // We store our search pattern, parsed as each different kind of thing we can look for. As we
34 // traverse the AST, we get the appropriate one of these for the type of node we're on. For many
35 // search patterns, only some of these will be present.
36 expr: Option<SyntaxNode>,
37 type_ref: Option<SyntaxNode>,
38 item: Option<SyntaxNode>,
39 path: Option<SyntaxNode>,
40 pattern: Option<SyntaxNode>,
41} 42}
42 43
43#[derive(Debug, PartialEq)]
44pub struct SsrError(String);
45
46#[derive(Debug, Default)] 44#[derive(Debug, Default)]
47pub struct SsrMatches { 45pub struct SsrMatches {
48 matches: Vec<Match>, 46 pub matches: Vec<Match>,
49} 47}
50 48
51/// Searches a crate for pattern matches and possibly replaces them with something else. 49/// Searches a crate for pattern matches and possibly replaces them with something else.
52pub struct MatchFinder<'db> { 50pub struct MatchFinder<'db> {
53 /// Our source of information about the user's code. 51 /// Our source of information about the user's code.
54 sema: Semantics<'db, ra_ide_db::RootDatabase>, 52 sema: Semantics<'db, ra_ide_db::RootDatabase>,
55 rules: Vec<SsrRule>, 53 rules: Vec<ResolvedRule>,
54 resolution_scope: resolving::ResolutionScope<'db>,
55 restrict_ranges: Vec<FileRange>,
56} 56}
57 57
58impl<'db> MatchFinder<'db> { 58impl<'db> MatchFinder<'db> {
59 pub fn new(db: &'db ra_ide_db::RootDatabase) -> MatchFinder<'db> { 59 /// Constructs a new instance where names will be looked up as if they appeared at
60 MatchFinder { sema: Semantics::new(db), rules: Vec::new() } 60 /// `lookup_context`.
61 pub fn in_context(
62 db: &'db ra_ide_db::RootDatabase,
63 lookup_context: FilePosition,
64 mut restrict_ranges: Vec<FileRange>,
65 ) -> MatchFinder<'db> {
66 restrict_ranges.retain(|range| !range.range.is_empty());
67 let sema = Semantics::new(db);
68 let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context);
69 MatchFinder {
70 sema: Semantics::new(db),
71 rules: Vec::new(),
72 resolution_scope,
73 restrict_ranges,
74 }
61 } 75 }
62 76
63 pub fn add_rule(&mut self, rule: SsrRule) { 77 /// Constructs an instance using the start of the first file in `db` as the lookup context.
64 self.rules.push(rule); 78 pub fn at_first_file(db: &'db ra_ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
79 use ra_db::SourceDatabaseExt;
80 use ra_ide_db::symbol_index::SymbolsDatabase;
81 if let Some(first_file_id) = db
82 .local_roots()
83 .iter()
84 .next()
85 .and_then(|root| db.source_root(root.clone()).iter().next())
86 {
87 Ok(MatchFinder::in_context(
88 db,
89 FilePosition { file_id: first_file_id, offset: 0.into() },
90 vec![],
91 ))
92 } else {
93 bail!("No files to search");
94 }
65 } 95 }
66 96
67 pub fn edits_for_file(&self, file_id: FileId) -> Option<TextEdit> { 97 /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
68 let matches = self.find_matches_in_file(file_id); 98 /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
69 if matches.matches.is_empty() { 99 /// match to it.
70 None 100 pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
71 } else { 101 for parsed_rule in rule.parsed_rules {
72 use ra_db::SourceDatabaseExt; 102 self.rules.push(ResolvedRule::new(
73 Some(replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id))) 103 parsed_rule,
104 &self.resolution_scope,
105 self.rules.len(),
106 )?);
74 } 107 }
108 Ok(())
75 } 109 }
76 110
77 fn find_matches_in_file(&self, file_id: FileId) -> SsrMatches { 111 /// Finds matches for all added rules and returns edits for all found matches.
112 pub fn edits(&self) -> Vec<SourceFileEdit> {
113 use ra_db::SourceDatabaseExt;
114 let mut matches_by_file = FxHashMap::default();
115 for m in self.matches().matches {
116 matches_by_file
117 .entry(m.range.file_id)
118 .or_insert_with(|| SsrMatches::default())
119 .matches
120 .push(m);
121 }
122 let mut edits = vec![];
123 for (file_id, matches) in matches_by_file {
124 let edit =
125 replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id), &self.rules);
126 edits.push(SourceFileEdit { file_id, edit });
127 }
128 edits
129 }
130
131 /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
132 /// intend to do replacement, use `add_rule` instead.
133 pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
134 for parsed_rule in pattern.parsed_rules {
135 self.rules.push(ResolvedRule::new(
136 parsed_rule,
137 &self.resolution_scope,
138 self.rules.len(),
139 )?);
140 }
141 Ok(())
142 }
143
144 /// Returns matches for all added rules.
145 pub fn matches(&self) -> SsrMatches {
146 let mut matches = Vec::new();
147 let mut usage_cache = search::UsageCache::default();
148 for rule in &self.rules {
149 self.find_matches_for_rule(rule, &mut usage_cache, &mut matches);
150 }
151 nester::nest_and_remove_collisions(matches, &self.sema)
152 }
153
154 /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
155 /// them, while recording reasons why they don't match. This API is useful for command
156 /// line-based debugging where providing a range is difficult.
157 pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
158 use ra_db::SourceDatabaseExt;
78 let file = self.sema.parse(file_id); 159 let file = self.sema.parse(file_id);
79 let code = file.syntax(); 160 let mut res = Vec::new();
80 let mut matches = SsrMatches::default(); 161 let file_text = self.sema.db.file_text(file_id);
81 self.find_matches(code, &None, &mut matches); 162 let mut remaining_text = file_text.as_str();
82 matches 163 let mut base = 0;
164 let len = snippet.len() as u32;
165 while let Some(offset) = remaining_text.find(snippet) {
166 let start = base + offset as u32;
167 let end = start + len;
168 self.output_debug_for_nodes_at_range(
169 file.syntax(),
170 FileRange { file_id, range: TextRange::new(start.into(), end.into()) },
171 &None,
172 &mut res,
173 );
174 remaining_text = &remaining_text[offset + snippet.len()..];
175 base = end;
176 }
177 res
83 } 178 }
84 179
85 fn find_matches( 180 fn output_debug_for_nodes_at_range(
86 &self, 181 &self,
87 code: &SyntaxNode, 182 node: &SyntaxNode,
183 range: FileRange,
88 restrict_range: &Option<FileRange>, 184 restrict_range: &Option<FileRange>,
89 matches_out: &mut SsrMatches, 185 out: &mut Vec<MatchDebugInfo>,
90 ) { 186 ) {
91 for rule in &self.rules { 187 for node in node.children() {
92 if let Ok(mut m) = matching::get_match(false, rule, &code, restrict_range, &self.sema) { 188 let node_range = self.sema.original_range(&node);
93 // Continue searching in each of our placeholders. 189 if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range)
94 for placeholder_value in m.placeholder_values.values_mut() { 190 {
95 if let Some(placeholder_node) = &placeholder_value.node { 191 continue;
96 // Don't search our placeholder if it's the entire matched node, otherwise we'd 192 }
97 // find the same match over and over until we got a stack overflow. 193 if node_range.range == range.range {
98 if placeholder_node != code { 194 for rule in &self.rules {
99 self.find_matches( 195 // For now we ignore rules that have a different kind than our node, otherwise
100 placeholder_node, 196 // we get lots of noise. If at some point we add support for restricting rules
101 restrict_range, 197 // to a particular kind of thing (e.g. only match type references), then we can
102 &mut placeholder_value.inner_matches, 198 // relax this. We special-case expressions, since function calls can match
103 ); 199 // method calls.
104 } 200 if rule.pattern.node.kind() != node.kind()
201 && !(ast::Expr::can_cast(rule.pattern.node.kind())
202 && ast::Expr::can_cast(node.kind()))
203 {
204 continue;
105 } 205 }
206 out.push(MatchDebugInfo {
207 matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
208 .map_err(|e| MatchFailureReason {
209 reason: e.reason.unwrap_or_else(|| {
210 "Match failed, but no reason was given".to_owned()
211 }),
212 }),
213 pattern: rule.pattern.node.clone(),
214 node: node.clone(),
215 });
106 } 216 }
107 matches_out.matches.push(m); 217 } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
108 return; 218 if let Some(expanded) = self.sema.expand(&macro_call) {
109 } 219 if let Some(tt) = macro_call.token_tree() {
110 } 220 self.output_debug_for_nodes_at_range(
111 // If we've got a macro call, we already tried matching it pre-expansion, which is the only 221 &expanded,
112 // way to match the whole macro, now try expanding it and matching the expansion. 222 range,
113 if let Some(macro_call) = ast::MacroCall::cast(code.clone()) { 223 &Some(self.sema.original_range(tt.syntax())),
114 if let Some(expanded) = self.sema.expand(&macro_call) { 224 out,
115 if let Some(tt) = macro_call.token_tree() { 225 );
116 // When matching within a macro expansion, we only want to allow matches of 226 }
117 // nodes that originated entirely from within the token tree of the macro call.
118 // i.e. we don't want to match something that came from the macro itself.
119 self.find_matches(
120 &expanded,
121 &Some(self.sema.original_range(tt.syntax())),
122 matches_out,
123 );
124 } 227 }
125 } 228 }
229 self.output_debug_for_nodes_at_range(&node, range, restrict_range, out);
126 } 230 }
127 for child in code.children() { 231 }
128 self.find_matches(&child, restrict_range, matches_out); 232}
233
234pub struct MatchDebugInfo {
235 node: SyntaxNode,
236 /// Our search pattern parsed as an expression or item, etc
237 pattern: SyntaxNode,
238 matched: Result<Match, MatchFailureReason>,
239}
240
241impl std::fmt::Debug for MatchDebugInfo {
242 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
243 match &self.matched {
244 Ok(_) => writeln!(f, "Node matched")?,
245 Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?,
129 } 246 }
247 writeln!(
248 f,
249 "============ AST ===========\n\
250 {:#?}",
251 self.node
252 )?;
253 writeln!(f, "========= PATTERN ==========")?;
254 writeln!(f, "{:#?}", self.pattern)?;
255 writeln!(f, "============================")?;
256 Ok(())
130 } 257 }
131} 258}
132 259
133impl std::fmt::Display for SsrError { 260impl SsrMatches {
134 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 261 /// Returns `self` with any nested matches removed and made into top-level matches.
135 write!(f, "Parse error: {}", self.0) 262 pub fn flattened(self) -> SsrMatches {
263 let mut out = SsrMatches::default();
264 self.flatten_into(&mut out);
265 out
266 }
267
268 fn flatten_into(self, out: &mut SsrMatches) {
269 for mut m in self.matches {
270 for p in m.placeholder_values.values_mut() {
271 std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out);
272 }
273 out.matches.push(m);
274 }
275 }
276}
277
278impl Match {
279 pub fn matched_text(&self) -> String {
280 self.matched_node.text().to_string()
136 } 281 }
137} 282}
138 283
139impl std::error::Error for SsrError {} 284impl std::error::Error for SsrError {}
285
286#[cfg(test)]
287impl MatchDebugInfo {
288 pub(crate) fn match_failure_reason(&self) -> Option<&str> {
289 self.matched.as_ref().err().map(|r| r.reason.as_str())
290 }
291}
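
Taken together, the new public surface (`MatchFinder::in_context`, `add_rule`, `add_search_pattern`, `matches` and `edits`) is driven roughly as in the following sketch. It uses only items visible in this diff; the `rename_foo_to_bar` wrapper and its printout are invented for illustration.

use ra_db::FilePosition;
use ra_ide_db::RootDatabase;
use ra_ssr::{MatchFinder, SsrError, SsrRule};

fn rename_foo_to_bar(db: &RootDatabase, position: FilePosition) -> Result<(), SsrError> {
    // The left-hand side is the search pattern, the right-hand side the replacement template.
    let rule: SsrRule = "foo($a) ==>> bar($a)".parse()?;
    // An empty restriction list means everything visible from `position` is searched.
    let mut match_finder = MatchFinder::in_context(db, position, vec![]);
    match_finder.add_rule(rule)?;
    // `edits()` produces one SourceFileEdit per file that contained at least one match.
    let edits = match_finder.edits();
    println!("{} file(s) would be modified", edits.len());
    Ok(())
}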
diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs
index bb87bda43..74e15c631 100644
--- a/crates/ra_ssr/src/matching.rs
+++ b/crates/ra_ssr/src/matching.rs
@@ -2,17 +2,17 @@
2//! process of matching, placeholder values are recorded. 2//! process of matching, placeholder values are recorded.
3 3
4use crate::{ 4use crate::{
5 parsing::{Placeholder, SsrTemplate}, 5 parsing::{Constraint, NodeKind, Placeholder},
6 SsrMatches, SsrPattern, SsrRule, 6 resolving::{ResolvedPattern, ResolvedRule},
7 SsrMatches,
7}; 8};
8use hir::Semantics; 9use hir::Semantics;
9use ra_db::FileRange; 10use ra_db::FileRange;
10use ra_syntax::ast::{AstNode, AstToken}; 11use ra_syntax::ast::{AstNode, AstToken};
11use ra_syntax::{ 12use ra_syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
12 ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
13};
14use rustc_hash::FxHashMap; 13use rustc_hash::FxHashMap;
15use std::{cell::Cell, iter::Peekable}; 14use std::{cell::Cell, iter::Peekable};
15use test_utils::mark;
16 16
17// Creates a match error. If we're currently attempting to match some code that we thought we were 17// Creates a match error. If we're currently attempting to match some code that we thought we were
18// going to match, as indicated by the --debug-snippet flag, then populate the reason field. 18// going to match, as indicated by the --debug-snippet flag, then populate the reason field.
@@ -44,14 +44,16 @@ macro_rules! fail_match {
44 44
45/// Information about a match that was found. 45/// Information about a match that was found.
46#[derive(Debug)] 46#[derive(Debug)]
47pub(crate) struct Match { 47pub struct Match {
48 pub(crate) range: TextRange, 48 pub(crate) range: FileRange,
49 pub(crate) matched_node: SyntaxNode, 49 pub(crate) matched_node: SyntaxNode,
50 pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>, 50 pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>,
51 pub(crate) ignored_comments: Vec<ast::Comment>, 51 pub(crate) ignored_comments: Vec<ast::Comment>,
52 // A copy of the template for the rule that produced this match. We store this on the match for 52 pub(crate) rule_index: usize,
53 // if/when we do replacement. 53 /// The depth of matched_node.
54 pub(crate) template: SsrTemplate, 54 pub(crate) depth: usize,
55 // Each path in the template rendered for the module in which the match was found.
56 pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>,
55} 57}
56 58
57/// Represents a `$var` in an SSR query. 59/// Represents a `$var` in an SSR query.
@@ -87,64 +89,67 @@ pub(crate) struct MatchFailed {
87/// parent module, we don't populate nested matches. 89/// parent module, we don't populate nested matches.
88pub(crate) fn get_match( 90pub(crate) fn get_match(
89 debug_active: bool, 91 debug_active: bool,
90 rule: &SsrRule, 92 rule: &ResolvedRule,
91 code: &SyntaxNode, 93 code: &SyntaxNode,
92 restrict_range: &Option<FileRange>, 94 restrict_range: &Option<FileRange>,
93 sema: &Semantics<ra_ide_db::RootDatabase>, 95 sema: &Semantics<ra_ide_db::RootDatabase>,
94) -> Result<Match, MatchFailed> { 96) -> Result<Match, MatchFailed> {
95 record_match_fails_reasons_scope(debug_active, || { 97 record_match_fails_reasons_scope(debug_active, || {
96 MatchState::try_match(rule, code, restrict_range, sema) 98 Matcher::try_match(rule, code, restrict_range, sema)
97 }) 99 })
98} 100}
99 101
100/// Inputs to matching. This cannot be part of `MatchState`, since we mutate `MatchState` and in at 102/// Checks if our search pattern matches a particular node of the AST.
101/// least one case need to hold a borrow of a placeholder from the input pattern while calling a 103struct Matcher<'db, 'sema> {
102/// mutable `MatchState` method.
103struct MatchInputs<'pattern> {
104 ssr_pattern: &'pattern SsrPattern,
105}
106
107/// State used while attempting to match our search pattern against a particular node of the AST.
108struct MatchState<'db, 'sema> {
109 sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>, 104 sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>,
110 /// If any placeholders come from anywhere outside of this range, then the match will be 105 /// If any placeholders come from anywhere outside of this range, then the match will be
111 /// rejected. 106 /// rejected.
112 restrict_range: Option<FileRange>, 107 restrict_range: Option<FileRange>,
113 /// The match that we're building. We do two passes for a successful match. On the first pass, 108 rule: &'sema ResolvedRule,
114 /// this is None so that we can avoid doing things like storing copies of what placeholders 109}
115 /// matched to. If that pass succeeds, then we do a second pass where we collect those details. 110
116 /// This means that if we have a pattern like `$a.foo()` we won't do an insert into the 111/// Which phase of matching we're currently performing. We do two phases because most attempted
117 /// placeholders map for every single method call in the codebase. Instead we'll discard all the 112/// matches will fail and it means we can defer more expensive checks to the second phase.
118 /// method calls that aren't calls to `foo` on the first pass and only insert into the 113enum Phase<'a> {
119 /// placeholders map on the second pass. Likewise for ignored comments. 114 /// On the first phase, we perform cheap checks. No state is mutated and nothing is recorded.
120 match_out: Option<Match>, 115 First,
116 /// On the second phase, we construct the `Match`. Things like what placeholders bind to is
117 /// recorded.
118 Second(&'a mut Match),
121} 119}
122 120
123impl<'db, 'sema> MatchState<'db, 'sema> { 121impl<'db, 'sema> Matcher<'db, 'sema> {
124 fn try_match( 122 fn try_match(
125 rule: &SsrRule, 123 rule: &ResolvedRule,
126 code: &SyntaxNode, 124 code: &SyntaxNode,
127 restrict_range: &Option<FileRange>, 125 restrict_range: &Option<FileRange>,
128 sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>, 126 sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>,
129 ) -> Result<Match, MatchFailed> { 127 ) -> Result<Match, MatchFailed> {
130 let mut match_state = 128 let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule };
131 MatchState { sema, restrict_range: restrict_range.clone(), match_out: None };
132 let match_inputs = MatchInputs { ssr_pattern: &rule.pattern };
133 let pattern_tree = rule.pattern.tree_for_kind(code.kind())?;
134 // First pass at matching, where we check that node types and idents match. 129 // First pass at matching, where we check that node types and idents match.
135 match_state.attempt_match_node(&match_inputs, &pattern_tree, code)?; 130 match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
136 match_state.validate_range(&sema.original_range(code))?; 131 match_state.validate_range(&sema.original_range(code))?;
137 match_state.match_out = Some(Match { 132 let mut the_match = Match {
138 range: sema.original_range(code).range, 133 range: sema.original_range(code),
139 matched_node: code.clone(), 134 matched_node: code.clone(),
140 placeholder_values: FxHashMap::default(), 135 placeholder_values: FxHashMap::default(),
141 ignored_comments: Vec::new(), 136 ignored_comments: Vec::new(),
142 template: rule.template.clone(), 137 rule_index: rule.index,
143 }); 138 depth: 0,
139 rendered_template_paths: FxHashMap::default(),
140 };
144 // Second matching pass, where we record placeholder matches, ignored comments and maybe do 141 // Second matching pass, where we record placeholder matches, ignored comments and maybe do
145 // any other more expensive checks that we didn't want to do on the first pass. 142 // any other more expensive checks that we didn't want to do on the first pass.
146 match_state.attempt_match_node(&match_inputs, &pattern_tree, code)?; 143 match_state.attempt_match_node(
147 Ok(match_state.match_out.unwrap()) 144 &mut Phase::Second(&mut the_match),
145 &rule.pattern.node,
146 code,
147 )?;
148 the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count();
149 if let Some(template) = &rule.template {
150 the_match.render_template_paths(template, sema)?;
151 }
152 Ok(the_match)
148 } 153 }
149 154
150 /// Checks that `range` is within the permitted range if any. This is applicable when we're 155 /// Checks that `range` is within the permitted range if any. This is applicable when we're
@@ -162,79 +167,91 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
162 } 167 }
163 168
164 fn attempt_match_node( 169 fn attempt_match_node(
165 &mut self, 170 &self,
166 match_inputs: &MatchInputs, 171 phase: &mut Phase,
167 pattern: &SyntaxNode, 172 pattern: &SyntaxNode,
168 code: &SyntaxNode, 173 code: &SyntaxNode,
169 ) -> Result<(), MatchFailed> { 174 ) -> Result<(), MatchFailed> {
170 // Handle placeholders. 175 // Handle placeholders.
171 if let Some(placeholder) = 176 if let Some(placeholder) = self.get_placeholder(&SyntaxElement::Node(pattern.clone())) {
172 match_inputs.get_placeholder(&SyntaxElement::Node(pattern.clone())) 177 for constraint in &placeholder.constraints {
173 { 178 self.check_constraint(constraint, code)?;
174 if self.match_out.is_none() {
175 return Ok(());
176 } 179 }
177 let original_range = self.sema.original_range(code); 180 if let Phase::Second(matches_out) = phase {
178 // We validated the range for the node when we started the match, so the placeholder 181 let original_range = self.sema.original_range(code);
179 // probably can't fail range validation, but just to be safe... 182 // We validated the range for the node when we started the match, so the placeholder
180 self.validate_range(&original_range)?; 183 // probably can't fail range validation, but just to be safe...
181 if let Some(match_out) = &mut self.match_out { 184 self.validate_range(&original_range)?;
182 match_out.placeholder_values.insert( 185 matches_out.placeholder_values.insert(
183 Var(placeholder.ident.to_string()), 186 Var(placeholder.ident.to_string()),
184 PlaceholderMatch::new(code, original_range), 187 PlaceholderMatch::new(code, original_range),
185 ); 188 );
186 } 189 }
187 return Ok(()); 190 return Ok(());
188 } 191 }
189 // Non-placeholders. 192 // We allow a UFCS call to match a method call, provided they resolve to the same function.
193 if let Some(pattern_function) = self.rule.pattern.ufcs_function_calls.get(pattern) {
194 if let (Some(pattern), Some(code)) =
195 (ast::CallExpr::cast(pattern.clone()), ast::MethodCallExpr::cast(code.clone()))
196 {
197 return self.attempt_match_ufcs(phase, &pattern, &code, *pattern_function);
198 }
199 }
190 if pattern.kind() != code.kind() { 200 if pattern.kind() != code.kind() {
191 fail_match!("Pattern had a {:?}, code had {:?}", pattern.kind(), code.kind()); 201 fail_match!(
202 "Pattern had `{}` ({:?}), code had `{}` ({:?})",
203 pattern.text(),
204 pattern.kind(),
205 code.text(),
206 code.kind()
207 );
192 } 208 }
193 // Some kinds of nodes have special handling. For everything else, we fall back to default 209 // Some kinds of nodes have special handling. For everything else, we fall back to default
194 // matching. 210 // matching.
195 match code.kind() { 211 match code.kind() {
196 SyntaxKind::RECORD_FIELD_LIST => { 212 SyntaxKind::RECORD_EXPR_FIELD_LIST => {
197 self.attempt_match_record_field_list(match_inputs, pattern, code) 213 self.attempt_match_record_field_list(phase, pattern, code)
198 } 214 }
199 SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(match_inputs, pattern, code), 215 SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
200 _ => self.attempt_match_node_children(match_inputs, pattern, code), 216 SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code),
217 _ => self.attempt_match_node_children(phase, pattern, code),
201 } 218 }
202 } 219 }
203 220
204 fn attempt_match_node_children( 221 fn attempt_match_node_children(
205 &mut self, 222 &self,
206 match_inputs: &MatchInputs, 223 phase: &mut Phase,
207 pattern: &SyntaxNode, 224 pattern: &SyntaxNode,
208 code: &SyntaxNode, 225 code: &SyntaxNode,
209 ) -> Result<(), MatchFailed> { 226 ) -> Result<(), MatchFailed> {
210 self.attempt_match_sequences( 227 self.attempt_match_sequences(
211 match_inputs, 228 phase,
212 PatternIterator::new(pattern), 229 PatternIterator::new(pattern),
213 code.children_with_tokens(), 230 code.children_with_tokens(),
214 ) 231 )
215 } 232 }
216 233
217 fn attempt_match_sequences( 234 fn attempt_match_sequences(
218 &mut self, 235 &self,
219 match_inputs: &MatchInputs, 236 phase: &mut Phase,
220 pattern_it: PatternIterator, 237 pattern_it: PatternIterator,
221 mut code_it: SyntaxElementChildren, 238 mut code_it: SyntaxElementChildren,
222 ) -> Result<(), MatchFailed> { 239 ) -> Result<(), MatchFailed> {
223 let mut pattern_it = pattern_it.peekable(); 240 let mut pattern_it = pattern_it.peekable();
224 loop { 241 loop {
225 match self.next_non_trivial(&mut code_it) { 242 match phase.next_non_trivial(&mut code_it) {
226 None => { 243 None => {
227 if let Some(p) = pattern_it.next() { 244 if let Some(p) = pattern_it.next() {
228 fail_match!("Part of the pattern was unmached: {:?}", p); 245 fail_match!("Part of the pattern was unmatched: {:?}", p);
229 } 246 }
230 return Ok(()); 247 return Ok(());
231 } 248 }
232 Some(SyntaxElement::Token(c)) => { 249 Some(SyntaxElement::Token(c)) => {
233 self.attempt_match_token(&mut pattern_it, &c)?; 250 self.attempt_match_token(phase, &mut pattern_it, &c)?;
234 } 251 }
235 Some(SyntaxElement::Node(c)) => match pattern_it.next() { 252 Some(SyntaxElement::Node(c)) => match pattern_it.next() {
236 Some(SyntaxElement::Node(p)) => { 253 Some(SyntaxElement::Node(p)) => {
237 self.attempt_match_node(match_inputs, &p, &c)?; 254 self.attempt_match_node(phase, &p, &c)?;
238 } 255 }
239 Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()), 256 Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()),
240 None => fail_match!("Pattern reached end, code has {}", c.text()), 257 None => fail_match!("Pattern reached end, code has {}", c.text()),
@@ -244,11 +261,12 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
244 } 261 }
245 262
246 fn attempt_match_token( 263 fn attempt_match_token(
247 &mut self, 264 &self,
265 phase: &mut Phase,
248 pattern: &mut Peekable<PatternIterator>, 266 pattern: &mut Peekable<PatternIterator>,
249 code: &ra_syntax::SyntaxToken, 267 code: &ra_syntax::SyntaxToken,
250 ) -> Result<(), MatchFailed> { 268 ) -> Result<(), MatchFailed> {
251 self.record_ignored_comments(code); 269 phase.record_ignored_comments(code);
252 // Ignore whitespace and comments. 270 // Ignore whitespace and comments.
253 if code.kind().is_trivia() { 271 if code.kind().is_trivia() {
254 return Ok(()); 272 return Ok(());
@@ -294,18 +312,94 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
294 Ok(()) 312 Ok(())
295 } 313 }
296 314
315 fn check_constraint(
316 &self,
317 constraint: &Constraint,
318 code: &SyntaxNode,
319 ) -> Result<(), MatchFailed> {
320 match constraint {
321 Constraint::Kind(kind) => {
322 kind.matches(code)?;
323 }
324 Constraint::Not(sub) => {
325 if self.check_constraint(&*sub, code).is_ok() {
326 fail_match!("Constraint {:?} failed for '{}'", constraint, code.text());
327 }
328 }
329 }
330 Ok(())
331 }
332
333 /// Paths are matched based on whether they refer to the same thing, even if they're written
334 /// differently.
335 fn attempt_match_path(
336 &self,
337 phase: &mut Phase,
338 pattern: &SyntaxNode,
339 code: &SyntaxNode,
340 ) -> Result<(), MatchFailed> {
341 if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) {
342 let pattern_path = ast::Path::cast(pattern.clone()).unwrap();
343 let code_path = ast::Path::cast(code.clone()).unwrap();
344 if let (Some(pattern_segment), Some(code_segment)) =
345 (pattern_path.segment(), code_path.segment())
346 {
347 // Match everything within the segment except for the name-ref, which is handled
348 // separately via comparing what the path resolves to below.
349 self.attempt_match_opt(
350 phase,
351 pattern_segment.type_arg_list(),
352 code_segment.type_arg_list(),
353 )?;
354 self.attempt_match_opt(
355 phase,
356 pattern_segment.param_list(),
357 code_segment.param_list(),
358 )?;
359 }
360 if matches!(phase, Phase::Second(_)) {
361 let resolution = self
362 .sema
363 .resolve_path(&code_path)
364 .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?;
365 if pattern_resolved.resolution != resolution {
366 fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text());
367 }
368 }
369 } else {
370 return self.attempt_match_node_children(phase, pattern, code);
371 }
372 Ok(())
373 }
374
375 fn attempt_match_opt<T: AstNode>(
376 &self,
377 phase: &mut Phase,
378 pattern: Option<T>,
379 code: Option<T>,
380 ) -> Result<(), MatchFailed> {
381 match (pattern, code) {
382 (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()),
383 (None, None) => Ok(()),
384 (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
385 (None, Some(c)) => {
386 fail_match!("Nothing in pattern to match code `{}`", c.syntax().text())
387 }
388 }
389 }
390
297 /// We want to allow the records to match in any order, so we have special matching logic for 391 /// We want to allow the records to match in any order, so we have special matching logic for
298 /// them. 392 /// them.
299 fn attempt_match_record_field_list( 393 fn attempt_match_record_field_list(
300 &mut self, 394 &self,
301 match_inputs: &MatchInputs, 395 phase: &mut Phase,
302 pattern: &SyntaxNode, 396 pattern: &SyntaxNode,
303 code: &SyntaxNode, 397 code: &SyntaxNode,
304 ) -> Result<(), MatchFailed> { 398 ) -> Result<(), MatchFailed> {
305 // Build a map keyed by field name. 399 // Build a map keyed by field name.
306 let mut fields_by_name = FxHashMap::default(); 400 let mut fields_by_name = FxHashMap::default();
307 for child in code.children() { 401 for child in code.children() {
308 if let Some(record) = ast::RecordField::cast(child.clone()) { 402 if let Some(record) = ast::RecordExprField::cast(child.clone()) {
309 if let Some(name) = record.field_name() { 403 if let Some(name) = record.field_name() {
310 fields_by_name.insert(name.text().clone(), child.clone()); 404 fields_by_name.insert(name.text().clone(), child.clone());
311 } 405 }
@@ -314,11 +408,11 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
314 for p in pattern.children_with_tokens() { 408 for p in pattern.children_with_tokens() {
315 if let SyntaxElement::Node(p) = p { 409 if let SyntaxElement::Node(p) = p {
316 if let Some(name_element) = p.first_child_or_token() { 410 if let Some(name_element) = p.first_child_or_token() {
317 if match_inputs.get_placeholder(&name_element).is_some() { 411 if self.get_placeholder(&name_element).is_some() {
318 // If the pattern is using placeholders for field names then order 412 // If the pattern is using placeholders for field names then order
319 // independence doesn't make sense. Fall back to regular ordered 413 // independence doesn't make sense. Fall back to regular ordered
320 // matching. 414 // matching.
321 return self.attempt_match_node_children(match_inputs, pattern, code); 415 return self.attempt_match_node_children(phase, pattern, code);
322 } 416 }
323 if let Some(ident) = only_ident(name_element) { 417 if let Some(ident) = only_ident(name_element) {
324 let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| { 418 let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| {
@@ -327,7 +421,7 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
327 ident 421 ident
328 ) 422 )
329 })?; 423 })?;
330 self.attempt_match_node(match_inputs, &p, &code_record)?; 424 self.attempt_match_node(phase, &p, &code_record)?;
331 } 425 }
332 } 426 }
333 } 427 }
@@ -347,16 +441,15 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
347 /// pattern matches the macro invocation. For matches within the macro call, we'll already have 441 /// pattern matches the macro invocation. For matches within the macro call, we'll already have
348 /// expanded the macro. 442 /// expanded the macro.
349 fn attempt_match_token_tree( 443 fn attempt_match_token_tree(
350 &mut self, 444 &self,
351 match_inputs: &MatchInputs, 445 phase: &mut Phase,
352 pattern: &SyntaxNode, 446 pattern: &SyntaxNode,
353 code: &ra_syntax::SyntaxNode, 447 code: &ra_syntax::SyntaxNode,
354 ) -> Result<(), MatchFailed> { 448 ) -> Result<(), MatchFailed> {
355 let mut pattern = PatternIterator::new(pattern).peekable(); 449 let mut pattern = PatternIterator::new(pattern).peekable();
356 let mut children = code.children_with_tokens(); 450 let mut children = code.children_with_tokens();
357 while let Some(child) = children.next() { 451 while let Some(child) = children.next() {
358 if let Some(placeholder) = pattern.peek().and_then(|p| match_inputs.get_placeholder(p)) 452 if let Some(placeholder) = pattern.peek().and_then(|p| self.get_placeholder(p)) {
359 {
360 pattern.next(); 453 pattern.next();
361 let next_pattern_token = pattern 454 let next_pattern_token = pattern
362 .peek() 455 .peek()
@@ -382,7 +475,7 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
382 if Some(first_token.to_string()) == next_pattern_token { 475 if Some(first_token.to_string()) == next_pattern_token {
383 if let Some(SyntaxElement::Node(p)) = pattern.next() { 476 if let Some(SyntaxElement::Node(p)) = pattern.next() {
384 // We have a subtree that starts with the next token in our pattern. 477 // We have a subtree that starts with the next token in our pattern.
385 self.attempt_match_token_tree(match_inputs, &p, &n)?; 478 self.attempt_match_token_tree(phase, &p, &n)?;
386 break; 479 break;
387 } 480 }
388 } 481 }
@@ -391,7 +484,7 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
391 }; 484 };
392 last_matched_token = next; 485 last_matched_token = next;
393 } 486 }
394 if let Some(match_out) = &mut self.match_out { 487 if let Phase::Second(match_out) = phase {
395 match_out.placeholder_values.insert( 488 match_out.placeholder_values.insert(
396 Var(placeholder.ident.to_string()), 489 Var(placeholder.ident.to_string()),
397 PlaceholderMatch::from_range(FileRange { 490 PlaceholderMatch::from_range(FileRange {
@@ -407,11 +500,11 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
407 // Match literal (non-placeholder) tokens. 500 // Match literal (non-placeholder) tokens.
408 match child { 501 match child {
409 SyntaxElement::Token(token) => { 502 SyntaxElement::Token(token) => {
410 self.attempt_match_token(&mut pattern, &token)?; 503 self.attempt_match_token(phase, &mut pattern, &token)?;
411 } 504 }
412 SyntaxElement::Node(node) => match pattern.next() { 505 SyntaxElement::Node(node) => match pattern.next() {
413 Some(SyntaxElement::Node(p)) => { 506 Some(SyntaxElement::Node(p)) => {
414 self.attempt_match_token_tree(match_inputs, &p, &node)?; 507 self.attempt_match_token_tree(phase, &p, &node)?;
415 } 508 }
416 Some(SyntaxElement::Token(p)) => fail_match!( 509 Some(SyntaxElement::Token(p)) => fail_match!(
417 "Pattern has token '{}', code has subtree '{}'", 510 "Pattern has token '{}', code has subtree '{}'",
@@ -428,6 +521,65 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
428 Ok(()) 521 Ok(())
429 } 522 }
430 523
524 fn attempt_match_ufcs(
525 &self,
526 phase: &mut Phase,
527 pattern: &ast::CallExpr,
528 code: &ast::MethodCallExpr,
529 pattern_function: hir::Function,
530 ) -> Result<(), MatchFailed> {
531 use ast::ArgListOwner;
532 let code_resolved_function = self
533 .sema
534 .resolve_method_call(code)
535 .ok_or_else(|| match_error!("Failed to resolve method call"))?;
536 if pattern_function != code_resolved_function {
537 fail_match!("Method call resolved to a different function");
538 }
539 // Check arguments.
540 let mut pattern_args = pattern
541 .arg_list()
542 .ok_or_else(|| match_error!("Pattern function call has no args"))?
543 .args();
544 self.attempt_match_opt(phase, pattern_args.next(), code.expr())?;
545 let mut code_args =
546 code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args();
547 loop {
548 match (pattern_args.next(), code_args.next()) {
549 (None, None) => return Ok(()),
550 (p, c) => self.attempt_match_opt(phase, p, c)?,
551 }
552 }
553 }
554
555 fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
556 only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident))
557 }
558}
559
560impl Match {
561 fn render_template_paths(
562 &mut self,
563 template: &ResolvedPattern,
564 sema: &Semantics<ra_ide_db::RootDatabase>,
565 ) -> Result<(), MatchFailed> {
566 let module = sema
567 .scope(&self.matched_node)
568 .module()
569 .ok_or_else(|| match_error!("Matched node isn't in a module"))?;
570 for (path, resolved_path) in &template.resolved_paths {
571 if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
572 let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {
573 match_error!("Failed to render template path `{}` at match location")
574 })?;
575 self.rendered_template_paths.insert(path.clone(), mod_path);
576 }
577 }
578 Ok(())
579 }
580}
581
582impl Phase<'_> {
431 fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> { 583 fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> {
432 loop { 584 loop {
433 let c = code_it.next(); 585 let c = code_it.next();
@@ -443,7 +595,7 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
443 595
444 fn record_ignored_comments(&mut self, token: &SyntaxToken) { 596 fn record_ignored_comments(&mut self, token: &SyntaxToken) {
445 if token.kind() == SyntaxKind::COMMENT { 597 if token.kind() == SyntaxKind::COMMENT {
446 if let Some(match_out) = &mut self.match_out { 598 if let Phase::Second(match_out) = self {
447 if let Some(comment) = ast::Comment::cast(token.clone()) { 599 if let Some(comment) = ast::Comment::cast(token.clone()) {
448 match_out.ignored_comments.push(comment); 600 match_out.ignored_comments.push(comment);
449 } 601 }
@@ -452,13 +604,6 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
452 } 604 }
453} 605}
454 606
455impl MatchInputs<'_> {
456 fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
457 only_ident(element.clone())
458 .and_then(|ident| self.ssr_pattern.placeholders_by_stand_in.get(ident.text()))
459 }
460}
461
462fn is_closing_token(kind: SyntaxKind) -> bool { 607fn is_closing_token(kind: SyntaxKind) -> bool {
463 kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK 608 kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK
464} 609}
@@ -495,25 +640,18 @@ impl PlaceholderMatch {
495 } 640 }
496} 641}
497 642
498impl SsrPattern { 643impl NodeKind {
499 pub(crate) fn tree_for_kind(&self, kind: SyntaxKind) -> Result<&SyntaxNode, MatchFailed> { 644 fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
500 let (tree, kind_name) = if ast::Expr::can_cast(kind) { 645 let ok = match self {
501 (&self.expr, "expression") 646 Self::Literal => {
502 } else if ast::TypeRef::can_cast(kind) { 647 mark::hit!(literal_constraint);
503 (&self.type_ref, "type reference") 648 ast::Literal::can_cast(node.kind())
504 } else if ast::ModuleItem::can_cast(kind) { 649 }
505 (&self.item, "item")
506 } else if ast::Path::can_cast(kind) {
507 (&self.path, "path")
508 } else if ast::Pat::can_cast(kind) {
509 (&self.pattern, "pattern")
510 } else {
511 fail_match!("Matching nodes of kind {:?} is not supported", kind);
512 }; 650 };
513 match tree { 651 if !ok {
514 Some(tree) => Ok(tree), 652 fail_match!("Code '{}' isn't of kind {:?}", node.text(), self);
515 None => fail_match!("Pattern cannot be parsed as a {}", kind_name),
516 } 653 }
654 Ok(())
517 } 655 }
518} 656}
519 657
@@ -561,18 +699,17 @@ impl PatternIterator {
561#[cfg(test)] 699#[cfg(test)]
562mod tests { 700mod tests {
563 use super::*; 701 use super::*;
564 use crate::MatchFinder; 702 use crate::{MatchFinder, SsrRule};
565 703
566 #[test] 704 #[test]
567 fn parse_match_replace() { 705 fn parse_match_replace() {
568 let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap(); 706 let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
569 let input = "fn main() { foo(1+2); }"; 707 let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
570 708
571 use ra_db::fixture::WithFixture; 709 let (db, position, selections) = crate::tests::single_file(input);
572 let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(input); 710 let mut match_finder = MatchFinder::in_context(&db, position, selections);
573 let mut match_finder = MatchFinder::new(&db); 711 match_finder.add_rule(rule).unwrap();
574 match_finder.add_rule(rule); 712 let matches = match_finder.matches();
575 let matches = match_finder.find_matches_in_file(file_id);
576 assert_eq!(matches.matches.len(), 1); 713 assert_eq!(matches.matches.len(), 1);
577 assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)"); 714 assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
578 assert_eq!(matches.matches[0].placeholder_values.len(), 1); 715 assert_eq!(matches.matches[0].placeholder_values.len(), 1);
@@ -585,9 +722,11 @@ mod tests {
585 "1+2" 722 "1+2"
586 ); 723 );
587 724
588 let edit = crate::replacing::matches_to_edit(&matches, input); 725 let edits = match_finder.edits();
726 assert_eq!(edits.len(), 1);
727 let edit = &edits[0];
589 let mut after = input.to_string(); 728 let mut after = input.to_string();
590 edit.apply(&mut after); 729 edit.edit.apply(&mut after);
591 assert_eq!(after, "fn main() { bar(1+2); }"); 730 assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
592 } 731 }
593} 732}
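
The UFCS handling added in `attempt_match_ufcs` means a pattern written as a fully-qualified call can match method-call syntax when both resolve to the same function. A hypothetical test in the same style (and the same `mod tests` context) as `parse_match_replace` above might look like this; the `Foo` type and its methods are made up for the sketch.

#[test]
fn ufcs_pattern_matches_method_call() {
    let rule: SsrRule = "Foo::bar($s, $x) ==>> $s.bar2($x)".parse().unwrap();
    let input = "struct Foo {} \
                 impl Foo { fn bar(&self, _: i32) {} fn bar2(&self, _: i32) {} } \
                 fn main() { let f = Foo {}; f.bar(42); }";
    let (db, position, selections) = crate::tests::single_file(input);
    let mut match_finder = MatchFinder::in_context(&db, position, selections);
    match_finder.add_rule(rule).unwrap();
    let matches = match_finder.matches();
    // `f.bar(42)` is a method call, but it resolves to the same function as the UFCS call
    // in the pattern, so it is accepted; `$s` binds to `f` and `$x` to `42`.
    assert_eq!(matches.matches.len(), 1);
    assert_eq!(matches.matches[0].matched_node.text(), "f.bar(42)");
}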
diff --git a/crates/ra_ssr/src/nester.rs b/crates/ra_ssr/src/nester.rs
new file mode 100644
index 000000000..b3e20579b
--- /dev/null
+++ b/crates/ra_ssr/src/nester.rs
@@ -0,0 +1,98 @@
+//! Converts a flat collection of matches into a nested form suitable for replacement. When there
+//! are multiple matches for a node, or that overlap, priority is given to the earlier rule. Nested
+//! matches are only permitted if the inner match is contained entirely within a placeholder of an
+//! outer match.
+//!
+//! For example, if our search pattern is `foo(foo($a))` and the code had `foo(foo(foo(foo(42))))`,
+//! then we'll get 3 matches, however only the outermost and innermost matches can be accepted. The
+//! middle match would take the second `foo` from the outer match.
+
+use crate::{Match, SsrMatches};
+use ra_syntax::SyntaxNode;
+use rustc_hash::FxHashMap;
+
+pub(crate) fn nest_and_remove_collisions(
+    mut matches: Vec<Match>,
+    sema: &hir::Semantics<ra_ide_db::RootDatabase>,
+) -> SsrMatches {
+    // We sort the matches by depth then by rule index. Sorting by depth means that by the time we
+    // see a match, any parent matches or conflicting matches will have already been seen. Sorting
+    // by rule_index means that if there are two matches for the same node, the rule added first
+    // will take precedence.
+    matches.sort_by(|a, b| a.depth.cmp(&b.depth).then_with(|| a.rule_index.cmp(&b.rule_index)));
+    let mut collector = MatchCollector::default();
+    for m in matches {
+        collector.add_match(m, sema);
+    }
+    collector.into()
+}
+
+#[derive(Default)]
+struct MatchCollector {
+    matches_by_node: FxHashMap<SyntaxNode, Match>,
+}
+
+impl MatchCollector {
+    /// Attempts to add `m` to matches. If it conflicts with an existing match, it is discarded. If
+    /// it is entirely within a placeholder of an existing match, then it is added as a child
+    /// match of the existing match.
+    fn add_match(&mut self, m: Match, sema: &hir::Semantics<ra_ide_db::RootDatabase>) {
+        let matched_node = m.matched_node.clone();
+        if let Some(existing) = self.matches_by_node.get_mut(&matched_node) {
+            try_add_sub_match(m, existing, sema);
+            return;
+        }
+        for ancestor in sema.ancestors_with_macros(m.matched_node.clone()) {
+            if let Some(existing) = self.matches_by_node.get_mut(&ancestor) {
+                try_add_sub_match(m, existing, sema);
+                return;
+            }
+        }
+        self.matches_by_node.insert(matched_node, m);
+    }
+}
+
+/// Attempts to add `m` as a sub-match of `existing`.
+fn try_add_sub_match(
+    m: Match,
+    existing: &mut Match,
+    sema: &hir::Semantics<ra_ide_db::RootDatabase>,
+) {
+    for p in existing.placeholder_values.values_mut() {
+        // Note, no need to check if p.range.file is equal to m.range.file, since we
+        // already know we're within `existing`.
+        if p.range.range.contains_range(m.range.range) {
+            // Convert the inner matches in `p` into a temporary MatchCollector. When
+            // we're done, we then convert it back into an SsrMatches. If we expected
+            // lots of inner matches, it might be worthwhile keeping a MatchCollector
+            // around for each placeholder match. However we expect most placeholders
+            // will have 0 and a few will have 1. More than that should hopefully be
+            // exceptional.
+            let mut collector = MatchCollector::default();
+            for m in std::mem::replace(&mut p.inner_matches.matches, Vec::new()) {
+                collector.matches_by_node.insert(m.matched_node.clone(), m);
+            }
+            collector.add_match(m, sema);
+            p.inner_matches = collector.into();
+            break;
+        }
+    }
+}
+
+impl From<MatchCollector> for SsrMatches {
+    fn from(mut match_collector: MatchCollector) -> Self {
+        let mut matches = SsrMatches::default();
+        for (_, m) in match_collector.matches_by_node.drain() {
+            matches.matches.push(m);
+        }
+        matches.matches.sort_by(|a, b| {
+            // Order matches by file_id then by start range. This should be sufficient since ranges
+            // shouldn't be overlapping.
+            a.range
+                .file_id
+                .cmp(&b.range.file_id)
+                .then_with(|| a.range.range.start().cmp(&b.range.range.start()))
+        });
+        matches
+    }
+}
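
The behaviour described in the module docs can be pictured with a small, hypothetical test in the style of the crate's existing tests (it assumes the `crate::tests::single_file` helper used elsewhere in this diff): for the pattern `foo(foo($a))` applied to `foo(foo(foo(foo(42))))`, only the outermost match and the match inside its `$a` placeholder survive.

use crate::MatchFinder;

#[test]
fn outer_and_innermost_matches_survive_nesting() {
    let input = "fn foo(x: i64) -> i64 { x } fn main() { foo(foo(foo(foo(42)))); }";
    let (db, position, selections) = crate::tests::single_file(input);
    let mut match_finder = MatchFinder::in_context(&db, position, selections);
    match_finder.add_search_pattern("foo(foo($a))".parse().unwrap()).unwrap();
    // The middle candidate collides with the outer match and is dropped; the innermost
    // candidate sits entirely inside the outer match's `$a` placeholder, so it is kept
    // as a nested match. `flattened()` promotes it back to a top-level match.
    assert_eq!(match_finder.matches().flattened().matches.len(), 2);
}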
diff --git a/crates/ra_ssr/src/parsing.rs b/crates/ra_ssr/src/parsing.rs
index 1ae166d19..78e03f394 100644
--- a/crates/ra_ssr/src/parsing.rs
+++ b/crates/ra_ssr/src/parsing.rs
@@ -5,24 +5,22 @@
5//! search patterns, we go further and parse the pattern as each kind of thing that we can match. 5//! search patterns, we go further and parse the pattern as each kind of thing that we can match.
6//! e.g. expressions, type references etc. 6//! e.g. expressions, type references etc.
7 7
8use crate::errors::bail;
8use crate::{SsrError, SsrPattern, SsrRule}; 9use crate::{SsrError, SsrPattern, SsrRule};
9use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind}; 10use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T};
10use rustc_hash::{FxHashMap, FxHashSet}; 11use rustc_hash::{FxHashMap, FxHashSet};
11use std::str::FromStr; 12use std::str::FromStr;
13use test_utils::mark;
12 14
13/// Returns from the current function with an error, supplied by arguments as for format! 15#[derive(Debug)]
14macro_rules! bail { 16pub(crate) struct ParsedRule {
15 ($e:expr) => {return Err($crate::SsrError::new($e))}; 17 pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
16 ($fmt:expr, $($arg:tt)+) => {return Err($crate::SsrError::new(format!($fmt, $($arg)+)))} 18 pub(crate) pattern: SyntaxNode,
17} 19 pub(crate) template: Option<SyntaxNode>,
18
19#[derive(Clone, Debug)]
20pub(crate) struct SsrTemplate {
21 pub(crate) tokens: Vec<PatternElement>,
22} 20}
23 21
24#[derive(Debug)] 22#[derive(Debug)]
25pub(crate) struct RawSearchPattern { 23pub(crate) struct RawPattern {
26 tokens: Vec<PatternElement>, 24 tokens: Vec<PatternElement>,
27} 25}
28 26
@@ -39,6 +37,18 @@ pub(crate) struct Placeholder {
39 pub(crate) ident: SmolStr, 37 pub(crate) ident: SmolStr,
40 /// A unique name used in place of this placeholder when we parse the pattern as Rust code. 38 /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
41 stand_in_name: String, 39 stand_in_name: String,
40 pub(crate) constraints: Vec<Constraint>,
41}
42
43#[derive(Clone, Debug, PartialEq, Eq)]
44pub(crate) enum Constraint {
45 Kind(NodeKind),
46 Not(Box<Constraint>),
47}
48
49#[derive(Clone, Debug, PartialEq, Eq)]
50pub(crate) enum NodeKind {
51 Literal,
42} 52}
43 53
44#[derive(Debug, Clone, PartialEq, Eq)] 54#[derive(Debug, Clone, PartialEq, Eq)]
@@ -47,6 +57,78 @@ pub(crate) struct Token {
47 pub(crate) text: SmolStr, 57 pub(crate) text: SmolStr,
48} 58}
49 59
60impl ParsedRule {
61 fn new(
62 pattern: &RawPattern,
63 template: Option<&RawPattern>,
64 ) -> Result<Vec<ParsedRule>, SsrError> {
65 let raw_pattern = pattern.as_rust_code();
66 let raw_template = template.map(|t| t.as_rust_code());
67 let raw_template = raw_template.as_ref().map(|s| s.as_str());
68 let mut builder = RuleBuilder {
69 placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
70 rules: Vec::new(),
71 };
72 builder.try_add(ast::Expr::parse(&raw_pattern), raw_template.map(ast::Expr::parse));
73 builder.try_add(ast::TypeRef::parse(&raw_pattern), raw_template.map(ast::TypeRef::parse));
74 builder.try_add(ast::Item::parse(&raw_pattern), raw_template.map(ast::Item::parse));
75 builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse));
76 builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse));
77 builder.build()
78 }
79}
80
81struct RuleBuilder {
82 placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
83 rules: Vec<ParsedRule>,
84}
85
86impl RuleBuilder {
87 fn try_add<T: AstNode>(&mut self, pattern: Result<T, ()>, template: Option<Result<T, ()>>) {
88 match (pattern, template) {
89 (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule {
90 placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
91 pattern: pattern.syntax().clone(),
92 template: Some(template.syntax().clone()),
93 }),
94 (Ok(pattern), None) => self.rules.push(ParsedRule {
95 placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
96 pattern: pattern.syntax().clone(),
97 template: None,
98 }),
99 _ => {}
100 }
101 }
102
103 fn build(mut self) -> Result<Vec<ParsedRule>, SsrError> {
104 if self.rules.is_empty() {
105 bail!("Not a valid Rust expression, type, item, path or pattern");
106 }
107 // If any rules contain paths, then we reject any rules that don't contain paths. Allowing a
108 // mix leads to strange semantics, since the path-based rules only match things where the
109 // path refers to semantically the same thing, whereas the non-path-based rules could match
110 // anything. Specifically, if we have a rule like `foo ==>> bar` we only want to match the
111 // `foo` that is in the current scope, not any `foo`. However "foo" can be parsed as a
112 // pattern (BIND_PAT -> NAME -> IDENT). Allowing such a rule through would result in
113 // renaming everything called `foo` to `bar`. It'd also be slow, since without a path, we'd
114 // have to use the slow-scan search mechanism.
115 if self.rules.iter().any(|rule| contains_path(&rule.pattern)) {
116 let old_len = self.rules.len();
117 self.rules.retain(|rule| contains_path(&rule.pattern));
118 if self.rules.len() < old_len {
119 mark::hit!(pattern_is_a_single_segment_path);
120 }
121 }
122 Ok(self.rules)
123 }
124}
125
126/// Returns whether there are any paths in `node`.
127fn contains_path(node: &SyntaxNode) -> bool {
128 node.kind() == SyntaxKind::PATH
129 || node.descendants().any(|node| node.kind() == SyntaxKind::PATH)
130}
131
50impl FromStr for SsrRule { 132impl FromStr for SsrRule {
51 type Err = SsrError; 133 type Err = SsrError;
52 134
@@ -55,27 +137,30 @@ impl FromStr for SsrRule {
55 let pattern = it.next().expect("at least empty string").trim(); 137 let pattern = it.next().expect("at least empty string").trim();
56 let template = it 138 let template = it
57 .next() 139 .next()
58 .ok_or_else(|| SsrError("Cannot find delemiter `==>>`".into()))? 140 .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))?
59 .trim() 141 .trim()
60 .to_string(); 142 .to_string();
61 if it.next().is_some() { 143 if it.next().is_some() {
62 return Err(SsrError("More than one delimiter found".into())); 144 return Err(SsrError("More than one delimiter found".into()));
63 } 145 }
64 let rule = SsrRule { pattern: pattern.parse()?, template: template.parse()? }; 146 let raw_pattern = pattern.parse()?;
147 let raw_template = template.parse()?;
148 let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?;
149 let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules };
65 validate_rule(&rule)?; 150 validate_rule(&rule)?;
66 Ok(rule) 151 Ok(rule)
67 } 152 }
68} 153}
69 154
70impl FromStr for RawSearchPattern { 155impl FromStr for RawPattern {
71 type Err = SsrError; 156 type Err = SsrError;
72 157
73 fn from_str(pattern_str: &str) -> Result<RawSearchPattern, SsrError> { 158 fn from_str(pattern_str: &str) -> Result<RawPattern, SsrError> {
74 Ok(RawSearchPattern { tokens: parse_pattern(pattern_str)? }) 159 Ok(RawPattern { tokens: parse_pattern(pattern_str)? })
75 } 160 }
76} 161}
77 162
78impl RawSearchPattern { 163impl RawPattern {
79 /// Returns this search pattern as Rust source code that we can feed to the Rust parser. 164 /// Returns this search pattern as Rust source code that we can feed to the Rust parser.
80 fn as_rust_code(&self) -> String { 165 fn as_rust_code(&self) -> String {
81 let mut res = String::new(); 166 let mut res = String::new();
@@ -88,7 +173,7 @@ impl RawSearchPattern {
88 res 173 res
89 } 174 }
90 175
91 fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> { 176 pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> {
92 let mut res = FxHashMap::default(); 177 let mut res = FxHashMap::default();
93 for t in &self.tokens { 178 for t in &self.tokens {
94 if let PatternElement::Placeholder(placeholder) = t { 179 if let PatternElement::Placeholder(placeholder) = t {
@@ -103,41 +188,9 @@ impl FromStr for SsrPattern {
103 type Err = SsrError; 188 type Err = SsrError;
104 189
105 fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> { 190 fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> {
106 let raw: RawSearchPattern = pattern_str.parse()?; 191 let raw_pattern = pattern_str.parse()?;
107 let raw_str = raw.as_rust_code(); 192 let parsed_rules = ParsedRule::new(&raw_pattern, None)?;
108 let res = SsrPattern { 193 Ok(SsrPattern { raw: raw_pattern, parsed_rules })
109 expr: ast::Expr::parse(&raw_str).ok().map(|n| n.syntax().clone()),
110 type_ref: ast::TypeRef::parse(&raw_str).ok().map(|n| n.syntax().clone()),
111 item: ast::ModuleItem::parse(&raw_str).ok().map(|n| n.syntax().clone()),
112 path: ast::Path::parse(&raw_str).ok().map(|n| n.syntax().clone()),
113 pattern: ast::Pat::parse(&raw_str).ok().map(|n| n.syntax().clone()),
114 placeholders_by_stand_in: raw.placeholders_by_stand_in(),
115 raw,
116 };
117 if res.expr.is_none()
118 && res.type_ref.is_none()
119 && res.item.is_none()
120 && res.path.is_none()
121 && res.pattern.is_none()
122 {
123 bail!("Pattern is not a valid Rust expression, type, item, path or pattern");
124 }
125 Ok(res)
126 }
127}
128
129impl FromStr for SsrTemplate {
130 type Err = SsrError;
131
132 fn from_str(pattern_str: &str) -> Result<SsrTemplate, SsrError> {
133 let tokens = parse_pattern(pattern_str)?;
134 // Validate that the template is a valid fragment of Rust code. We reuse the validation
135 // logic for search patterns since the only thing that differs is the error message.
136 if SsrPattern::from_str(pattern_str).is_err() {
137 bail!("Replacement is not a valid Rust expression, type, item, path or pattern");
138 }
139 // Our actual template needs to preserve whitespace, so we can't reuse `tokens`.
140 Ok(SsrTemplate { tokens })
141 } 194 }
142} 195}
143 196
@@ -149,7 +202,7 @@ fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
149 let mut placeholder_names = FxHashSet::default(); 202 let mut placeholder_names = FxHashSet::default();
150 let mut tokens = tokenize(pattern_str)?.into_iter(); 203 let mut tokens = tokenize(pattern_str)?.into_iter();
151 while let Some(token) = tokens.next() { 204 while let Some(token) = tokens.next() {
152 if token.kind == SyntaxKind::DOLLAR { 205 if token.kind == T![$] {
153 let placeholder = parse_placeholder(&mut tokens)?; 206 let placeholder = parse_placeholder(&mut tokens)?;
154 if !placeholder_names.insert(placeholder.ident.clone()) { 207 if !placeholder_names.insert(placeholder.ident.clone()) {
155 bail!("Name `{}` repeats more than once", placeholder.ident); 208 bail!("Name `{}` repeats more than once", placeholder.ident);
@@ -166,7 +219,7 @@ fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
166/// pattern didn't define. 219/// pattern didn't define.
167fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> { 220fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
168 let mut defined_placeholders = FxHashSet::default(); 221 let mut defined_placeholders = FxHashSet::default();
169 for p in &rule.pattern.raw.tokens { 222 for p in &rule.pattern.tokens {
170 if let PatternElement::Placeholder(placeholder) = p { 223 if let PatternElement::Placeholder(placeholder) = p {
171 defined_placeholders.insert(&placeholder.ident); 224 defined_placeholders.insert(&placeholder.ident);
172 } 225 }
@@ -177,6 +230,9 @@ fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
177 if !defined_placeholders.contains(&placeholder.ident) { 230 if !defined_placeholders.contains(&placeholder.ident) {
178 undefined.push(format!("${}", placeholder.ident)); 231 undefined.push(format!("${}", placeholder.ident));
179 } 232 }
233 if !placeholder.constraints.is_empty() {
234 bail!("Replacement placeholders cannot have constraints");
235 }
180 } 236 }
181 } 237 }
182 if !undefined.is_empty() {  238 if !undefined.is_empty() {
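As a quick illustration of what this validation rejects (the rule strings are illustrative, not tests included in this change):

// A placeholder used in the replacement must be defined in the pattern.
assert!("foo() ==>> bar($a)".parse::<SsrRule>().is_err());
// Constraints only make sense on the search side, so they are rejected in
// the replacement.
assert!("foo(${a:kind(literal)}) ==>> ${a:kind(literal)}".parse::<SsrRule>().is_err());
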
@@ -205,29 +261,90 @@ fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
205 261
206fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> { 262fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
207 let mut name = None; 263 let mut name = None;
264 let mut constraints = Vec::new();
208 if let Some(token) = tokens.next() { 265 if let Some(token) = tokens.next() {
209 match token.kind { 266 match token.kind {
210 SyntaxKind::IDENT => { 267 SyntaxKind::IDENT => {
211 name = Some(token.text); 268 name = Some(token.text);
212 } 269 }
270 T!['{'] => {
271 let token =
272 tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
273 if token.kind == SyntaxKind::IDENT {
274 name = Some(token.text);
275 }
276 loop {
277 let token = tokens
278 .next()
279 .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
280 match token.kind {
281 T![:] => {
282 constraints.push(parse_constraint(tokens)?);
283 }
284 T!['}'] => break,
285 _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
286 }
287 }
288 }
213 _ => { 289 _ => {
214 bail!("Placeholders should be $name"); 290 bail!("Placeholders should either be $name or ${{name:constraints}}");
215 } 291 }
216 } 292 }
217 } 293 }
218 let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?; 294 let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
219 Ok(Placeholder::new(name)) 295 Ok(Placeholder::new(name, constraints))
220} 296}
221 297
222impl Placeholder { 298fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> {
223 fn new(name: SmolStr) -> Self { 299 let constraint_type = tokens
224 Self { stand_in_name: format!("__placeholder_{}", name), ident: name } 300 .next()
301 .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))?
302 .text
303 .to_string();
304 match constraint_type.as_str() {
305 "kind" => {
306 expect_token(tokens, "(")?;
307 let t = tokens.next().ok_or_else(|| {
308 SsrError::new("Unexpected end of constraint while looking for kind")
309 })?;
310 if t.kind != SyntaxKind::IDENT {
311 bail!("Expected ident, found {:?} while parsing kind constraint", t.kind);
312 }
313 expect_token(tokens, ")")?;
314 Ok(Constraint::Kind(NodeKind::from(&t.text)?))
315 }
316 "not" => {
317 expect_token(tokens, "(")?;
318 let sub = parse_constraint(tokens)?;
319 expect_token(tokens, ")")?;
320 Ok(Constraint::Not(Box::new(sub)))
321 }
322 x => bail!("Unsupported constraint type '{}'", x),
323 }
324}
325
326fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> {
327 if let Some(t) = tokens.next() {
328 if t.text == expected {
329 return Ok(());
330 }
331 bail!("Expected {} found {}", expected, t.text);
332 }
333 bail!("Expected {} found end of stream", expected);
334}
335
336impl NodeKind {
337 fn from(name: &SmolStr) -> Result<NodeKind, SsrError> {
338 Ok(match name.as_str() {
339 "literal" => NodeKind::Literal,
340 _ => bail!("Unknown node kind '{}'", name),
341 })
225 } 342 }
226} 343}
227 344
228impl SsrError { 345impl Placeholder {
229 fn new(message: impl Into<String>) -> SsrError { 346 fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
230 SsrError(message.into()) 347 Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name }
231 } 348 }
232} 349}
233 350
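For reference, the placeholder grammar accepted by the parsing above, shown as illustrative rule strings (crate-internal context assumed, not tests included in this change):

let _: SsrRule = "foo($a) ==>> bar($a)".parse().unwrap();                      // plain placeholder
let _: SsrRule = "foo(${a:kind(literal)}) ==>> bar($a)".parse().unwrap();      // kind constraint
let _: SsrRule = "foo(${a:not(kind(literal))}) ==>> bar($a)".parse().unwrap(); // negated constraint
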
@@ -241,31 +358,31 @@ mod tests {
241 PatternElement::Token(Token { kind, text: SmolStr::new(text) }) 358 PatternElement::Token(Token { kind, text: SmolStr::new(text) })
242 } 359 }
243 fn placeholder(name: &str) -> PatternElement { 360 fn placeholder(name: &str) -> PatternElement {
244 PatternElement::Placeholder(Placeholder::new(SmolStr::new(name))) 361 PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
245 } 362 }
246 let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap(); 363 let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
247 assert_eq!( 364 assert_eq!(
248 result.pattern.raw.tokens, 365 result.pattern.tokens,
249 vec![ 366 vec![
250 token(SyntaxKind::IDENT, "foo"), 367 token(SyntaxKind::IDENT, "foo"),
251 token(SyntaxKind::L_PAREN, "("), 368 token(T!['('], "("),
252 placeholder("a"), 369 placeholder("a"),
253 token(SyntaxKind::COMMA, ","), 370 token(T![,], ","),
254 token(SyntaxKind::WHITESPACE, " "), 371 token(SyntaxKind::WHITESPACE, " "),
255 placeholder("b"), 372 placeholder("b"),
256 token(SyntaxKind::R_PAREN, ")"), 373 token(T![')'], ")"),
257 ] 374 ]
258 ); 375 );
259 assert_eq!( 376 assert_eq!(
260 result.template.tokens, 377 result.template.tokens,
261 vec![ 378 vec![
262 token(SyntaxKind::IDENT, "bar"), 379 token(SyntaxKind::IDENT, "bar"),
263 token(SyntaxKind::L_PAREN, "("), 380 token(T!['('], "("),
264 placeholder("b"), 381 placeholder("b"),
265 token(SyntaxKind::COMMA, ","), 382 token(T![,], ","),
266 token(SyntaxKind::WHITESPACE, " "), 383 token(SyntaxKind::WHITESPACE, " "),
267 placeholder("a"), 384 placeholder("a"),
268 token(SyntaxKind::R_PAREN, ")"), 385 token(T![')'], ")"),
269 ] 386 ]
270 ); 387 );
271 } 388 }
diff --git a/crates/ra_ssr/src/replacing.rs b/crates/ra_ssr/src/replacing.rs
index 70ce1c185..0943244ff 100644
--- a/crates/ra_ssr/src/replacing.rs
+++ b/crates/ra_ssr/src/replacing.rs
@@ -1,64 +1,194 @@
1//! Code for applying replacement templates for matches that have previously been found. 1//! Code for applying replacement templates for matches that have previously been found.
2 2
3use crate::matching::Var; 3use crate::matching::Var;
4use crate::parsing::PatternElement; 4use crate::{resolving::ResolvedRule, Match, SsrMatches};
5use crate::{Match, SsrMatches}; 5use ra_syntax::ast::{self, AstToken};
6use ra_syntax::ast::AstToken; 6use ra_syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize};
7use ra_syntax::TextSize;
8use ra_text_edit::TextEdit; 7use ra_text_edit::TextEdit;
8use rustc_hash::{FxHashMap, FxHashSet};
9 9
10/// Returns a text edit that will replace each match in `matches` with its corresponding replacement 10/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
11/// template. Placeholders in the template will have been substituted with whatever they matched to 11/// template. Placeholders in the template will have been substituted with whatever they matched to
12/// in the original code. 12/// in the original code.
13pub(crate) fn matches_to_edit(matches: &SsrMatches, file_src: &str) -> TextEdit { 13pub(crate) fn matches_to_edit(
14 matches_to_edit_at_offset(matches, file_src, 0.into()) 14 matches: &SsrMatches,
15 file_src: &str,
16 rules: &[ResolvedRule],
17) -> TextEdit {
18 matches_to_edit_at_offset(matches, file_src, 0.into(), rules)
15} 19}
16 20
17fn matches_to_edit_at_offset( 21fn matches_to_edit_at_offset(
18 matches: &SsrMatches, 22 matches: &SsrMatches,
19 file_src: &str, 23 file_src: &str,
20 relative_start: TextSize, 24 relative_start: TextSize,
25 rules: &[ResolvedRule],
21) -> TextEdit { 26) -> TextEdit {
22 let mut edit_builder = ra_text_edit::TextEditBuilder::default(); 27 let mut edit_builder = ra_text_edit::TextEditBuilder::default();
23 for m in &matches.matches { 28 for m in &matches.matches {
24 edit_builder 29 edit_builder.replace(
25 .replace(m.range.checked_sub(relative_start).unwrap(), render_replace(m, file_src)); 30 m.range.range.checked_sub(relative_start).unwrap(),
31 render_replace(m, file_src, rules),
32 );
26 } 33 }
27 edit_builder.finish() 34 edit_builder.finish()
28} 35}
29 36
30fn render_replace(match_info: &Match, file_src: &str) -> String { 37struct ReplacementRenderer<'a> {
31 let mut out = String::new(); 38 match_info: &'a Match,
32 for r in &match_info.template.tokens { 39 file_src: &'a str,
33 match r { 40 rules: &'a [ResolvedRule],
34 PatternElement::Token(t) => out.push_str(t.text.as_str()), 41 rule: &'a ResolvedRule,
35 PatternElement::Placeholder(p) => { 42 out: String,
36 if let Some(placeholder_value) = 43 // Map from a range within `out` to a token in `template` that represents a placeholder. This is
37 match_info.placeholder_values.get(&Var(p.ident.to_string())) 44 // used to validate that the generated source code doesn't split any placeholder expansions (see
38 { 45 // below).
39 let range = &placeholder_value.range.range; 46 placeholder_tokens_by_range: FxHashMap<TextRange, SyntaxToken>,
40 let mut matched_text = 47 // Which placeholder tokens need to be wrapped in parenthesis in order to ensure that when `out`
41 file_src[usize::from(range.start())..usize::from(range.end())].to_owned(); 48 // is parsed, placeholders don't get split. e.g. if a template of `$a.to_string()` results in `1
42 let edit = matches_to_edit_at_offset( 49 // + 2.to_string()` then the placeholder value `1 + 2` was split and needs parenthesis.
43 &placeholder_value.inner_matches, 50 placeholder_tokens_requiring_parenthesis: FxHashSet<SyntaxToken>,
44 file_src, 51}
45 range.start(), 52
46 ); 53fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String {
47 edit.apply(&mut matched_text); 54 let rule = &rules[match_info.rule_index];
48 out.push_str(&matched_text); 55 let template = rule
49 } else { 56 .template
50 // We validated that all placeholder references were valid before we 57 .as_ref()
51 // started, so this shouldn't happen. 58 .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern");
52 panic!( 59 let mut renderer = ReplacementRenderer {
53 "Internal error: replacement referenced unknown placeholder {}", 60 match_info,
54 p.ident 61 file_src,
55 ); 62 rules,
63 rule,
64 out: String::new(),
65 placeholder_tokens_requiring_parenthesis: FxHashSet::default(),
66 placeholder_tokens_by_range: FxHashMap::default(),
67 };
68 renderer.render_node(&template.node);
69 renderer.maybe_rerender_with_extra_parenthesis(&template.node);
70 for comment in &match_info.ignored_comments {
71 renderer.out.push_str(&comment.syntax().to_string());
72 }
73 renderer.out
74}
75
76impl ReplacementRenderer<'_> {
77 fn render_node_children(&mut self, node: &SyntaxNode) {
78 for node_or_token in node.children_with_tokens() {
79 self.render_node_or_token(&node_or_token);
80 }
81 }
82
83 fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) {
84 match node_or_token {
85 SyntaxElement::Token(token) => {
86 self.render_token(&token);
87 }
88 SyntaxElement::Node(child_node) => {
89 self.render_node(&child_node);
90 }
91 }
92 }
93
94 fn render_node(&mut self, node: &SyntaxNode) {
95 use ra_syntax::ast::AstNode;
96 if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
97 self.out.push_str(&mod_path.to_string());
98 // Emit everything except for the segment's name-ref, since we already effectively
99 // emitted that as part of `mod_path`.
100 if let Some(path) = ast::Path::cast(node.clone()) {
101 if let Some(segment) = path.segment() {
102 for node_or_token in segment.syntax().children_with_tokens() {
103 if node_or_token.kind() != SyntaxKind::NAME_REF {
104 self.render_node_or_token(&node_or_token);
105 }
106 }
56 } 107 }
57 } 108 }
109 } else {
110 self.render_node_children(&node);
58 } 111 }
59 } 112 }
60 for comment in &match_info.ignored_comments { 113
61 out.push_str(&comment.syntax().to_string()); 114 fn render_token(&mut self, token: &SyntaxToken) {
115 if let Some(placeholder) = self.rule.get_placeholder(&token) {
116 if let Some(placeholder_value) =
117 self.match_info.placeholder_values.get(&Var(placeholder.ident.to_string()))
118 {
119 let range = &placeholder_value.range.range;
120 let mut matched_text =
121 self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned();
122 let edit = matches_to_edit_at_offset(
123 &placeholder_value.inner_matches,
124 self.file_src,
125 range.start(),
126 self.rules,
127 );
128 let needs_parenthesis =
129 self.placeholder_tokens_requiring_parenthesis.contains(token);
130 edit.apply(&mut matched_text);
131 if needs_parenthesis {
132 self.out.push('(');
133 }
134 self.placeholder_tokens_by_range.insert(
135 TextRange::new(
136 TextSize::of(&self.out),
137 TextSize::of(&self.out) + TextSize::of(&matched_text),
138 ),
139 token.clone(),
140 );
141 self.out.push_str(&matched_text);
142 if needs_parenthesis {
143 self.out.push(')');
144 }
145 } else {
146 // We validated that all placeholder references were valid before we
147 // started, so this shouldn't happen.
148 panic!(
149 "Internal error: replacement referenced unknown placeholder {}",
150 placeholder.ident
151 );
152 }
153 } else {
154 self.out.push_str(token.text().as_str());
155 }
156 }
157
158 // Checks if the resulting code, when parsed, doesn't split any placeholders due to different
159 // order of operations between the search pattern and the replacement template. If any do, then
160 // we rerender the template and wrap the problematic placeholders with parenthesis.
161 fn maybe_rerender_with_extra_parenthesis(&mut self, template: &SyntaxNode) {
162 if let Some(node) = parse_as_kind(&self.out, template.kind()) {
163 self.remove_node_ranges(node);
164 if self.placeholder_tokens_by_range.is_empty() {
165 return;
166 }
167 self.placeholder_tokens_requiring_parenthesis =
168 self.placeholder_tokens_by_range.values().cloned().collect();
169 self.out.clear();
170 self.render_node(template);
171 }
172 }
173
174 fn remove_node_ranges(&mut self, node: SyntaxNode) {
175 self.placeholder_tokens_by_range.remove(&node.text_range());
176 for child in node.children() {
177 self.remove_node_ranges(child);
178 }
179 }
180}
181
182fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option<SyntaxNode> {
183 use ra_syntax::ast::AstNode;
184 if ast::Expr::can_cast(kind) {
185 if let Ok(expr) = ast::Expr::parse(code) {
186 return Some(expr.syntax().clone());
187 }
188 } else if ast::Item::can_cast(kind) {
189 if let Ok(item) = ast::Item::parse(code) {
190 return Some(item.syntax().clone());
191 }
62 } 192 }
63 out 193 None
64} 194}
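To make the parenthesis handling concrete, here is the kind of case it covers, written with the test helpers from tests.rs below (an illustrative sketch, not a test included in this diff):

assert_ssr_transform(
    "foo($a) ==>> $a.to_string()",
    "fn foo(_: i32) {} fn main() { foo(1 + 2); }",
    // Naive substitution would give `1 + 2.to_string()`, which parses as
    // `1 + (2.to_string())` and splits the placeholder value `1 + 2`, so the
    // renderer re-renders with the placeholder wrapped in parenthesis.
    expect![["fn foo(_: i32) {} fn main() { (1 + 2).to_string(); }"]],
);
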
diff --git a/crates/ra_ssr/src/resolving.rs b/crates/ra_ssr/src/resolving.rs
new file mode 100644
index 000000000..78d456546
--- /dev/null
+++ b/crates/ra_ssr/src/resolving.rs
@@ -0,0 +1,228 @@
1//! This module is responsible for resolving paths within rules.
2
3use crate::errors::error;
4use crate::{parsing, SsrError};
5use parsing::Placeholder;
6use ra_db::FilePosition;
7use ra_syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken};
8use rustc_hash::{FxHashMap, FxHashSet};
9use test_utils::mark;
10
11pub(crate) struct ResolutionScope<'db> {
12 scope: hir::SemanticsScope<'db>,
13 hygiene: hir::Hygiene,
14}
15
16pub(crate) struct ResolvedRule {
17 pub(crate) pattern: ResolvedPattern,
18 pub(crate) template: Option<ResolvedPattern>,
19 pub(crate) index: usize,
20}
21
22pub(crate) struct ResolvedPattern {
23 pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
24 pub(crate) node: SyntaxNode,
25 // Paths in `node` that we've resolved.
26 pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
27 pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, hir::Function>,
28}
29
30pub(crate) struct ResolvedPath {
31 pub(crate) resolution: hir::PathResolution,
32 /// The depth of the ast::Path that was resolved within the pattern.
33 pub(crate) depth: u32,
34}
35
36impl ResolvedRule {
37 pub(crate) fn new(
38 rule: parsing::ParsedRule,
39 resolution_scope: &ResolutionScope,
40 index: usize,
41 ) -> Result<ResolvedRule, SsrError> {
42 let resolver =
43 Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in };
44 let resolved_template = if let Some(template) = rule.template {
45 Some(resolver.resolve_pattern_tree(template)?)
46 } else {
47 None
48 };
49 Ok(ResolvedRule {
50 pattern: resolver.resolve_pattern_tree(rule.pattern)?,
51 template: resolved_template,
52 index,
53 })
54 }
55
56 pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> {
57 if token.kind() != SyntaxKind::IDENT {
58 return None;
59 }
60 self.pattern.placeholders_by_stand_in.get(token.text())
61 }
62}
63
64struct Resolver<'a, 'db> {
65 resolution_scope: &'a ResolutionScope<'db>,
66 placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
67}
68
69impl Resolver<'_, '_> {
70 fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
71 let mut resolved_paths = FxHashMap::default();
72 self.resolve(pattern.clone(), 0, &mut resolved_paths)?;
73 let ufcs_function_calls = resolved_paths
74 .iter()
75 .filter_map(|(path_node, resolved)| {
76 if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) {
77 if grandparent.kind() == SyntaxKind::CALL_EXPR {
78 if let hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) =
79 &resolved.resolution
80 {
81 return Some((grandparent, *function));
82 }
83 }
84 }
85 None
86 })
87 .collect();
88 Ok(ResolvedPattern {
89 node: pattern,
90 resolved_paths,
91 placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
92 ufcs_function_calls,
93 })
94 }
95
96 fn resolve(
97 &self,
98 node: SyntaxNode,
99 depth: u32,
100 resolved_paths: &mut FxHashMap<SyntaxNode, ResolvedPath>,
101 ) -> Result<(), SsrError> {
102 use ra_syntax::ast::AstNode;
103 if let Some(path) = ast::Path::cast(node.clone()) {
104 // Check if this is an appropriate place in the path to resolve. If the path is
105 // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains
106 // a placeholder, e.g. `a::$b::c`, then we want to resolve `a`.
107 if !path_contains_type_arguments(path.qualifier())
108 && !self.path_contains_placeholder(&path)
109 {
110 let resolution = self
111 .resolution_scope
112 .resolve_path(&path)
113 .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
114 resolved_paths.insert(node, ResolvedPath { resolution, depth });
115 return Ok(());
116 }
117 }
118 for node in node.children() {
119 self.resolve(node, depth + 1, resolved_paths)?;
120 }
121 Ok(())
122 }
123
124 /// Returns whether `path` contains a placeholder, but ignores any placeholders within type
125 /// arguments.
126 fn path_contains_placeholder(&self, path: &ast::Path) -> bool {
127 if let Some(segment) = path.segment() {
128 if let Some(name_ref) = segment.name_ref() {
129 if self.placeholders_by_stand_in.contains_key(name_ref.text()) {
130 return true;
131 }
132 }
133 }
134 if let Some(qualifier) = path.qualifier() {
135 return self.path_contains_placeholder(&qualifier);
136 }
137 false
138 }
139}
140
141impl<'db> ResolutionScope<'db> {
142 pub(crate) fn new(
143 sema: &hir::Semantics<'db, ra_ide_db::RootDatabase>,
144 resolve_context: FilePosition,
145 ) -> ResolutionScope<'db> {
146 use ra_syntax::ast::AstNode;
147 let file = sema.parse(resolve_context.file_id);
148 // Find a node at the requested position, falling back to the whole file.
149 let node = file
150 .syntax()
151 .token_at_offset(resolve_context.offset)
152 .left_biased()
153 .map(|token| token.parent())
154 .unwrap_or_else(|| file.syntax().clone());
155 let node = pick_node_for_resolution(node);
156 let scope = sema.scope(&node);
157 ResolutionScope {
158 scope,
159 hygiene: hir::Hygiene::new(sema.db, resolve_context.file_id.into()),
160 }
161 }
162
163 fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
164 let hir_path = hir::Path::from_src(path.clone(), &self.hygiene)?;
165 // First try resolving the whole path. This will work for things like
166 // `std::collections::HashMap`, but will fail for things like
167 // `std::collections::HashMap::new`.
168 if let Some(resolution) = self.scope.resolve_hir_path(&hir_path) {
169 return Some(resolution);
170 }
171 // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap`) and if
172 // that succeeds, then iterate through the candidates on the resolved type with the provided
173 // name.
174 let resolved_qualifier = self.scope.resolve_hir_path_qualifier(&hir_path.qualifier()?)?;
175 if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
176 adt.ty(self.scope.db).iterate_path_candidates(
177 self.scope.db,
178 self.scope.module()?.krate(),
179 &FxHashSet::default(),
180 Some(hir_path.segments().last()?.name),
181 |_ty, assoc_item| Some(hir::PathResolution::AssocItem(assoc_item)),
182 )
183 } else {
184 None
185 }
186 }
187}
188
189/// Returns a suitable node for resolving paths in the current scope. If we create a scope based on
190/// a statement node, then we can't resolve local variables that were defined in the current scope
191/// (only in parent scopes). So we find another node, ideally a child of the statement where local
192/// variable resolution is permitted.
193fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode {
194 match node.kind() {
195 SyntaxKind::EXPR_STMT => {
196 if let Some(n) = node.first_child() {
197 mark::hit!(cursor_after_semicolon);
198 return n;
199 }
200 }
201 SyntaxKind::LET_STMT | SyntaxKind::BIND_PAT => {
202 if let Some(next) = node.next_sibling() {
203 return pick_node_for_resolution(next);
204 }
205 }
206 SyntaxKind::NAME => {
207 if let Some(parent) = node.parent() {
208 return pick_node_for_resolution(parent);
209 }
210 }
211 _ => {}
212 }
213 node
214}
215
216/// Returns whether `path` or any of its qualifiers contains type arguments.
217fn path_contains_type_arguments(path: Option<ast::Path>) -> bool {
218 if let Some(path) = path {
219 if let Some(segment) = path.segment() {
220 if segment.type_arg_list().is_some() {
221 mark::hit!(type_arguments_within_path);
222 return true;
223 }
224 }
225 return path_contains_type_arguments(path.qualifier());
226 }
227 false
228}
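A complementary sketch of what this resolution buys during matching (it mirrors the match_fully_qualified_fn_path test further down; the snippet itself is illustrative, not part of this change):

// The pattern path `foo::bar` resolves to the `bar` in module `foo`, so a
// call that resolves to a different `bar` is not matched.
assert_no_match(
    "foo::bar($a)",
    r#"
    mod foo { pub fn bar(_: i32) {} }
    mod other { pub fn bar(_: i32) {} pub fn f() { bar(42); } }
    "#,
);
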
diff --git a/crates/ra_ssr/src/search.rs b/crates/ra_ssr/src/search.rs
new file mode 100644
index 000000000..213dc494f
--- /dev/null
+++ b/crates/ra_ssr/src/search.rs
@@ -0,0 +1,273 @@
1//! Searching for matches.
2
3use crate::{
4 matching,
5 resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
6 Match, MatchFinder,
7};
8use ra_db::{FileId, FileRange};
9use ra_ide_db::{
10 defs::Definition,
11 search::{Reference, SearchScope},
12};
13use ra_syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
14use rustc_hash::FxHashSet;
15use test_utils::mark;
16
17/// A cache for the results of find_usages. This is for when we have multiple patterns that have the
18/// same path. e.g. the pattern `foo::Bar` can parse as a path, an expression, a type
19/// and as a pattern. In each case, the usages of `foo::Bar` are the same and we'd like to avoid finding
20/// them more than once.
21#[derive(Default)]
22pub(crate) struct UsageCache {
23 usages: Vec<(Definition, Vec<Reference>)>,
24}
25
26impl<'db> MatchFinder<'db> {
27 /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
28 /// replacement impossible, so further processing is required in order to properly nest matches
29 /// and remove overlapping matches. This is done in the `nesting` module.
30 pub(crate) fn find_matches_for_rule(
31 &self,
32 rule: &ResolvedRule,
33 usage_cache: &mut UsageCache,
34 matches_out: &mut Vec<Match>,
35 ) {
36 if pick_path_for_usages(&rule.pattern).is_none() {
37 self.slow_scan(rule, matches_out);
38 return;
39 }
40 self.find_matches_for_pattern_tree(rule, &rule.pattern, usage_cache, matches_out);
41 }
42
43 fn find_matches_for_pattern_tree(
44 &self,
45 rule: &ResolvedRule,
46 pattern: &ResolvedPattern,
47 usage_cache: &mut UsageCache,
48 matches_out: &mut Vec<Match>,
49 ) {
50 if let Some(resolved_path) = pick_path_for_usages(pattern) {
51 let definition: Definition = resolved_path.resolution.clone().into();
52 for reference in self.find_usages(usage_cache, definition) {
53 if let Some(node_to_match) = self.find_node_to_match(resolved_path, reference) {
54 if !is_search_permitted_ancestors(&node_to_match) {
55 mark::hit!(use_declaration_with_braces);
56 continue;
57 }
58 self.try_add_match(rule, &node_to_match, &None, matches_out);
59 }
60 }
61 }
62 }
63
64 fn find_node_to_match(
65 &self,
66 resolved_path: &ResolvedPath,
67 reference: &Reference,
68 ) -> Option<SyntaxNode> {
69 let file = self.sema.parse(reference.file_range.file_id);
70 let depth = resolved_path.depth as usize;
71 let offset = reference.file_range.range.start();
72 if let Some(path) =
73 self.sema.find_node_at_offset_with_descend::<ast::Path>(file.syntax(), offset)
74 {
75 self.sema.ancestors_with_macros(path.syntax().clone()).skip(depth).next()
76 } else if let Some(path) =
77 self.sema.find_node_at_offset_with_descend::<ast::MethodCallExpr>(file.syntax(), offset)
78 {
79 // If the pattern contained a path and we found a reference to that path that wasn't
80 // itself a path, but was a method call, then we need to adjust how far up to try
81 // matching by how deep the path was within a CallExpr. The structure would have been
82 // CallExpr, PathExpr, Path - i.e. a depth offset of 2. We don't need to check if the
83 // path was part of a CallExpr because if it wasn't then all that will happen is we'll
84 // fail to match, which is the desired behavior.
85 const PATH_DEPTH_IN_CALL_EXPR: usize = 2;
86 if depth < PATH_DEPTH_IN_CALL_EXPR {
87 return None;
88 }
89 self.sema
90 .ancestors_with_macros(path.syntax().clone())
91 .skip(depth - PATH_DEPTH_IN_CALL_EXPR)
92 .next()
93 } else {
94 None
95 }
96 }
97
98 fn find_usages<'a>(
99 &self,
100 usage_cache: &'a mut UsageCache,
101 definition: Definition,
102 ) -> &'a [Reference] {
103 // Logically if a lookup succeeds we should just return it. Unfortunately returning it would
104 // extend the lifetime of the borrow, and then we wouldn't be able to do the insertion on a
105 // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
106 // lookups in the case of a cache hit.
107 if usage_cache.find(&definition).is_none() {
108 let usages = definition.find_usages(&self.sema, Some(self.search_scope()));
109 usage_cache.usages.push((definition, usages));
110 return &usage_cache.usages.last().unwrap().1;
111 }
112 usage_cache.find(&definition).unwrap()
113 }
114
115 /// Returns the scope within which we want to search. We don't want an unrestricted search
116 /// scope, since we don't want to find references in external dependencies.
117 fn search_scope(&self) -> SearchScope {
118 // FIXME: We should ideally have a test that checks that we edit local roots and not library
119 // roots. This probably would require some changes to fixtures, since currently everything
120 // seems to get put into a single source root.
121 let mut files = Vec::new();
122 self.search_files_do(|file_id| {
123 files.push(file_id);
124 });
125 SearchScope::files(&files)
126 }
127
128 fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
129 self.search_files_do(|file_id| {
130 let file = self.sema.parse(file_id);
131 let code = file.syntax();
132 self.slow_scan_node(code, rule, &None, matches_out);
133 })
134 }
135
136 fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
137 if self.restrict_ranges.is_empty() {
138 // Unrestricted search.
139 use ra_db::SourceDatabaseExt;
140 use ra_ide_db::symbol_index::SymbolsDatabase;
141 for &root in self.sema.db.local_roots().iter() {
142 let sr = self.sema.db.source_root(root);
143 for file_id in sr.iter() {
144 callback(file_id);
145 }
146 }
147 } else {
148 // Search is restricted, deduplicate file IDs (generally only one).
149 let mut files = FxHashSet::default();
150 for range in &self.restrict_ranges {
151 if files.insert(range.file_id) {
152 callback(range.file_id);
153 }
154 }
155 }
156 }
157
158 fn slow_scan_node(
159 &self,
160 code: &SyntaxNode,
161 rule: &ResolvedRule,
162 restrict_range: &Option<FileRange>,
163 matches_out: &mut Vec<Match>,
164 ) {
165 if !is_search_permitted(code) {
166 return;
167 }
168 self.try_add_match(rule, &code, restrict_range, matches_out);
169 // If we've got a macro call, we already tried matching it pre-expansion, which is the only
170 // way to match the whole macro, now try expanding it and matching the expansion.
171 if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
172 if let Some(expanded) = self.sema.expand(&macro_call) {
173 if let Some(tt) = macro_call.token_tree() {
174 // When matching within a macro expansion, we only want to allow matches of
175 // nodes that originated entirely from within the token tree of the macro call.
176 // i.e. we don't want to match something that came from the macro itself.
177 self.slow_scan_node(
178 &expanded,
179 rule,
180 &Some(self.sema.original_range(tt.syntax())),
181 matches_out,
182 );
183 }
184 }
185 }
186 for child in code.children() {
187 self.slow_scan_node(&child, rule, restrict_range, matches_out);
188 }
189 }
190
191 fn try_add_match(
192 &self,
193 rule: &ResolvedRule,
194 code: &SyntaxNode,
195 restrict_range: &Option<FileRange>,
196 matches_out: &mut Vec<Match>,
197 ) {
198 if !self.within_range_restrictions(code) {
199 mark::hit!(replace_nonpath_within_selection);
200 return;
201 }
202 if let Ok(m) = matching::get_match(false, rule, code, restrict_range, &self.sema) {
203 matches_out.push(m);
204 }
205 }
206
207 /// Returns whether `code` is within one of our range restrictions, if we have any. Having no
208 /// range restrictions is treated as unrestricted, so in that case we always return true.
209 fn within_range_restrictions(&self, code: &SyntaxNode) -> bool {
210 if self.restrict_ranges.is_empty() {
211 // There is no range restriction.
212 return true;
213 }
214 let node_range = self.sema.original_range(code);
215 for range in &self.restrict_ranges {
216 if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
217 return true;
218 }
219 }
220 false
221 }
222}
223
224/// Returns whether we support matching within `node` and all of its ancestors.
225fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool {
226 if let Some(parent) = node.parent() {
227 if !is_search_permitted_ancestors(&parent) {
228 return false;
229 }
230 }
231 is_search_permitted(node)
232}
233
234/// Returns whether we support matching within this kind of node.
235fn is_search_permitted(node: &SyntaxNode) -> bool {
236 // FIXME: Properly handle use declarations. At the moment, if our search pattern is `foo::bar`
237 // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`.
239 // However, we'll then replace just the part we matched (`bar`). We probably need to instead remove
239 // `bar` and insert a new use declaration.
240 node.kind() != SyntaxKind::USE
241}
242
243impl UsageCache {
244 fn find(&mut self, definition: &Definition) -> Option<&[Reference]> {
245 // We expect a very small number of cache entries (generally 1), so a linear scan should be
246 // fast enough and avoids the need to implement Hash for Definition.
247 for (d, refs) in &self.usages {
248 if d == definition {
249 return Some(refs);
250 }
251 }
252 None
253 }
254}
255
256/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't
257/// something that we can find references to. We then somewhat arbitrarily pick the path that is the
258 /// longest, as a longer path is hopefully less common, making it faster to find.
259fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> {
260 // FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are
261 // private to the current module, then we definitely would want to pick them over say a path
262 // from std. Possibly we should go further than this and intersect the search scopes for all
263 // resolved paths then search only in that scope.
264 pattern
265 .resolved_paths
266 .iter()
267 .filter(|(_, p)| {
268 !matches!(p.resolution, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)))
269 })
270 .map(|(node, resolved)| (node.text().len(), resolved))
271 .max_by(|(a, _), (b, _)| a.cmp(b))
272 .map(|(_, resolved)| resolved)
273}
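The UsageCache above relies on two small tricks: a linear scan instead of a hash map (so `Definition` doesn't need `Hash`) and a double lookup to satisfy the borrow checker. Below is a standalone sketch of the same caching pattern that uses an index to sidestep the NLL limitation instead of looking up twice; `SmallCache` is a hypothetical name, not part of this crate.

struct SmallCache<K: PartialEq, V> {
    entries: Vec<(K, V)>,
}

impl<K: PartialEq, V> SmallCache<K, V> {
    fn get_or_insert_with(&mut self, key: K, compute: impl FnOnce() -> V) -> &V {
        // `position` returns an index rather than a borrow, so the early
        // return on a cache hit doesn't keep `entries` borrowed across `push`.
        if let Some(index) = self.entries.iter().position(|(k, _)| *k == key) {
            return &self.entries[index].1;
        }
        self.entries.push((key, compute()));
        &self.entries.last().unwrap().1
    }
}
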
diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs
index 8be60c293..a4fa2cb44 100644
--- a/crates/ra_ssr/src/tests.rs
+++ b/crates/ra_ssr/src/tests.rs
@@ -1,150 +1,9 @@
1use crate::matching::MatchFailureReason; 1use crate::{MatchFinder, SsrRule};
2use crate::{matching, Match, MatchFinder, SsrMatches, SsrPattern, SsrRule}; 2use expect::{expect, Expect};
3use matching::record_match_fails_reasons_scope; 3use ra_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt};
4use ra_db::{FileId, FileRange, SourceDatabaseExt}; 4use rustc_hash::FxHashSet;
5use ra_syntax::ast::AstNode; 5use std::sync::Arc;
6use ra_syntax::{ast, SyntaxKind, SyntaxNode, TextRange}; 6use test_utils::{mark, RangeOrOffset};
7
8struct MatchDebugInfo {
9 node: SyntaxNode,
10 /// Our search pattern parsed as the same kind of syntax node as `node`. e.g. expression, item,
11 /// etc. Will be absent if the pattern can't be parsed as that kind.
12 pattern: Result<SyntaxNode, MatchFailureReason>,
13 matched: Result<Match, MatchFailureReason>,
14}
15
16impl SsrPattern {
17 pub(crate) fn tree_for_kind_with_reason(
18 &self,
19 kind: SyntaxKind,
20 ) -> Result<&SyntaxNode, MatchFailureReason> {
21 record_match_fails_reasons_scope(true, || self.tree_for_kind(kind))
22 .map_err(|e| MatchFailureReason { reason: e.reason.unwrap() })
23 }
24}
25
26impl std::fmt::Debug for MatchDebugInfo {
27 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
28 write!(f, "========= PATTERN ==========\n")?;
29 match &self.pattern {
30 Ok(pattern) => {
31 write!(f, "{:#?}", pattern)?;
32 }
33 Err(err) => {
34 write!(f, "{}", err.reason)?;
35 }
36 }
37 write!(
38 f,
39 "\n============ AST ===========\n\
40 {:#?}\n============================",
41 self.node
42 )?;
43 match &self.matched {
44 Ok(_) => write!(f, "Node matched")?,
45 Err(reason) => write!(f, "Node failed to match because: {}", reason.reason)?,
46 }
47 Ok(())
48 }
49}
50
51impl SsrMatches {
52 /// Returns `self` with any nested matches removed and made into top-level matches.
53 pub(crate) fn flattened(self) -> SsrMatches {
54 let mut out = SsrMatches::default();
55 self.flatten_into(&mut out);
56 out
57 }
58
59 fn flatten_into(self, out: &mut SsrMatches) {
60 for mut m in self.matches {
61 for p in m.placeholder_values.values_mut() {
62 std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out);
63 }
64 out.matches.push(m);
65 }
66 }
67}
68
69impl Match {
70 pub(crate) fn matched_text(&self) -> String {
71 self.matched_node.text().to_string()
72 }
73}
74
75impl<'db> MatchFinder<'db> {
76 /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
77 /// intend to do replacement, use `add_rule` instead.
78 fn add_search_pattern(&mut self, pattern: SsrPattern) {
79 self.add_rule(SsrRule { pattern, template: "()".parse().unwrap() })
80 }
81
82 /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
83 /// them, while recording reasons why they don't match. This API is useful for command
84 /// line-based debugging where providing a range is difficult.
85 fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
86 let file = self.sema.parse(file_id);
87 let mut res = Vec::new();
88 let file_text = self.sema.db.file_text(file_id);
89 let mut remaining_text = file_text.as_str();
90 let mut base = 0;
91 let len = snippet.len() as u32;
92 while let Some(offset) = remaining_text.find(snippet) {
93 let start = base + offset as u32;
94 let end = start + len;
95 self.output_debug_for_nodes_at_range(
96 file.syntax(),
97 TextRange::new(start.into(), end.into()),
98 &None,
99 &mut res,
100 );
101 remaining_text = &remaining_text[offset + snippet.len()..];
102 base = end;
103 }
104 res
105 }
106
107 fn output_debug_for_nodes_at_range(
108 &self,
109 node: &SyntaxNode,
110 range: TextRange,
111 restrict_range: &Option<FileRange>,
112 out: &mut Vec<MatchDebugInfo>,
113 ) {
114 for node in node.children() {
115 if !node.text_range().contains_range(range) {
116 continue;
117 }
118 if node.text_range() == range {
119 for rule in &self.rules {
120 let pattern =
121 rule.pattern.tree_for_kind_with_reason(node.kind()).map(|p| p.clone());
122 out.push(MatchDebugInfo {
123 matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
124 .map_err(|e| MatchFailureReason {
125 reason: e.reason.unwrap_or_else(|| {
126 "Match failed, but no reason was given".to_owned()
127 }),
128 }),
129 pattern,
130 node: node.clone(),
131 });
132 }
133 } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
134 if let Some(expanded) = self.sema.expand(&macro_call) {
135 if let Some(tt) = macro_call.token_tree() {
136 self.output_debug_for_nodes_at_range(
137 &expanded,
138 range,
139 &Some(self.sema.original_range(tt.syntax())),
140 out,
141 );
142 }
143 }
144 }
145 }
146 }
147}
148 7
149fn parse_error_text(query: &str) -> String { 8fn parse_error_text(query: &str) -> String {
150 format!("{}", query.parse::<SsrRule>().unwrap_err()) 9 format!("{}", query.parse::<SsrRule>().unwrap_err())
@@ -152,12 +11,12 @@ fn parse_error_text(query: &str) -> String {
152 11
153#[test] 12#[test]
154fn parser_empty_query() { 13fn parser_empty_query() {
155 assert_eq!(parse_error_text(""), "Parse error: Cannot find delemiter `==>>`"); 14 assert_eq!(parse_error_text(""), "Parse error: Cannot find delimiter `==>>`");
156} 15}
157 16
158#[test] 17#[test]
159fn parser_no_delimiter() { 18fn parser_no_delimiter() {
160 assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delemiter `==>>`"); 19 assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delimiter `==>>`");
161} 20}
162 21
163#[test] 22#[test]
@@ -180,7 +39,7 @@ fn parser_repeated_name() {
180fn parser_invalid_pattern() { 39fn parser_invalid_pattern() {
181 assert_eq!( 40 assert_eq!(
182 parse_error_text(" ==>> ()"), 41 parse_error_text(" ==>> ()"),
183 "Parse error: Pattern is not a valid Rust expression, type, item, path or pattern" 42 "Parse error: Not a valid Rust expression, type, item, path or pattern"
184 ); 43 );
185} 44}
186 45
@@ -188,7 +47,7 @@ fn parser_invalid_pattern() {
188fn parser_invalid_template() { 47fn parser_invalid_template() {
189 assert_eq!( 48 assert_eq!(
190 parse_error_text("() ==>> )"), 49 parse_error_text("() ==>> )"),
191 "Parse error: Replacement is not a valid Rust expression, type, item, path or pattern" 50 "Parse error: Not a valid Rust expression, type, item, path or pattern"
192 ); 51 );
193} 52}
194 53
@@ -200,72 +59,112 @@ fn parser_undefined_placeholder_in_replacement() {
200 ); 59 );
201} 60}
202 61
203fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FileId) { 62/// `code` may optionally contain a cursor marker `<|>`. If it doesn't, then the position will be
63/// the start of the file. If there's a second cursor marker, then we'll return a single range.
64pub(crate) fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FilePosition, Vec<FileRange>) {
204 use ra_db::fixture::WithFixture; 65 use ra_db::fixture::WithFixture;
205 ra_ide_db::RootDatabase::with_single_file(code) 66 use ra_ide_db::symbol_index::SymbolsDatabase;
206} 67 let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) {
207 68 ra_ide_db::RootDatabase::with_range_or_offset(code)
208fn assert_ssr_transform(rule: &str, input: &str, result: &str) { 69 } else {
209 assert_ssr_transforms(&[rule], input, result); 70 let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(code);
71 (db, file_id, RangeOrOffset::Offset(0.into()))
72 };
73 let selections;
74 let position;
75 match range_or_offset {
76 RangeOrOffset::Range(range) => {
77 position = FilePosition { file_id, offset: range.start() };
78 selections = vec![FileRange { file_id, range: range }];
79 }
80 RangeOrOffset::Offset(offset) => {
81 position = FilePosition { file_id, offset };
82 selections = vec![];
83 }
84 }
85 let mut local_roots = FxHashSet::default();
86 local_roots.insert(ra_db::fixture::WORKSPACE);
87 db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
88 (db, position, selections)
210} 89}
211 90
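For example (illustrative fixture, not one of the tests below): a fixture with two cursor markers yields a single selection whose start is also the returned position.

let (_db, position, selections) = single_file("fn f() { <|>1 + 1<|>; }");
assert_eq!(selections.len(), 1);
assert_eq!(position.offset, selections[0].range.start());
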
212fn normalize_code(code: &str) -> String { 91fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
213 let (db, file_id) = single_file(code); 92 assert_ssr_transforms(&[rule], input, expected);
214 db.file_text(file_id).to_string()
215} 93}
216 94
217fn assert_ssr_transforms(rules: &[&str], input: &str, result: &str) { 95fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
218 let (db, file_id) = single_file(input); 96 let (db, position, selections) = single_file(input);
219 let mut match_finder = MatchFinder::new(&db); 97 let mut match_finder = MatchFinder::in_context(&db, position, selections);
220 for rule in rules { 98 for rule in rules {
221 let rule: SsrRule = rule.parse().unwrap(); 99 let rule: SsrRule = rule.parse().unwrap();
222 match_finder.add_rule(rule); 100 match_finder.add_rule(rule).unwrap();
223 } 101 }
224 if let Some(edits) = match_finder.edits_for_file(file_id) { 102 let edits = match_finder.edits();
225 // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters 103 if edits.is_empty() {
226 // stuff.
227 let mut after = db.file_text(file_id).to_string();
228 edits.apply(&mut after);
229 // Likewise, we need to make sure that whatever transformations fixture parsing applies,
230 // also get appplied to our expected result.
231 let result = normalize_code(result);
232 assert_eq!(after, result);
233 } else {
234 panic!("No edits were made"); 104 panic!("No edits were made");
235 } 105 }
106 assert_eq!(edits[0].file_id, position.file_id);
107 // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
108 // stuff.
109 let mut actual = db.file_text(position.file_id).to_string();
110 edits[0].edit.apply(&mut actual);
111 expected.assert_eq(&actual);
112}
113
114fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet: &str) {
115 let debug_info = match_finder.debug_where_text_equal(file_id, snippet);
116 println!(
117 "Match debug info: {} nodes had text exactly equal to '{}'",
118 debug_info.len(),
119 snippet
120 );
121 for (index, d) in debug_info.iter().enumerate() {
122 println!("Node #{}\n{:#?}\n", index, d);
123 }
236} 124}
237 125
238fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { 126fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
239 let (db, file_id) = single_file(code); 127 let (db, position, selections) = single_file(code);
240 let mut match_finder = MatchFinder::new(&db); 128 let mut match_finder = MatchFinder::in_context(&db, position, selections);
241 match_finder.add_search_pattern(pattern.parse().unwrap()); 129 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
242 let matched_strings: Vec<String> = match_finder 130 let matched_strings: Vec<String> =
243 .find_matches_in_file(file_id) 131 match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
244 .flattened()
245 .matches
246 .iter()
247 .map(|m| m.matched_text())
248 .collect();
249 if matched_strings != expected && !expected.is_empty() { 132 if matched_strings != expected && !expected.is_empty() {
250 let debug_info = match_finder.debug_where_text_equal(file_id, &expected[0]); 133 print_match_debug_info(&match_finder, position.file_id, &expected[0]);
251 eprintln!("Test is about to fail. Some possibly useful info: {} nodes had text exactly equal to '{}'", debug_info.len(), &expected[0]);
252 for d in debug_info {
253 eprintln!("{:#?}", d);
254 }
255 } 134 }
256 assert_eq!(matched_strings, expected); 135 assert_eq!(matched_strings, expected);
257} 136}
258 137
259fn assert_no_match(pattern: &str, code: &str) { 138fn assert_no_match(pattern: &str, code: &str) {
260 assert_matches(pattern, code, &[]); 139 let (db, position, selections) = single_file(code);
140 let mut match_finder = MatchFinder::in_context(&db, position, selections);
141 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
142 let matches = match_finder.matches().flattened().matches;
143 if !matches.is_empty() {
144 print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text());
145 panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches);
146 }
147}
148
149fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
150 let (db, position, selections) = single_file(code);
151 let mut match_finder = MatchFinder::in_context(&db, position, selections);
152 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
153 let mut reasons = Vec::new();
154 for d in match_finder.debug_where_text_equal(position.file_id, snippet) {
155 if let Some(reason) = d.match_failure_reason() {
156 reasons.push(reason.to_owned());
157 }
158 }
159 assert_eq!(reasons, vec![expected_reason]);
261} 160}
262 161
263#[test] 162#[test]
264fn ssr_function_to_method() { 163fn ssr_function_to_method() {
265 assert_ssr_transform( 164 assert_ssr_transform(
266 "my_function($a, $b) ==>> ($a).my_method($b)", 165 "my_function($a, $b) ==>> ($a).my_method($b)",
267 "loop { my_function( other_func(x, y), z + w) }", 166 "fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }",
268 "loop { (other_func(x, y)).my_method(z + w) }", 167 expect![["fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }"]],
269 ) 168 )
270} 169}
271 170
@@ -273,8 +172,19 @@ fn ssr_function_to_method() {
273fn ssr_nested_function() { 172fn ssr_nested_function() {
274 assert_ssr_transform( 173 assert_ssr_transform(
275 "foo($a, $b, $c) ==>> bar($c, baz($a, $b))", 174 "foo($a, $b, $c) ==>> bar($c, baz($a, $b))",
276 "fn main { foo (x + value.method(b), x+y-z, true && false) }", 175 r#"
277 "fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }", 176 //- /lib.rs crate:foo
177 fn foo() {}
178 fn bar() {}
179 fn baz() {}
180 fn main { foo (x + value.method(b), x+y-z, true && false) }
181 "#,
182 expect![[r#"
183 fn foo() {}
184 fn bar() {}
185 fn baz() {}
186 fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }
187 "#]],
278 ) 188 )
279} 189}
280 190
@@ -282,8 +192,8 @@ fn ssr_nested_function() {
282fn ssr_expected_spacing() { 192fn ssr_expected_spacing() {
283 assert_ssr_transform( 193 assert_ssr_transform(
284 "foo($x) + bar() ==>> bar($x)", 194 "foo($x) + bar() ==>> bar($x)",
285 "fn main() { foo(5) + bar() }", 195 "fn foo() {} fn bar() {} fn main() { foo(5) + bar() }",
286 "fn main() { bar(5) }", 196 expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
287 ); 197 );
288} 198}
289 199
@@ -291,8 +201,8 @@ fn ssr_expected_spacing() {
291fn ssr_with_extra_space() { 201fn ssr_with_extra_space() {
292 assert_ssr_transform( 202 assert_ssr_transform(
293 "foo($x ) + bar() ==>> bar($x)", 203 "foo($x ) + bar() ==>> bar($x)",
294 "fn main() { foo( 5 ) +bar( ) }", 204 "fn foo() {} fn bar() {} fn main() { foo( 5 ) +bar( ) }",
295 "fn main() { bar(5) }", 205 expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
296 ); 206 );
297} 207}
298 208
@@ -300,8 +210,8 @@ fn ssr_with_extra_space() {
300fn ssr_keeps_nested_comment() { 210fn ssr_keeps_nested_comment() {
301 assert_ssr_transform( 211 assert_ssr_transform(
302 "foo($x) ==>> bar($x)", 212 "foo($x) ==>> bar($x)",
303 "fn main() { foo(other(5 /* using 5 */)) }", 213 "fn foo() {} fn bar() {} fn main() { foo(other(5 /* using 5 */)) }",
304 "fn main() { bar(other(5 /* using 5 */)) }", 214 expect![["fn foo() {} fn bar() {} fn main() { bar(other(5 /* using 5 */)) }"]],
305 ) 215 )
306} 216}
307 217
@@ -309,17 +219,25 @@ fn ssr_keeps_nested_comment() {
309fn ssr_keeps_comment() { 219fn ssr_keeps_comment() {
310 assert_ssr_transform( 220 assert_ssr_transform(
311 "foo($x) ==>> bar($x)", 221 "foo($x) ==>> bar($x)",
312 "fn main() { foo(5 /* using 5 */) }", 222 "fn foo() {} fn bar() {} fn main() { foo(5 /* using 5 */) }",
313 "fn main() { bar(5)/* using 5 */ }", 223 expect![["fn foo() {} fn bar() {} fn main() { bar(5)/* using 5 */ }"]],
314 ) 224 )
315} 225}
316 226
317#[test] 227#[test]
318fn ssr_struct_lit() { 228fn ssr_struct_lit() {
319 assert_ssr_transform( 229 assert_ssr_transform(
320 "foo{a: $a, b: $b} ==>> foo::new($a, $b)", 230 "Foo{a: $a, b: $b} ==>> Foo::new($a, $b)",
321 "fn main() { foo{b:2, a:1} }", 231 r#"
322 "fn main() { foo::new(1, 2) }", 232 struct Foo() {}
233 impl Foo { fn new() {} }
234 fn main() { Foo{b:2, a:1} }
235 "#,
236 expect![[r#"
237 struct Foo() {}
238 impl Foo { fn new() {} }
239 fn main() { Foo::new(1, 2) }
240 "#]],
323 ) 241 )
324} 242}
325 243
@@ -341,16 +259,18 @@ fn match_fn_definition() {
341 259
342#[test] 260#[test]
343fn match_struct_definition() { 261fn match_struct_definition() {
344 assert_matches( 262 let code = r#"
345 "struct $n {$f: Option<String>}", 263 struct Option<T> {}
346 "struct Bar {} struct Foo {name: Option<String>}", 264 struct Bar {}
347 &["struct Foo {name: Option<String>}"], 265 struct Foo {name: Option<String>}"#;
348 ); 266 assert_matches("struct $n {$f: Option<String>}", code, &["struct Foo {name: Option<String>}"]);
349} 267}
350 268
351#[test] 269#[test]
352fn match_expr() { 270fn match_expr() {
353 let code = "fn f() -> i32 {foo(40 + 2, 42)}"; 271 let code = r#"
272 fn foo() {}
273 fn f() -> i32 {foo(40 + 2, 42)}"#;
354 assert_matches("foo($a, $b)", code, &["foo(40 + 2, 42)"]); 274 assert_matches("foo($a, $b)", code, &["foo(40 + 2, 42)"]);
355 assert_no_match("foo($a, $b, $c)", code); 275 assert_no_match("foo($a, $b, $c)", code);
356 assert_no_match("foo($a)", code); 276 assert_no_match("foo($a)", code);
@@ -379,7 +299,9 @@ fn match_nested_method_calls_with_macro_call() {
379 299
380#[test] 300#[test]
381fn match_complex_expr() { 301fn match_complex_expr() {
382 let code = "fn f() -> i32 {foo(bar(40, 2), 42)}"; 302 let code = r#"
303 fn foo() {} fn bar() {}
304 fn f() -> i32 {foo(bar(40, 2), 42)}"#;
383 assert_matches("foo($a, $b)", code, &["foo(bar(40, 2), 42)"]); 305 assert_matches("foo($a, $b)", code, &["foo(bar(40, 2), 42)"]);
384 assert_no_match("foo($a, $b, $c)", code); 306 assert_no_match("foo($a, $b, $c)", code);
385 assert_no_match("foo($a)", code); 307 assert_no_match("foo($a)", code);
@@ -390,71 +312,162 @@ fn match_complex_expr() {
390#[test] 312#[test]
391fn match_with_trailing_commas() { 313fn match_with_trailing_commas() {
392 // Code has comma, pattern doesn't. 314 // Code has comma, pattern doesn't.
393 assert_matches("foo($a, $b)", "fn f() {foo(1, 2,);}", &["foo(1, 2,)"]); 315 assert_matches("foo($a, $b)", "fn foo() {} fn f() {foo(1, 2,);}", &["foo(1, 2,)"]);
394 assert_matches("Foo{$a, $b}", "fn f() {Foo{1, 2,};}", &["Foo{1, 2,}"]); 316 assert_matches("Foo{$a, $b}", "struct Foo {} fn f() {Foo{1, 2,};}", &["Foo{1, 2,}"]);
395 317
396 // Pattern has comma, code doesn't. 318 // Pattern has comma, code doesn't.
397 assert_matches("foo($a, $b,)", "fn f() {foo(1, 2);}", &["foo(1, 2)"]); 319 assert_matches("foo($a, $b,)", "fn foo() {} fn f() {foo(1, 2);}", &["foo(1, 2)"]);
398 assert_matches("Foo{$a, $b,}", "fn f() {Foo{1, 2};}", &["Foo{1, 2}"]); 320 assert_matches("Foo{$a, $b,}", "struct Foo {} fn f() {Foo{1, 2};}", &["Foo{1, 2}"]);
399} 321}
400 322
401#[test] 323#[test]
402fn match_type() { 324fn match_type() {
403 assert_matches("i32", "fn f() -> i32 {1 + 2}", &["i32"]); 325 assert_matches("i32", "fn f() -> i32 {1 + 2}", &["i32"]);
404 assert_matches("Option<$a>", "fn f() -> Option<i32> {42}", &["Option<i32>"]); 326 assert_matches(
405 assert_no_match("Option<$a>", "fn f() -> Result<i32, ()> {42}"); 327 "Option<$a>",
328 "struct Option<T> {} fn f() -> Option<i32> {42}",
329 &["Option<i32>"],
330 );
331 assert_no_match(
332 "Option<$a>",
333 "struct Option<T> {} struct Result<T, E> {} fn f() -> Result<i32, ()> {42}",
334 );
406} 335}
407 336
408#[test] 337#[test]
409fn match_struct_instantiation() { 338fn match_struct_instantiation() {
410 assert_matches( 339 let code = r#"
411 "Foo {bar: 1, baz: 2}", 340 struct Foo {bar: i32, baz: i32}
412 "fn f() {Foo {bar: 1, baz: 2}}", 341 fn f() {Foo {bar: 1, baz: 2}}"#;
413 &["Foo {bar: 1, baz: 2}"], 342 assert_matches("Foo {bar: 1, baz: 2}", code, &["Foo {bar: 1, baz: 2}"]);
414 );
415 // Now with placeholders for all parts of the struct. 343 // Now with placeholders for all parts of the struct.
416 assert_matches( 344 assert_matches("Foo {$a: $b, $c: $d}", code, &["Foo {bar: 1, baz: 2}"]);
417 "Foo {$a: $b, $c: $d}", 345 assert_matches("Foo {}", "struct Foo {} fn f() {Foo {}}", &["Foo {}"]);
418 "fn f() {Foo {bar: 1, baz: 2}}",
419 &["Foo {bar: 1, baz: 2}"],
420 );
421 assert_matches("Foo {}", "fn f() {Foo {}}", &["Foo {}"]);
422} 346}
423 347
424#[test] 348#[test]
425fn match_path() { 349fn match_path() {
426 assert_matches("foo::bar", "fn f() {foo::bar(42)}", &["foo::bar"]); 350 let code = r#"
427 assert_matches("$a::bar", "fn f() {foo::bar(42)}", &["foo::bar"]); 351 mod foo {
428 assert_matches("foo::$b", "fn f() {foo::bar(42)}", &["foo::bar"]); 352 pub fn bar() {}
353 }
354 fn f() {foo::bar(42)}"#;
355 assert_matches("foo::bar", code, &["foo::bar"]);
356 assert_matches("$a::bar", code, &["foo::bar"]);
357 assert_matches("foo::$b", code, &["foo::bar"]);
429} 358}
430 359
431#[test] 360#[test]
432fn match_pattern() { 361fn match_pattern() {
433 assert_matches("Some($a)", "fn f() {if let Some(x) = foo() {}}", &["Some(x)"]); 362 assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]);
363}
364
365// If our pattern has a full path, e.g. a::b::c() and the code has c(), but c resolves to
366// a::b::c, then we should match.
367#[test]
368fn match_fully_qualified_fn_path() {
369 let code = r#"
370 mod a {
371 pub mod b {
372 pub fn c(_: i32) {}
373 }
374 }
375 use a::b::c;
376 fn f1() {
377 c(42);
378 }
379 "#;
380 assert_matches("a::b::c($a)", code, &["c(42)"]);
381}
382
383#[test]
384fn match_resolved_type_name() {
385 let code = r#"
386 mod m1 {
387 pub mod m2 {
388 pub trait Foo<T> {}
389 }
390 }
391 mod m3 {
392 trait Foo<T> {}
393 fn f1(f: Option<&dyn Foo<bool>>) {}
394 }
395 mod m4 {
396 use crate::m1::m2::Foo;
397 fn f1(f: Option<&dyn Foo<i32>>) {}
398 }
399 "#;
400 assert_matches("m1::m2::Foo<$t>", code, &["Foo<i32>"]);
401}
402
403#[test]
404fn type_arguments_within_path() {
405 mark::check!(type_arguments_within_path);
406 let code = r#"
407 mod foo {
408 pub struct Bar<T> {t: T}
409 impl<T> Bar<T> {
410 pub fn baz() {}
411 }
412 }
413 fn f1() {foo::Bar::<i32>::baz();}
414 "#;
415 assert_no_match("foo::Bar::<i64>::baz()", code);
416 assert_matches("foo::Bar::<i32>::baz()", code, &["foo::Bar::<i32>::baz()"]);
417}
418
419#[test]
420fn literal_constraint() {
421 mark::check!(literal_constraint);
422 let code = r#"
423 enum Option<T> { Some(T), None }
424 use Option::Some;
425 fn f1() {
426 let x1 = Some(42);
427 let x2 = Some("foo");
428 let x3 = Some(x1);
429 let x4 = Some(40 + 2);
430 let x5 = Some(true);
431 }
432 "#;
433 assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]);
434 assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]);
434} 435}
435 436
436#[test] 437#[test]
437fn match_reordered_struct_instantiation() { 438fn match_reordered_struct_instantiation() {
438 assert_matches( 439 assert_matches(
439 "Foo {aa: 1, b: 2, ccc: 3}", 440 "Foo {aa: 1, b: 2, ccc: 3}",
440 "fn f() {Foo {b: 2, ccc: 3, aa: 1}}", 441 "struct Foo {} fn f() {Foo {b: 2, ccc: 3, aa: 1}}",
441 &["Foo {b: 2, ccc: 3, aa: 1}"], 442 &["Foo {b: 2, ccc: 3, aa: 1}"],
442 ); 443 );
443 assert_no_match("Foo {a: 1}", "fn f() {Foo {b: 1}}"); 444 assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {b: 1}}");
444 assert_no_match("Foo {a: 1}", "fn f() {Foo {a: 2}}"); 445 assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {a: 2}}");
445 assert_no_match("Foo {a: 1, b: 2}", "fn f() {Foo {a: 1}}"); 446 assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {a: 1}}");
446 assert_no_match("Foo {a: 1, b: 2}", "fn f() {Foo {b: 2}}"); 447 assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {b: 2}}");
447 assert_no_match("Foo {a: 1, }", "fn f() {Foo {a: 1, b: 2}}"); 448 assert_no_match("Foo {a: 1, }", "struct Foo {} fn f() {Foo {a: 1, b: 2}}");
448 assert_no_match("Foo {a: 1, z: 9}", "fn f() {Foo {a: 1}}"); 449 assert_no_match("Foo {a: 1, z: 9}", "struct Foo {} fn f() {Foo {a: 1}}");
449} 450}
450 451
451#[test] 452#[test]
452fn match_macro_invocation() { 453fn match_macro_invocation() {
453 assert_matches("foo!($a)", "fn() {foo(foo!(foo()))}", &["foo!(foo())"]); 454 assert_matches(
454 assert_matches("foo!(41, $a, 43)", "fn() {foo!(41, 42, 43)}", &["foo!(41, 42, 43)"]); 455 "foo!($a)",
455 assert_no_match("foo!(50, $a, 43)", "fn() {foo!(41, 42, 43}"); 456 "macro_rules! foo {() => {}} fn() {foo(foo!(foo()))}",
456 assert_no_match("foo!(41, $a, 50)", "fn() {foo!(41, 42, 43}"); 457 &["foo!(foo())"],
457 assert_matches("foo!($a())", "fn() {foo!(bar())}", &["foo!(bar())"]); 458 );
459 assert_matches(
460 "foo!(41, $a, 43)",
461 "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43)}",
462 &["foo!(41, 42, 43)"],
463 );
464 assert_no_match("foo!(50, $a, 43)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}");
465 assert_no_match("foo!(41, $a, 50)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}");
466 assert_matches(
467 "foo!($a())",
468 "macro_rules! foo {() => {}} fn() {foo!(bar())}",
469 &["foo!(bar())"],
470 );
458} 471}
459 472
460// When matching within a macro expansion, we only allow matches of nodes that originated from 473// When matching within a macro expansion, we only allow matches of nodes that originated from
@@ -489,15 +502,20 @@ fn no_match_split_expression() {
489 502
490#[test] 503#[test]
491fn replace_function_call() { 504fn replace_function_call() {
492 assert_ssr_transform("foo() ==>> bar()", "fn f1() {foo(); foo();}", "fn f1() {bar(); bar();}"); 505 // This test also makes sure that we ignore empty ranges.
506 assert_ssr_transform(
507 "foo() ==>> bar()",
508 "fn foo() {<|><|>} fn bar() {} fn f1() {foo(); foo();}",
509 expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]],
510 );
493} 511}
494 512
495#[test] 513#[test]
496fn replace_function_call_with_placeholders() { 514fn replace_function_call_with_placeholders() {
497 assert_ssr_transform( 515 assert_ssr_transform(
498 "foo($a, $b) ==>> bar($b, $a)", 516 "foo($a, $b) ==>> bar($b, $a)",
499 "fn f1() {foo(5, 42)}", 517 "fn foo() {} fn bar() {} fn f1() {foo(5, 42)}",
500 "fn f1() {bar(42, 5)}", 518 expect![["fn foo() {} fn bar() {} fn f1() {bar(42, 5)}"]],
501 ); 519 );
502} 520}
503 521
@@ -505,26 +523,120 @@ fn replace_function_call_with_placeholders() {
505fn replace_nested_function_calls() { 523fn replace_nested_function_calls() {
506 assert_ssr_transform( 524 assert_ssr_transform(
507 "foo($a) ==>> bar($a)", 525 "foo($a) ==>> bar($a)",
508 "fn f1() {foo(foo(42))}", 526 "fn foo() {} fn bar() {} fn f1() {foo(foo(42))}",
509 "fn f1() {bar(bar(42))}", 527 expect![["fn foo() {} fn bar() {} fn f1() {bar(bar(42))}"]],
510 ); 528 );
511} 529}
512 530
513#[test] 531#[test]
514fn replace_type() { 532fn replace_associated_function_call() {
515 assert_ssr_transform( 533 assert_ssr_transform(
516 "Result<(), $a> ==>> Option<$a>", 534 "Foo::new() ==>> Bar::new()",
517 "fn f1() -> Result<(), Vec<Error>> {foo()}", 535 r#"
518 "fn f1() -> Option<Vec<Error>> {foo()}", 536 struct Foo {}
537 impl Foo { fn new() {} }
538 struct Bar {}
539 impl Bar { fn new() {} }
540 fn f1() {Foo::new();}
541 "#,
542 expect![[r#"
543 struct Foo {}
544 impl Foo { fn new() {} }
545 struct Bar {}
546 impl Bar { fn new() {} }
547 fn f1() {Bar::new();}
548 "#]],
519 ); 549 );
520} 550}
521 551
522#[test] 552#[test]
523fn replace_struct_init() { 553fn replace_path_in_different_contexts() {
554 // Note the <|> inside module a::b which marks the point where the rule is interpreted. We
555 // replace foo with bar, but both need different path qualifiers in different contexts. In f4,
556 // foo is unqualified because of a use statement; however, the replacement needs to be fully
557 // qualified.
524 assert_ssr_transform( 558 assert_ssr_transform(
525 "Foo {a: $a, b: $b} ==>> Foo::new($a, $b)", 559 "c::foo() ==>> c::bar()",
526 "fn f1() {Foo{b: 1, a: 2}}", 560 r#"
527 "fn f1() {Foo::new(2, 1)}", 561 mod a {
562 pub mod b {<|>
563 pub mod c {
564 pub fn foo() {}
565 pub fn bar() {}
566 fn f1() { foo() }
567 }
568 fn f2() { c::foo() }
569 }
570 fn f3() { b::c::foo() }
571 }
572 use a::b::c::foo;
573 fn f4() { foo() }
574 "#,
575 expect![[r#"
576 mod a {
577 pub mod b {
578 pub mod c {
579 pub fn foo() {}
580 pub fn bar() {}
581 fn f1() { bar() }
582 }
583 fn f2() { c::bar() }
584 }
585 fn f3() { b::c::bar() }
586 }
587 use a::b::c::foo;
588 fn f4() { a::b::c::bar() }
589 "#]],
590 );
591}
592
593#[test]
594fn replace_associated_function_with_generics() {
595 assert_ssr_transform(
596 "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()",
597 r#"
598 mod c {
599 pub struct Foo<T> {v: T}
600 impl<T> Foo<T> { pub fn new() {} }
601 fn f1() {
602 Foo::<i32>::new();
603 }
604 }
605 mod d {
606 pub struct Bar<T> {v: T}
607 impl<T> Bar<T> { pub fn default() {} }
608 fn f1() {
609 super::c::Foo::<i32>::new();
610 }
611 }
612 "#,
613 expect![[r#"
614 mod c {
615 pub struct Foo<T> {v: T}
616 impl<T> Foo<T> { pub fn new() {} }
617 fn f1() {
618 crate::d::Bar::<i32>::default();
619 }
620 }
621 mod d {
622 pub struct Bar<T> {v: T}
623 impl<T> Bar<T> { pub fn default() {} }
624 fn f1() {
625 Bar::<i32>::default();
626 }
627 }
628 "#]],
629 );
630}
631
632#[test]
633fn replace_type() {
634 assert_ssr_transform(
635 "Result<(), $a> ==>> Option<$a>",
636 "struct Result<T, E> {} struct Option<T> {} fn f1() -> Result<(), Vec<Error>> {foo()}",
637 expect![[
638 "struct Result<T, E> {} struct Option<T> {} fn f1() -> Option<Vec<Error>> {foo()}"
639 ]],
528 ); 640 );
529} 641}
530 642
@@ -532,13 +644,13 @@ fn replace_struct_init() {
532fn replace_macro_invocations() { 644fn replace_macro_invocations() {
533 assert_ssr_transform( 645 assert_ssr_transform(
534 "try!($a) ==>> $a?", 646 "try!($a) ==>> $a?",
535 "fn f1() -> Result<(), E> {bar(try!(foo()));}", 647 "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}",
536 "fn f1() -> Result<(), E> {bar(foo()?);}", 648 expect![["macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]],
537 ); 649 );
538 assert_ssr_transform( 650 assert_ssr_transform(
539 "foo!($a($b)) ==>> foo($b, $a)", 651 "foo!($a($b)) ==>> foo($b, $a)",
540 "fn f1() {foo!(abc(def() + 2));}", 652 "macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}",
541 "fn f1() {foo(def() + 2, abc);}", 653 expect![["macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}"]],
542 ); 654 );
543} 655}
544 656
@@ -547,12 +659,12 @@ fn replace_binary_op() {
547 assert_ssr_transform( 659 assert_ssr_transform(
548 "$a + $b ==>> $b + $a", 660 "$a + $b ==>> $b + $a",
549 "fn f() {2 * 3 + 4 * 5}", 661 "fn f() {2 * 3 + 4 * 5}",
550 "fn f() {4 * 5 + 2 * 3}", 662 expect![["fn f() {4 * 5 + 2 * 3}"]],
551 ); 663 );
552 assert_ssr_transform( 664 assert_ssr_transform(
553 "$a + $b ==>> $b + $a", 665 "$a + $b ==>> $b + $a",
554 "fn f() {1 + 2 + 3 + 4}", 666 "fn f() {1 + 2 + 3 + 4}",
555 "fn f() {4 + 3 + 2 + 1}", 667 expect![[r#"fn f() {4 + (3 + (2 + 1))}"#]],
556 ); 668 );
557} 669}
558 670
@@ -565,8 +677,23 @@ fn match_binary_op() {
565fn multiple_rules() { 677fn multiple_rules() {
566 assert_ssr_transforms( 678 assert_ssr_transforms(
567 &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"], 679 &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"],
568 "fn f() -> i32 {3 + 2 + 1}", 680 "fn add() {} fn add_one() {} fn f() -> i32 {3 + 2 + 1}",
569 "fn f() -> i32 {add_one(add(3, 2))}", 681 expect![["fn add() {} fn add_one() {} fn f() -> i32 {add_one(add(3, 2))}"]],
682 )
683}
684
685#[test]
686fn multiple_rules_with_nested_matches() {
687 assert_ssr_transforms(
688 &["foo1($a) ==>> bar1($a)", "foo2($a) ==>> bar2($a)"],
689 r#"
690 fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
691 fn f() {foo1(foo2(foo1(foo2(foo1(42)))))}
692 "#,
693 expect![[r#"
694 fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
695 fn f() {bar1(bar2(bar1(bar2(bar1(42)))))}
696 "#]],
570 ) 697 )
571} 698}
572 699
@@ -598,12 +725,37 @@ fn replace_within_macro_expansion() {
598 macro_rules! macro1 { 725 macro_rules! macro1 {
599 ($a:expr) => {$a} 726 ($a:expr) => {$a}
600 } 727 }
601 fn f() {macro1!(5.x().foo().o2())}"#, 728 fn bar() {}
729 fn f() {macro1!(5.x().foo().o2())}
730 "#,
731 expect![[r#"
732 macro_rules! macro1 {
733 ($a:expr) => {$a}
734 }
735 fn bar() {}
736 fn f() {macro1!(bar(5.x()).o2())}
737 "#]],
738 )
739}
740
741#[test]
742fn replace_outside_and_within_macro_expansion() {
743 assert_ssr_transform(
744 "foo($a) ==>> bar($a)",
602 r#" 745 r#"
746 fn foo() {} fn bar() {}
603 macro_rules! macro1 { 747 macro_rules! macro1 {
604 ($a:expr) => {$a} 748 ($a:expr) => {$a}
605 } 749 }
606 fn f() {macro1!(bar(5.x()).o2())}"#, 750 fn f() {foo(foo(macro1!(foo(foo(42)))))}
751 "#,
752 expect![[r#"
753 fn foo() {} fn bar() {}
754 macro_rules! macro1 {
755 ($a:expr) => {$a}
756 }
757 fn f() {bar(bar(macro1!(bar(bar(42)))))}
758 "#]],
607 ) 759 )
608} 760}
609 761
@@ -615,11 +767,280 @@ fn preserves_whitespace_within_macro_expansion() {
615 macro_rules! macro1 { 767 macro_rules! macro1 {
616 ($a:expr) => {$a} 768 ($a:expr) => {$a}
617 } 769 }
618 fn f() {macro1!(1 * 2 + 3 + 4}"#, 770 fn f() {macro1!(1 * 2 + 3 + 4}
619 r#" 771 "#,
772 expect![[r#"
620 macro_rules! macro1 { 773 macro_rules! macro1 {
621 ($a:expr) => {$a} 774 ($a:expr) => {$a}
622 } 775 }
623 fn f() {macro1!(4 - 3 - 1 * 2}"#, 776 fn f() {macro1!(4 - (3 - 1 * 2)}
777 "#]],
778 )
779}
780
781#[test]
782fn add_parenthesis_when_necessary() {
783 assert_ssr_transform(
784 "foo($a) ==>> $a.to_string()",
785 r#"
786 fn foo(_: i32) {}
787 fn bar3(v: i32) {
788 foo(1 + 2);
789 foo(-v);
790 }
791 "#,
792 expect![[r#"
793 fn foo(_: i32) {}
794 fn bar3(v: i32) {
795 (1 + 2).to_string();
796 (-v).to_string();
797 }
798 "#]],
799 )
800}
801
802#[test]
803fn match_failure_reasons() {
804 let code = r#"
805 fn bar() {}
806 macro_rules! foo {
807 ($a:expr) => {
808 1 + $a + 2
809 };
810 }
811 fn f1() {
812 bar(1, 2);
813 foo!(5 + 43.to_string() + 5);
814 }
815 "#;
816 assert_match_failure_reason(
817 "bar($a, 3)",
818 code,
819 "bar(1, 2)",
820 r#"Pattern wanted token '3' (INT_NUMBER), but code had token '2' (INT_NUMBER)"#,
821 );
822 assert_match_failure_reason(
823 "42.to_string()",
824 code,
825 "43.to_string()",
826 r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#,
827 );
828}
829
830#[test]
831fn overlapping_possible_matches() {
832 // There are three possible matches here; however, the middle one, `foo(foo(foo(42)))` shouldn't
833 // match because it overlaps with the outer match. The inner match is permitted since it is
834 // contained entirely within the placeholder of the outer match.
835 assert_matches(
836 "foo(foo($a))",
837 "fn foo() {} fn main() {foo(foo(foo(foo(42))))}",
838 &["foo(foo(42))", "foo(foo(foo(foo(42))))"],
839 );
840}
841
842#[test]
843fn use_declaration_with_braces() {
844 // It would be OK for a path rule to match and alter a use declaration. We shouldn't mess it up
845 // though. In particular, we must not change `use foo::{baz, bar}` to `use foo::{baz,
846 // foo2::bar2}`.
847 mark::check!(use_declaration_with_braces);
848 assert_ssr_transform(
849 "foo::bar ==>> foo2::bar2",
850 r#"
851 mod foo { pub fn bar() {} pub fn baz() {} }
852 mod foo2 { pub fn bar2() {} }
853 use foo::{baz, bar};
854 fn main() { bar() }
855 "#,
856 expect![["
857 mod foo { pub fn bar() {} pub fn baz() {} }
858 mod foo2 { pub fn bar2() {} }
859 use foo::{baz, bar};
860 fn main() { foo2::bar2() }
861 "]],
862 )
863}
864
865#[test]
866fn ufcs_matches_method_call() {
867 let code = r#"
868 struct Foo {}
869 impl Foo {
870 fn new(_: i32) -> Foo { Foo {} }
871 fn do_stuff(&self, _: i32) {}
872 }
873 struct Bar {}
874 impl Bar {
875 fn new(_: i32) -> Bar { Bar {} }
876 fn do_stuff(&self, v: i32) {}
877 }
878 fn main() {
879 let b = Bar {};
880 let f = Foo {};
881 b.do_stuff(1);
882 f.do_stuff(2);
883 Foo::new(4).do_stuff(3);
884 // Too many / too few args - should never match
885 f.do_stuff(2, 10);
886 f.do_stuff();
887 }
888 "#;
889 assert_matches("Foo::do_stuff($a, $b)", code, &["f.do_stuff(2)", "Foo::new(4).do_stuff(3)"]);
890 // The arguments need special handling when a function call matches a method call
891 // and the first argument is different.
892 assert_matches("Foo::do_stuff($a, 2)", code, &["f.do_stuff(2)"]);
893 assert_matches("Foo::do_stuff(Foo::new(4), $b)", code, &["Foo::new(4).do_stuff(3)"]);
894
895 assert_ssr_transform(
896 "Foo::do_stuff(Foo::new($a), $b) ==>> Bar::new($b).do_stuff($a)",
897 code,
898 expect![[r#"
899 struct Foo {}
900 impl Foo {
901 fn new(_: i32) -> Foo { Foo {} }
902 fn do_stuff(&self, _: i32) {}
903 }
904 struct Bar {}
905 impl Bar {
906 fn new(_: i32) -> Bar { Bar {} }
907 fn do_stuff(&self, v: i32) {}
908 }
909 fn main() {
910 let b = Bar {};
911 let f = Foo {};
912 b.do_stuff(1);
913 f.do_stuff(2);
914 Bar::new(3).do_stuff(4);
915 // Too many / too few args - should never match
916 f.do_stuff(2, 10);
917 f.do_stuff();
918 }
919 "#]],
920 );
921}
922
923#[test]
924fn pattern_is_a_single_segment_path() {
925 mark::check!(pattern_is_a_single_segment_path);
926 // The first function should not be altered because the `foo` in scope at the cursor position is
927 // a different `foo`. This case is special because "foo" can be parsed as a pattern (BIND_PAT ->
928 // NAME -> IDENT), which contains no path. If we're not careful, we'll end up matching the `foo`
929 // in `let foo` from the first function. Whether we should match the `let foo` in the second
930 // function is less clear. At the moment, we don't. Doing so sounds like a rename operation,
931 // which isn't really what SSR is for, especially since the replacement `bar` must be resolvable,
932 // which means that renaming `foo` would cause a name collision.
933 assert_ssr_transform(
934 "foo ==>> bar",
935 r#"
936 fn f1() -> i32 {
937 let foo = 1;
938 let bar = 2;
939 foo
940 }
941 fn f1() -> i32 {
942 let foo = 1;
943 let bar = 2;
944 foo<|>
945 }
946 "#,
947 expect![[r#"
948 fn f1() -> i32 {
949 let foo = 1;
950 let bar = 2;
951 foo
952 }
953 fn f1() -> i32 {
954 let foo = 1;
955 let bar = 2;
956 bar
957 }
958 "#]],
959 );
960}
961
962#[test]
963fn replace_local_variable_reference() {
964 // The pattern references a local variable `foo` in the block containing the cursor. We should
965 // only replace references to this variable `foo`, not other variables that just happen to have
966 // the same name.
967 mark::check!(cursor_after_semicolon);
968 assert_ssr_transform(
969 "foo + $a ==>> $a - foo",
970 r#"
971 fn bar1() -> i32 {
972 let mut res = 0;
973 let foo = 5;
974 res += foo + 1;
975 let foo = 10;
976 res += foo + 2;<|>
977 res += foo + 3;
978 let foo = 15;
979 res += foo + 4;
980 res
981 }
982 "#,
983 expect![[r#"
984 fn bar1() -> i32 {
985 let mut res = 0;
986 let foo = 5;
987 res += foo + 1;
988 let foo = 10;
989 res += 2 - foo;
990 res += 3 - foo;
991 let foo = 15;
992 res += foo + 4;
993 res
994 }
995 "#]],
624 ) 996 )
625} 997}
998
999#[test]
1000fn replace_path_within_selection() {
1001 assert_ssr_transform(
1002 "foo ==>> bar",
1003 r#"
1004 fn main() {
1005 let foo = 41;
1006 let bar = 42;
1007 do_stuff(foo);
1008 do_stuff(foo);<|>
1009 do_stuff(foo);
1010 do_stuff(foo);<|>
1011 do_stuff(foo);
1012 }"#,
1013 expect![[r#"
1014 fn main() {
1015 let foo = 41;
1016 let bar = 42;
1017 do_stuff(foo);
1018 do_stuff(foo);
1019 do_stuff(bar);
1020 do_stuff(bar);
1021 do_stuff(foo);
1022 }"#]],
1023 );
1024}
1025
1026#[test]
1027fn replace_nonpath_within_selection() {
1028 mark::check!(replace_nonpath_within_selection);
1029 assert_ssr_transform(
1030 "$a + $b ==>> $b * $a",
1031 r#"
1032 fn main() {
1033 let v = 1 + 2;<|>
1034 let v2 = 3 + 3;
1035 let v3 = 4 + 5;<|>
1036 let v4 = 6 + 7;
1037 }"#,
1038 expect![[r#"
1039 fn main() {
1040 let v = 1 + 2;
1041 let v2 = 3 * 3;
1042 let v3 = 5 * 4;
1043 let v4 = 6 + 7;
1044 }"#]],
1045 );
1046}