Diffstat (limited to 'crates')
 crates/ra_hir/src/diagnostics.rs               |   4
 crates/ra_hir_expand/src/diagnostics.rs        |  60
 crates/ra_hir_ty/Cargo.toml                    |   2
 crates/ra_hir_ty/src/diagnostics.rs            |   7
 crates/ra_ide/src/diagnostics.rs               | 156
 crates/ra_ide/src/lib.rs                       |  11
 crates/ra_ide/src/ssr.rs                       |  35
 crates/ra_ide_db/src/defs.rs                   |  35
 crates/ra_ide_db/src/search.rs                 |   4
 crates/ra_ssr/Cargo.toml                       |   3
 crates/ra_ssr/src/lib.rs                       | 233
 crates/ra_ssr/src/matching.rs                  | 203
 crates/ra_ssr/src/nester.rs                    |  98
 crates/ra_ssr/src/parsing.rs                   | 123
 crates/ra_ssr/src/replacing.rs                 | 128
 crates/ra_ssr/src/resolving.rs                 | 173
 crates/ra_ssr/src/search.rs                    | 232
 crates/ra_ssr/src/tests.rs                     | 473
 crates/rust-analyzer/src/cli/analysis_bench.rs |   2
 crates/rust-analyzer/src/cli/diagnostics.rs    |   2
 crates/rust-analyzer/src/cli/ssr.rs            |  49
 crates/rust-analyzer/src/config.rs             |   4
 crates/rust-analyzer/src/handlers.rs           |   7
 crates/rust-analyzer/src/lsp_ext.rs            |   5
 24 files changed, 1561 insertions(+), 488 deletions(-)
diff --git a/crates/ra_hir/src/diagnostics.rs b/crates/ra_hir/src/diagnostics.rs
index 11a0ecb8b..266b513dc 100644
--- a/crates/ra_hir/src/diagnostics.rs
+++ b/crates/ra_hir/src/diagnostics.rs
@@ -1,6 +1,8 @@
 //! FIXME: write short doc here
 pub use hir_def::diagnostics::UnresolvedModule;
-pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink};
+pub use hir_expand::diagnostics::{
+    AstDiagnostic, Diagnostic, DiagnosticSink, DiagnosticSinkBuilder,
+};
 pub use hir_ty::diagnostics::{
     MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, NoSuchField,
 };
diff --git a/crates/ra_hir_expand/src/diagnostics.rs b/crates/ra_hir_expand/src/diagnostics.rs
index 545cff9bd..84ba97b14 100644
--- a/crates/ra_hir_expand/src/diagnostics.rs
+++ b/crates/ra_hir_expand/src/diagnostics.rs
@@ -24,6 +24,9 @@ pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
     fn message(&self) -> String;
     fn source(&self) -> InFile<SyntaxNodePtr>;
     fn as_any(&self) -> &(dyn Any + Send + 'static);
+    fn is_experimental(&self) -> bool {
+        false
+    }
 }
 
 pub trait AstDiagnostic {
@@ -44,16 +47,48 @@ impl dyn Diagnostic {
 
 pub struct DiagnosticSink<'a> {
     callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
+    filters: Vec<Box<dyn FnMut(&dyn Diagnostic) -> bool + 'a>>,
     default_callback: Box<dyn FnMut(&dyn Diagnostic) + 'a>,
 }
 
 impl<'a> DiagnosticSink<'a> {
-    /// FIXME: split `new` and `on` into a separate builder type
-    pub fn new(cb: impl FnMut(&dyn Diagnostic) + 'a) -> DiagnosticSink<'a> {
-        DiagnosticSink { callbacks: Vec::new(), default_callback: Box::new(cb) }
+    pub fn push(&mut self, d: impl Diagnostic) {
+        let d: &dyn Diagnostic = &d;
+        self._push(d);
+    }
+
+    fn _push(&mut self, d: &dyn Diagnostic) {
+        for filter in &mut self.filters {
+            if !filter(d) {
+                return;
+            }
+        }
+        for cb in &mut self.callbacks {
+            match cb(d) {
+                Ok(()) => return,
+                Err(()) => (),
+            }
+        }
+        (self.default_callback)(d)
     }
+}
 
-    pub fn on<D: Diagnostic, F: FnMut(&D) + 'a>(mut self, mut cb: F) -> DiagnosticSink<'a> {
+pub struct DiagnosticSinkBuilder<'a> {
+    callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
+    filters: Vec<Box<dyn FnMut(&dyn Diagnostic) -> bool + 'a>>,
+}
+
+impl<'a> DiagnosticSinkBuilder<'a> {
+    pub fn new() -> Self {
+        Self { callbacks: Vec::new(), filters: Vec::new() }
+    }
+
+    pub fn filter<F: FnMut(&dyn Diagnostic) -> bool + 'a>(mut self, cb: F) -> Self {
+        self.filters.push(Box::new(cb));
+        self
+    }
+
+    pub fn on<D: Diagnostic, F: FnMut(&D) + 'a>(mut self, mut cb: F) -> Self {
         let cb = move |diag: &dyn Diagnostic| match diag.downcast_ref::<D>() {
             Some(d) => {
                 cb(d);
@@ -65,18 +100,11 @@ impl<'a> DiagnosticSink<'a> {
         self
     }
 
-    pub fn push(&mut self, d: impl Diagnostic) {
-        let d: &dyn Diagnostic = &d;
-        self._push(d);
-    }
-
-    fn _push(&mut self, d: &dyn Diagnostic) {
-        for cb in self.callbacks.iter_mut() {
-            match cb(d) {
-                Ok(()) => return,
-                Err(()) => (),
-            }
-        }
-        (self.default_callback)(d)
-    }
+    pub fn build<F: FnMut(&dyn Diagnostic) + 'a>(self, default_callback: F) -> DiagnosticSink<'a> {
+        DiagnosticSink {
+            callbacks: self.callbacks,
+            filters: self.filters,
+            default_callback: Box::new(default_callback),
+        }
+    }
 }
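The new `DiagnosticSinkBuilder` separates handler registration from sink construction and adds a filter stage in front of the typed callbacks. A minimal usage sketch, not part of the diff; the `enable_experimental` flag and the pushed diagnostic are placeholders supplied by the caller:

```rust
use hir_expand::diagnostics::{Diagnostic, DiagnosticSinkBuilder};
use hir_ty::diagnostics::MismatchedArgCount;

fn report(d: impl Diagnostic, enable_experimental: bool) {
    let mut sink = DiagnosticSinkBuilder::new()
        // Filters run first; returning false drops the diagnostic entirely.
        .filter(move |d| !d.is_experimental() || enable_experimental)
        // Typed callbacks run next; the first handler whose downcast succeeds consumes it.
        .on::<MismatchedArgCount, _>(|d| eprintln!("arg count mismatch: {}", d.message()))
        // Everything else falls through to the default callback given to `build`.
        .build(|d| eprintln!("diagnostic: {}", d.message()));
    sink.push(d);
}
```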
diff --git a/crates/ra_hir_ty/Cargo.toml b/crates/ra_hir_ty/Cargo.toml
index cdabb359f..7242e2cb6 100644
--- a/crates/ra_hir_ty/Cargo.toml
+++ b/crates/ra_hir_ty/Cargo.toml
@@ -37,4 +37,4 @@ expect = { path = "../expect" }
 
 tracing = "0.1"
 tracing-subscriber = { version = "0.2", default-features = false, features = ["env-filter", "registry"] }
-tracing-tree = { version = "0.1.3" }
+tracing-tree = { version = "0.1.4" }
diff --git a/crates/ra_hir_ty/src/diagnostics.rs b/crates/ra_hir_ty/src/diagnostics.rs
index d3ee9cf55..885abbaf2 100644
--- a/crates/ra_hir_ty/src/diagnostics.rs
+++ b/crates/ra_hir_ty/src/diagnostics.rs
@@ -234,6 +234,9 @@ impl Diagnostic for MismatchedArgCount {
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
         self
     }
+    fn is_experimental(&self) -> bool {
+        true
+    }
 }
 
 impl AstDiagnostic for MismatchedArgCount {
@@ -248,7 +251,7 @@ impl AstDiagnostic for MismatchedArgCount {
 #[cfg(test)]
 mod tests {
     use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId};
-    use hir_expand::diagnostics::{Diagnostic, DiagnosticSink};
+    use hir_expand::diagnostics::{Diagnostic, DiagnosticSinkBuilder};
     use ra_db::{fixture::WithFixture, FileId, SourceDatabase, SourceDatabaseExt};
     use ra_syntax::{TextRange, TextSize};
     use rustc_hash::FxHashMap;
@@ -280,7 +283,7 @@ mod tests {
         }
 
         for f in fns {
-            let mut sink = DiagnosticSink::new(&mut cb);
+            let mut sink = DiagnosticSinkBuilder::new().build(&mut cb);
             validate_body(self, f.into(), &mut sink);
         }
     }
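For any other diagnostic type, opting into the new hook is a one-method override, mirroring what `MismatchedArgCount` does above. An illustrative, hypothetical implementation (import paths assumed to be those used by the surrounding crates):

```rust
use std::any::Any;

use hir_expand::diagnostics::Diagnostic;
use hir_expand::InFile;
use ra_syntax::SyntaxNodePtr;

#[derive(Debug)]
struct MyExperimentalLint {
    source: InFile<SyntaxNodePtr>,
}

impl Diagnostic for MyExperimentalLint {
    fn message(&self) -> String {
        "my experimental lint".to_string()
    }
    fn source(&self) -> InFile<SyntaxNodePtr> {
        self.source.clone()
    }
    fn as_any(&self) -> &(dyn Any + Send + 'static) {
        self
    }
    // Experimental diagnostics are dropped by the sink's filter unless the client opts in.
    fn is_experimental(&self) -> bool {
        true
    }
}
```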
diff --git a/crates/ra_ide/src/diagnostics.rs b/crates/ra_ide/src/diagnostics.rs
index e029af0dc..897177d05 100644
--- a/crates/ra_ide/src/diagnostics.rs
+++ b/crates/ra_ide/src/diagnostics.rs
@@ -7,7 +7,7 @@
 use std::cell::RefCell;
 
 use hir::{
-    diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink},
+    diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSinkBuilder},
     HasSource, HirDisplay, Semantics, VariantDef,
 };
 use itertools::Itertools;
@@ -29,7 +29,11 @@ pub enum Severity {
     WeakWarning,
 }
 
-pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> {
+pub(crate) fn diagnostics(
+    db: &RootDatabase,
+    file_id: FileId,
+    enable_experimental: bool,
+) -> Vec<Diagnostic> {
     let _p = profile("diagnostics");
     let sema = Semantics::new(db);
     let parse = db.parse(file_id);
@@ -48,79 +52,85 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
48 check_struct_shorthand_initialization(&mut res, file_id, &node); 52 check_struct_shorthand_initialization(&mut res, file_id, &node);
49 } 53 }
50 let res = RefCell::new(res); 54 let res = RefCell::new(res);
51 let mut sink = DiagnosticSink::new(|d| { 55 let mut sink = DiagnosticSinkBuilder::new()
52 res.borrow_mut().push(Diagnostic { 56 .on::<hir::diagnostics::UnresolvedModule, _>(|d| {
53 message: d.message(), 57 let original_file = d.source().file_id.original_file(db);
54 range: sema.diagnostics_range(d).range, 58 let fix = Fix::new(
55 severity: Severity::Error, 59 "Create module",
56 fix: None, 60 FileSystemEdit::CreateFile { anchor: original_file, dst: d.candidate.clone() }
57 }) 61 .into(),
58 }) 62 );
59 .on::<hir::diagnostics::UnresolvedModule, _>(|d| { 63 res.borrow_mut().push(Diagnostic {
60 let original_file = d.source().file_id.original_file(db); 64 range: sema.diagnostics_range(d).range,
61 let fix = Fix::new( 65 message: d.message(),
62 "Create module", 66 severity: Severity::Error,
63 FileSystemEdit::CreateFile { anchor: original_file, dst: d.candidate.clone() }.into(), 67 fix: Some(fix),
64 ); 68 })
65 res.borrow_mut().push(Diagnostic {
66 range: sema.diagnostics_range(d).range,
67 message: d.message(),
68 severity: Severity::Error,
69 fix: Some(fix),
70 }) 69 })
71 }) 70 .on::<hir::diagnostics::MissingFields, _>(|d| {
72 .on::<hir::diagnostics::MissingFields, _>(|d| { 71 // Note that although we could add a diagnostics to
73 // Note that although we could add a diagnostics to 72 // fill the missing tuple field, e.g :
74 // fill the missing tuple field, e.g : 73 // `struct A(usize);`
75 // `struct A(usize);` 74 // `let a = A { 0: () }`
76 // `let a = A { 0: () }` 75 // but it is uncommon usage and it should not be encouraged.
77 // but it is uncommon usage and it should not be encouraged. 76 let fix = if d.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) {
78 let fix = if d.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) { 77 None
79 None 78 } else {
80 } else { 79 let mut field_list = d.ast(db);
81 let mut field_list = d.ast(db); 80 for f in d.missed_fields.iter() {
82 for f in d.missed_fields.iter() { 81 let field =
83 let field = 82 make::record_field(make::name_ref(&f.to_string()), Some(make::expr_unit()));
84 make::record_field(make::name_ref(&f.to_string()), Some(make::expr_unit())); 83 field_list = field_list.append_field(&field);
85 field_list = field_list.append_field(&field); 84 }
86 } 85
87 86 let edit = {
88 let edit = { 87 let mut builder = TextEditBuilder::default();
89 let mut builder = TextEditBuilder::default(); 88 algo::diff(&d.ast(db).syntax(), &field_list.syntax())
90 algo::diff(&d.ast(db).syntax(), &field_list.syntax()).into_text_edit(&mut builder); 89 .into_text_edit(&mut builder);
91 builder.finish() 90 builder.finish()
91 };
92 Some(Fix::new("Fill struct fields", SourceFileEdit { file_id, edit }.into()))
92 }; 93 };
93 Some(Fix::new("Fill struct fields", SourceFileEdit { file_id, edit }.into()))
94 };
95 94
96 res.borrow_mut().push(Diagnostic { 95 res.borrow_mut().push(Diagnostic {
97 range: sema.diagnostics_range(d).range, 96 range: sema.diagnostics_range(d).range,
98 message: d.message(), 97 message: d.message(),
99 severity: Severity::Error, 98 severity: Severity::Error,
100 fix, 99 fix,
100 })
101 }) 101 })
102 }) 102 .on::<hir::diagnostics::MissingOkInTailExpr, _>(|d| {
103 .on::<hir::diagnostics::MissingOkInTailExpr, _>(|d| { 103 let node = d.ast(db);
104 let node = d.ast(db); 104 let replacement = format!("Ok({})", node.syntax());
105 let replacement = format!("Ok({})", node.syntax()); 105 let edit = TextEdit::replace(node.syntax().text_range(), replacement);
106 let edit = TextEdit::replace(node.syntax().text_range(), replacement); 106 let source_change = SourceFileEdit { file_id, edit }.into();
107 let source_change = SourceFileEdit { file_id, edit }.into(); 107 let fix = Fix::new("Wrap with ok", source_change);
108 let fix = Fix::new("Wrap with ok", source_change); 108 res.borrow_mut().push(Diagnostic {
109 res.borrow_mut().push(Diagnostic { 109 range: sema.diagnostics_range(d).range,
110 range: sema.diagnostics_range(d).range, 110 message: d.message(),
111 message: d.message(), 111 severity: Severity::Error,
112 severity: Severity::Error, 112 fix: Some(fix),
113 fix: Some(fix), 113 })
114 }) 114 })
115 }) 115 .on::<hir::diagnostics::NoSuchField, _>(|d| {
116 .on::<hir::diagnostics::NoSuchField, _>(|d| { 116 res.borrow_mut().push(Diagnostic {
117 res.borrow_mut().push(Diagnostic { 117 range: sema.diagnostics_range(d).range,
118 range: sema.diagnostics_range(d).range, 118 message: d.message(),
119 message: d.message(), 119 severity: Severity::Error,
120 severity: Severity::Error, 120 fix: missing_struct_field_fix(&sema, file_id, d),
121 fix: missing_struct_field_fix(&sema, file_id, d), 121 })
122 }) 122 })
123 }); 123 // Only collect experimental diagnostics when they're enabled.
124 .filter(|diag| !diag.is_experimental() || enable_experimental)
125 // Diagnostics not handled above get no fix and default treatment.
126 .build(|d| {
127 res.borrow_mut().push(Diagnostic {
128 message: d.message(),
129 range: sema.diagnostics_range(d).range,
130 severity: Severity::Error,
131 fix: None,
132 })
133 });
124 134
125 if let Some(m) = sema.to_module_def(file_id) { 135 if let Some(m) = sema.to_module_def(file_id) {
126 m.diagnostics(db, &mut sink); 136 m.diagnostics(db, &mut sink);
@@ -298,7 +308,7 @@ mod tests {
         let after = trim_indent(ra_fixture_after);
 
         let (analysis, file_position) = analysis_and_position(ra_fixture_before);
-        let diagnostic = analysis.diagnostics(file_position.file_id).unwrap().pop().unwrap();
+        let diagnostic = analysis.diagnostics(file_position.file_id, true).unwrap().pop().unwrap();
         let mut fix = diagnostic.fix.unwrap();
         let edit = fix.source_change.source_file_edits.pop().unwrap().edit;
         let target_file_contents = analysis.file_text(file_position.file_id).unwrap();
@@ -324,7 +334,7 @@ mod tests {
         let ra_fixture_after = &trim_indent(ra_fixture_after);
         let (analysis, file_pos) = analysis_and_position(ra_fixture_before);
         let current_file_id = file_pos.file_id;
-        let diagnostic = analysis.diagnostics(current_file_id).unwrap().pop().unwrap();
+        let diagnostic = analysis.diagnostics(current_file_id, true).unwrap().pop().unwrap();
         let mut fix = diagnostic.fix.unwrap();
         let edit = fix.source_change.source_file_edits.pop().unwrap();
         let changed_file_id = edit.file_id;
@@ -345,14 +355,14 @@ mod tests {
         let analysis = mock.analysis();
         let diagnostics = files
             .into_iter()
-            .flat_map(|file_id| analysis.diagnostics(file_id).unwrap())
+            .flat_map(|file_id| analysis.diagnostics(file_id, true).unwrap())
             .collect::<Vec<_>>();
         assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics);
     }
 
     fn check_expect(ra_fixture: &str, expect: Expect) {
         let (analysis, file_id) = single_file(ra_fixture);
-        let diagnostics = analysis.diagnostics(file_id).unwrap();
+        let diagnostics = analysis.diagnostics(file_id, true).unwrap();
         expect.assert_debug_eq(&diagnostics)
     }
 
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs
index dc9192d42..4c4d9f6fa 100644
--- a/crates/ra_ide/src/lib.rs
+++ b/crates/ra_ide/src/lib.rs
@@ -487,8 +487,12 @@ impl Analysis {
     }
 
     /// Computes the set of diagnostics for the given file.
-    pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> {
-        self.with_db(|db| diagnostics::diagnostics(db, file_id))
+    pub fn diagnostics(
+        &self,
+        file_id: FileId,
+        enable_experimental: bool,
+    ) -> Cancelable<Vec<Diagnostic>> {
+        self.with_db(|db| diagnostics::diagnostics(db, file_id, enable_experimental))
     }
 
     /// Returns the edit required to rename reference at the position to the new
@@ -505,9 +509,10 @@ impl Analysis {
         &self,
         query: &str,
         parse_only: bool,
+        position: FilePosition,
     ) -> Cancelable<Result<SourceChange, SsrError>> {
         self.with_db(|db| {
-            let edits = ssr::parse_search_replace(query, parse_only, db)?;
+            let edits = ssr::parse_search_replace(query, parse_only, db, position)?;
             Ok(SourceChange::from(edits))
         })
     }
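Call sites adapt to the two signature changes above roughly as follows. This is a hedged sketch; `analysis`, `file_id`, and the rule text are assumed to come from the host:

```rust
use ra_db::FilePosition;

// Diagnostics now take an opt-in flag for experimental diagnostics.
let diagnostics = analysis.diagnostics(file_id, /* enable_experimental */ true)?;

// SSR now needs a position so that paths in the rule resolve in that context.
let position = FilePosition { file_id, offset: 0.into() };
let source_change = analysis
    .structural_search_replace("foo($a) ==>> bar($a)", /* parse_only */ false, position)??;
```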
diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs
index b3e9e5dfe..95d8f79b8 100644
--- a/crates/ra_ide/src/ssr.rs
+++ b/crates/ra_ide/src/ssr.rs
@@ -1,5 +1,5 @@
-use ra_db::SourceDatabaseExt;
-use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase};
+use ra_db::FilePosition;
+use ra_ide_db::RootDatabase;
 
 use crate::SourceFileEdit;
 use ra_ssr::{MatchFinder, SsrError, SsrRule};
@@ -11,6 +11,19 @@ use ra_ssr::{MatchFinder, SsrError, SsrRule};
 // A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
 // Within a macro call, a placeholder will match up until whatever token follows the placeholder.
 //
+// All paths in both the search pattern and the replacement template must resolve in the context
+// in which this command is invoked. Paths in the search pattern will then match the code if they
+// resolve to the same item, even if they're written differently. For example if we invoke the
+// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
+// to `foo::Bar` will match.
+//
+// Paths in the replacement template will be rendered appropriately for the context in which the
+// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
+// code in the `foo` module, we'll insert just `Bar`.
+//
+// Method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will match
+// `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`.
+//
 // Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
 //
 // Supported constraints:
@@ -43,21 +56,13 @@ pub fn parse_search_replace(
     rule: &str,
     parse_only: bool,
     db: &RootDatabase,
+    position: FilePosition,
 ) -> Result<Vec<SourceFileEdit>, SsrError> {
-    let mut edits = vec![];
     let rule: SsrRule = rule.parse()?;
+    let mut match_finder = MatchFinder::in_context(db, position);
+    match_finder.add_rule(rule)?;
     if parse_only {
-        return Ok(edits);
-    }
-    let mut match_finder = MatchFinder::new(db);
-    match_finder.add_rule(rule);
-    for &root in db.local_roots().iter() {
-        let sr = db.source_root(root);
-        for file_id in sr.iter() {
-            if let Some(edit) = match_finder.edits_for_file(file_id) {
-                edits.push(SourceFileEdit { file_id, edit });
-            }
-        }
-    }
-    Ok(edits)
+        return Ok(Vec::new());
+    }
+    Ok(match_finder.edits())
 }
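Since rules are now resolved at a position, a caller passes the location whose scope should be used for name lookup. A rough sketch under those assumptions; the rule text, the `baz2` name, and the calling module are illustrative only:

```rust
use ra_db::FilePosition;
use ra_ide_db::RootDatabase;
use ra_ssr::SsrError;

fn run_ssr(db: &RootDatabase, position: FilePosition) -> Result<(), SsrError> {
    // UFCS form: this matches `receiver.baz(arg)` wherever `baz` resolves to `foo::Bar::baz`,
    // however the path is written at the call site.
    let edits = crate::ssr::parse_search_replace(
        "foo::Bar::baz($s, $a) ==>> foo::Bar::baz2($s, $a)",
        /* parse_only */ false,
        db,
        position,
    )?;
    eprintln!("{} file(s) would be edited", edits.len());
    Ok(())
}
```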
diff --git a/crates/ra_ide_db/src/defs.rs b/crates/ra_ide_db/src/defs.rs
index e06b189a0..f391a8e43 100644
--- a/crates/ra_ide_db/src/defs.rs
+++ b/crates/ra_ide_db/src/defs.rs
@@ -290,20 +290,25 @@ pub fn classify_name_ref(
 
     let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
     let resolved = sema.resolve_path(&path)?;
-    let res = match resolved {
-        PathResolution::Def(def) => Definition::ModuleDef(def),
-        PathResolution::AssocItem(item) => {
-            let def = match item {
-                hir::AssocItem::Function(it) => it.into(),
-                hir::AssocItem::Const(it) => it.into(),
-                hir::AssocItem::TypeAlias(it) => it.into(),
-            };
-            Definition::ModuleDef(def)
-        }
-        PathResolution::Local(local) => Definition::Local(local),
-        PathResolution::TypeParam(par) => Definition::TypeParam(par),
-        PathResolution::Macro(def) => Definition::Macro(def),
-        PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def),
-    };
-    Some(NameRefClass::Definition(res))
+    Some(NameRefClass::Definition(resolved.into()))
+}
+
+impl From<PathResolution> for Definition {
+    fn from(path_resolution: PathResolution) -> Self {
+        match path_resolution {
+            PathResolution::Def(def) => Definition::ModuleDef(def),
+            PathResolution::AssocItem(item) => {
+                let def = match item {
+                    hir::AssocItem::Function(it) => it.into(),
+                    hir::AssocItem::Const(it) => it.into(),
+                    hir::AssocItem::TypeAlias(it) => it.into(),
+                };
+                Definition::ModuleDef(def)
+            }
+            PathResolution::Local(local) => Definition::Local(local),
+            PathResolution::TypeParam(par) => Definition::TypeParam(par),
+            PathResolution::Macro(def) => Definition::Macro(def),
+            PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def),
+        }
+    }
 }
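With the conversion factored into a `From` impl, other callers (such as the new SSR resolving code) can go straight from a resolved path to a `Definition`. A two-line sketch; `sema` and `path` are assumed from the surrounding context:

```rust
let resolved: PathResolution = sema.resolve_path(&path)?;
let definition: Definition = resolved.into();
```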
diff --git a/crates/ra_ide_db/src/search.rs b/crates/ra_ide_db/src/search.rs
index 81553150b..a7cae37b0 100644
--- a/crates/ra_ide_db/src/search.rs
+++ b/crates/ra_ide_db/src/search.rs
@@ -60,6 +60,10 @@ impl SearchScope {
         SearchScope::new(std::iter::once((file, None)).collect())
     }
 
+    pub fn files(files: &[FileId]) -> SearchScope {
+        SearchScope::new(files.iter().map(|f| (*f, None)).collect())
+    }
+
     pub fn intersection(&self, other: &SearchScope) -> SearchScope {
         let (mut small, mut large) = (&self.entries, &other.entries);
         if small.len() > large.len() {
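The new constructor builds a scope covering whole files (a `None` range for each), so a usage search can be limited to an explicit file set. A minimal sketch with placeholder file ids:

```rust
use ra_db::FileId;
use ra_ide_db::search::SearchScope;

fn scope_for(files: &[FileId]) -> SearchScope {
    // Equivalent to collecting `(file, None)` pairs by hand, as `single_file` does above.
    SearchScope::files(files)
}
```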
diff --git a/crates/ra_ssr/Cargo.toml b/crates/ra_ssr/Cargo.toml
index fe098aaee..84e4b171e 100644
--- a/crates/ra_ssr/Cargo.toml
+++ b/crates/ra_ssr/Cargo.toml
@@ -18,3 +18,6 @@ ra_ide_db = { path = "../ra_ide_db" }
 hir = { path = "../ra_hir", package = "ra_hir" }
 rustc-hash = "1.1.0"
 test_utils = { path = "../test_utils" }
+
+[dev-dependencies]
+expect = { path = "../expect" }
diff --git a/crates/ra_ssr/src/lib.rs b/crates/ra_ssr/src/lib.rs
index cca4576ce..2fb326b45 100644
--- a/crates/ra_ssr/src/lib.rs
+++ b/crates/ra_ssr/src/lib.rs
@@ -4,44 +4,41 @@
4//! based on a template. 4//! based on a template.
5 5
6mod matching; 6mod matching;
7mod nester;
7mod parsing; 8mod parsing;
8mod replacing; 9mod replacing;
10mod resolving;
11mod search;
9#[macro_use] 12#[macro_use]
10mod errors; 13mod errors;
11#[cfg(test)] 14#[cfg(test)]
12mod tests; 15mod tests;
13 16
17use crate::errors::bail;
14pub use crate::errors::SsrError; 18pub use crate::errors::SsrError;
15pub use crate::matching::Match; 19pub use crate::matching::Match;
16use crate::matching::{record_match_fails_reasons_scope, MatchFailureReason}; 20use crate::matching::MatchFailureReason;
17use hir::Semantics; 21use hir::Semantics;
18use ra_db::{FileId, FileRange}; 22use ra_db::{FileId, FilePosition, FileRange};
19use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange}; 23use ra_ide_db::source_change::SourceFileEdit;
20use ra_text_edit::TextEdit; 24use ra_syntax::{ast, AstNode, SyntaxNode, TextRange};
25use resolving::ResolvedRule;
21use rustc_hash::FxHashMap; 26use rustc_hash::FxHashMap;
22 27
23// A structured search replace rule. Create by calling `parse` on a str. 28// A structured search replace rule. Create by calling `parse` on a str.
24#[derive(Debug)] 29#[derive(Debug)]
25pub struct SsrRule { 30pub struct SsrRule {
26 /// A structured pattern that we're searching for. 31 /// A structured pattern that we're searching for.
27 pattern: SsrPattern, 32 pattern: parsing::RawPattern,
28 /// What we'll replace it with. 33 /// What we'll replace it with.
29 template: parsing::SsrTemplate, 34 template: parsing::RawPattern,
35 parsed_rules: Vec<parsing::ParsedRule>,
30} 36}
31 37
32#[derive(Debug)] 38#[derive(Debug)]
33pub struct SsrPattern { 39pub struct SsrPattern {
34 raw: parsing::RawSearchPattern, 40 raw: parsing::RawPattern,
35 /// Placeholders keyed by the stand-in ident that we use in Rust source code. 41 parsed_rules: Vec<parsing::ParsedRule>,
36 placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
37 // We store our search pattern, parsed as each different kind of thing we can look for. As we
38 // traverse the AST, we get the appropriate one of these for the type of node we're on. For many
39 // search patterns, only some of these will be present.
40 expr: Option<SyntaxNode>,
41 type_ref: Option<SyntaxNode>,
42 item: Option<SyntaxNode>,
43 path: Option<SyntaxNode>,
44 pattern: Option<SyntaxNode>,
45} 42}
46 43
47#[derive(Debug, Default)] 44#[derive(Debug, Default)]
@@ -53,40 +50,112 @@ pub struct SsrMatches {
53pub struct MatchFinder<'db> { 50pub struct MatchFinder<'db> {
54 /// Our source of information about the user's code. 51 /// Our source of information about the user's code.
55 sema: Semantics<'db, ra_ide_db::RootDatabase>, 52 sema: Semantics<'db, ra_ide_db::RootDatabase>,
56 rules: Vec<SsrRule>, 53 rules: Vec<ResolvedRule>,
54 scope: hir::SemanticsScope<'db>,
55 hygiene: hir::Hygiene,
57} 56}
58 57
59impl<'db> MatchFinder<'db> { 58impl<'db> MatchFinder<'db> {
60 pub fn new(db: &'db ra_ide_db::RootDatabase) -> MatchFinder<'db> { 59 /// Constructs a new instance where names will be looked up as if they appeared at
61 MatchFinder { sema: Semantics::new(db), rules: Vec::new() } 60 /// `lookup_context`.
61 pub fn in_context(
62 db: &'db ra_ide_db::RootDatabase,
63 lookup_context: FilePosition,
64 ) -> MatchFinder<'db> {
65 let sema = Semantics::new(db);
66 let file = sema.parse(lookup_context.file_id);
67 // Find a node at the requested position, falling back to the whole file.
68 let node = file
69 .syntax()
70 .token_at_offset(lookup_context.offset)
71 .left_biased()
72 .map(|token| token.parent())
73 .unwrap_or_else(|| file.syntax().clone());
74 let scope = sema.scope(&node);
75 MatchFinder {
76 sema: Semantics::new(db),
77 rules: Vec::new(),
78 scope,
79 hygiene: hir::Hygiene::new(db, lookup_context.file_id.into()),
80 }
62 } 81 }
63 82
64 pub fn add_rule(&mut self, rule: SsrRule) { 83 /// Constructs an instance using the start of the first file in `db` as the lookup context.
65 self.rules.push(rule); 84 pub fn at_first_file(db: &'db ra_ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
85 use ra_db::SourceDatabaseExt;
86 use ra_ide_db::symbol_index::SymbolsDatabase;
87 if let Some(first_file_id) = db
88 .local_roots()
89 .iter()
90 .next()
91 .and_then(|root| db.source_root(root.clone()).iter().next())
92 {
93 Ok(MatchFinder::in_context(
94 db,
95 FilePosition { file_id: first_file_id, offset: 0.into() },
96 ))
97 } else {
98 bail!("No files to search");
99 }
66 } 100 }
67 101
68 /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you 102 /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
69 /// intend to do replacement, use `add_rule` instead. 103 /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
70 pub fn add_search_pattern(&mut self, pattern: SsrPattern) { 104 /// match to it.
71 self.add_rule(SsrRule { pattern, template: "()".parse().unwrap() }) 105 pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
106 for parsed_rule in rule.parsed_rules {
107 self.rules.push(ResolvedRule::new(
108 parsed_rule,
109 &self.scope,
110 &self.hygiene,
111 self.rules.len(),
112 )?);
113 }
114 Ok(())
72 } 115 }
73 116
74 pub fn edits_for_file(&self, file_id: FileId) -> Option<TextEdit> { 117 /// Finds matches for all added rules and returns edits for all found matches.
75 let matches = self.find_matches_in_file(file_id); 118 pub fn edits(&self) -> Vec<SourceFileEdit> {
76 if matches.matches.is_empty() { 119 use ra_db::SourceDatabaseExt;
77 None 120 let mut matches_by_file = FxHashMap::default();
78 } else { 121 for m in self.matches().matches {
79 use ra_db::SourceDatabaseExt; 122 matches_by_file
80 Some(replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id))) 123 .entry(m.range.file_id)
124 .or_insert_with(|| SsrMatches::default())
125 .matches
126 .push(m);
127 }
128 let mut edits = vec![];
129 for (file_id, matches) in matches_by_file {
130 let edit =
131 replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id), &self.rules);
132 edits.push(SourceFileEdit { file_id, edit });
81 } 133 }
134 edits
82 } 135 }
83 136
84 pub fn find_matches_in_file(&self, file_id: FileId) -> SsrMatches { 137 /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
85 let file = self.sema.parse(file_id); 138 /// intend to do replacement, use `add_rule` instead.
86 let code = file.syntax(); 139 pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
87 let mut matches = SsrMatches::default(); 140 for parsed_rule in pattern.parsed_rules {
88 self.find_matches(code, &None, &mut matches); 141 self.rules.push(ResolvedRule::new(
89 matches 142 parsed_rule,
143 &self.scope,
144 &self.hygiene,
145 self.rules.len(),
146 )?);
147 }
148 Ok(())
149 }
150
151 /// Returns matches for all added rules.
152 pub fn matches(&self) -> SsrMatches {
153 let mut matches = Vec::new();
154 let mut usage_cache = search::UsageCache::default();
155 for rule in &self.rules {
156 self.find_matches_for_rule(rule, &mut usage_cache, &mut matches);
157 }
158 nester::nest_and_remove_collisions(matches, &self.sema)
90 } 159 }
91 160
92 /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match 161 /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
@@ -115,53 +184,6 @@ impl<'db> MatchFinder<'db> {
115 res 184 res
116 } 185 }
117 186
118 fn find_matches(
119 &self,
120 code: &SyntaxNode,
121 restrict_range: &Option<FileRange>,
122 matches_out: &mut SsrMatches,
123 ) {
124 for rule in &self.rules {
125 if let Ok(mut m) = matching::get_match(false, rule, &code, restrict_range, &self.sema) {
126 // Continue searching in each of our placeholders.
127 for placeholder_value in m.placeholder_values.values_mut() {
128 if let Some(placeholder_node) = &placeholder_value.node {
129 // Don't search our placeholder if it's the entire matched node, otherwise we'd
130 // find the same match over and over until we got a stack overflow.
131 if placeholder_node != code {
132 self.find_matches(
133 placeholder_node,
134 restrict_range,
135 &mut placeholder_value.inner_matches,
136 );
137 }
138 }
139 }
140 matches_out.matches.push(m);
141 return;
142 }
143 }
144 // If we've got a macro call, we already tried matching it pre-expansion, which is the only
145 // way to match the whole macro, now try expanding it and matching the expansion.
146 if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
147 if let Some(expanded) = self.sema.expand(&macro_call) {
148 if let Some(tt) = macro_call.token_tree() {
149 // When matching within a macro expansion, we only want to allow matches of
150 // nodes that originated entirely from within the token tree of the macro call.
151 // i.e. we don't want to match something that came from the macro itself.
152 self.find_matches(
153 &expanded,
154 &Some(self.sema.original_range(tt.syntax())),
155 matches_out,
156 );
157 }
158 }
159 }
160 for child in code.children() {
161 self.find_matches(&child, restrict_range, matches_out);
162 }
163 }
164
165 fn output_debug_for_nodes_at_range( 187 fn output_debug_for_nodes_at_range(
166 &self, 188 &self,
167 node: &SyntaxNode, 189 node: &SyntaxNode,
@@ -177,8 +199,17 @@ impl<'db> MatchFinder<'db> {
177 } 199 }
178 if node_range.range == range.range { 200 if node_range.range == range.range {
179 for rule in &self.rules { 201 for rule in &self.rules {
180 let pattern = 202 // For now we ignore rules that have a different kind than our node, otherwise
181 rule.pattern.tree_for_kind_with_reason(node.kind()).map(|p| p.clone()); 203 // we get lots of noise. If at some point we add support for restricting rules
204 // to a particular kind of thing (e.g. only match type references), then we can
205 // relax this. We special-case expressions, since function calls can match
206 // method calls.
207 if rule.pattern.node.kind() != node.kind()
208 && !(ast::Expr::can_cast(rule.pattern.node.kind())
209 && ast::Expr::can_cast(node.kind()))
210 {
211 continue;
212 }
182 out.push(MatchDebugInfo { 213 out.push(MatchDebugInfo {
183 matched: matching::get_match(true, rule, &node, restrict_range, &self.sema) 214 matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
184 .map_err(|e| MatchFailureReason { 215 .map_err(|e| MatchFailureReason {
@@ -186,7 +217,7 @@ impl<'db> MatchFinder<'db> {
186 "Match failed, but no reason was given".to_owned() 217 "Match failed, but no reason was given".to_owned()
187 }), 218 }),
188 }), 219 }),
189 pattern, 220 pattern: rule.pattern.node.clone(),
190 node: node.clone(), 221 node: node.clone(),
191 }); 222 });
192 } 223 }
@@ -209,9 +240,8 @@ impl<'db> MatchFinder<'db> {
209 240
210pub struct MatchDebugInfo { 241pub struct MatchDebugInfo {
211 node: SyntaxNode, 242 node: SyntaxNode,
212 /// Our search pattern parsed as the same kind of syntax node as `node`. e.g. expression, item, 243 /// Our search pattern parsed as an expression or item, etc
213 /// etc. Will be absent if the pattern can't be parsed as that kind. 244 pattern: SyntaxNode,
214 pattern: Result<SyntaxNode, MatchFailureReason>,
215 matched: Result<Match, MatchFailureReason>, 245 matched: Result<Match, MatchFailureReason>,
216} 246}
217 247
@@ -228,29 +258,12 @@ impl std::fmt::Debug for MatchDebugInfo {
228 self.node 258 self.node
229 )?; 259 )?;
230 writeln!(f, "========= PATTERN ==========")?; 260 writeln!(f, "========= PATTERN ==========")?;
231 match &self.pattern { 261 writeln!(f, "{:#?}", self.pattern)?;
232 Ok(pattern) => {
233 writeln!(f, "{:#?}", pattern)?;
234 }
235 Err(err) => {
236 writeln!(f, "{}", err.reason)?;
237 }
238 }
239 writeln!(f, "============================")?; 262 writeln!(f, "============================")?;
240 Ok(()) 263 Ok(())
241 } 264 }
242} 265}
243 266
244impl SsrPattern {
245 fn tree_for_kind_with_reason(
246 &self,
247 kind: SyntaxKind,
248 ) -> Result<&SyntaxNode, MatchFailureReason> {
249 record_match_fails_reasons_scope(true, || self.tree_for_kind(kind))
250 .map_err(|e| MatchFailureReason { reason: e.reason.unwrap() })
251 }
252}
253
254impl SsrMatches { 267impl SsrMatches {
255 /// Returns `self` with any nested matches removed and made into top-level matches. 268 /// Returns `self` with any nested matches removed and made into top-level matches.
256 pub fn flattened(self) -> SsrMatches { 269 pub fn flattened(self) -> SsrMatches {
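Putting the reworked `MatchFinder` API together: a caller constructs the finder at a lookup position, adds rules (which are resolved immediately), and then asks for matches or edits. A rough sketch with an illustrative rule; database and position setup are assumed to come from the host:

```rust
use ra_db::FilePosition;
use ra_ssr::{MatchFinder, SsrError, SsrRule};

fn apply_rule(db: &ra_ide_db::RootDatabase, position: FilePosition) -> Result<(), SsrError> {
    let rule: SsrRule = "foo($a) ==>> bar($a)".parse()?;
    // Paths in the rule are resolved in the scope of `position`.
    let mut match_finder = MatchFinder::in_context(db, position);
    // Rules added earlier take precedence when more than one matches a node.
    match_finder.add_rule(rule)?;
    // One edit per file that contains at least one match.
    for edit in match_finder.edits() {
        eprintln!("would edit {:?}", edit.file_id);
    }
    Ok(())
}
```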
diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs
index 50b29eab2..4862622bd 100644
--- a/crates/ra_ssr/src/matching.rs
+++ b/crates/ra_ssr/src/matching.rs
@@ -2,8 +2,9 @@
2//! process of matching, placeholder values are recorded. 2//! process of matching, placeholder values are recorded.
3 3
4use crate::{ 4use crate::{
5 parsing::{Constraint, NodeKind, Placeholder, SsrTemplate}, 5 parsing::{Constraint, NodeKind, Placeholder},
6 SsrMatches, SsrPattern, SsrRule, 6 resolving::{ResolvedPattern, ResolvedRule},
7 SsrMatches,
7}; 8};
8use hir::Semantics; 9use hir::Semantics;
9use ra_db::FileRange; 10use ra_db::FileRange;
@@ -48,9 +49,11 @@ pub struct Match {
48 pub(crate) matched_node: SyntaxNode, 49 pub(crate) matched_node: SyntaxNode,
49 pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>, 50 pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>,
50 pub(crate) ignored_comments: Vec<ast::Comment>, 51 pub(crate) ignored_comments: Vec<ast::Comment>,
51 // A copy of the template for the rule that produced this match. We store this on the match for 52 pub(crate) rule_index: usize,
52 // if/when we do replacement. 53 /// The depth of matched_node.
53 pub(crate) template: SsrTemplate, 54 pub(crate) depth: usize,
55 // Each path in the template rendered for the module in which the match was found.
56 pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>,
54} 57}
55 58
56/// Represents a `$var` in an SSR query. 59/// Represents a `$var` in an SSR query.
@@ -86,7 +89,7 @@ pub(crate) struct MatchFailed {
86/// parent module, we don't populate nested matches. 89/// parent module, we don't populate nested matches.
87pub(crate) fn get_match( 90pub(crate) fn get_match(
88 debug_active: bool, 91 debug_active: bool,
89 rule: &SsrRule, 92 rule: &ResolvedRule,
90 code: &SyntaxNode, 93 code: &SyntaxNode,
91 restrict_range: &Option<FileRange>, 94 restrict_range: &Option<FileRange>,
92 sema: &Semantics<ra_ide_db::RootDatabase>, 95 sema: &Semantics<ra_ide_db::RootDatabase>,
@@ -102,7 +105,7 @@ struct Matcher<'db, 'sema> {
102 /// If any placeholders come from anywhere outside of this range, then the match will be 105 /// If any placeholders come from anywhere outside of this range, then the match will be
103 /// rejected. 106 /// rejected.
104 restrict_range: Option<FileRange>, 107 restrict_range: Option<FileRange>,
105 rule: &'sema SsrRule, 108 rule: &'sema ResolvedRule,
106} 109}
107 110
108/// Which phase of matching we're currently performing. We do two phases because most attempted 111/// Which phase of matching we're currently performing. We do two phases because most attempted
@@ -117,26 +120,35 @@ enum Phase<'a> {
117 120
118impl<'db, 'sema> Matcher<'db, 'sema> { 121impl<'db, 'sema> Matcher<'db, 'sema> {
119 fn try_match( 122 fn try_match(
120 rule: &'sema SsrRule, 123 rule: &ResolvedRule,
121 code: &SyntaxNode, 124 code: &SyntaxNode,
122 restrict_range: &Option<FileRange>, 125 restrict_range: &Option<FileRange>,
123 sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>, 126 sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>,
124 ) -> Result<Match, MatchFailed> { 127 ) -> Result<Match, MatchFailed> {
125 let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule }; 128 let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule };
126 let pattern_tree = rule.pattern.tree_for_kind(code.kind())?;
127 // First pass at matching, where we check that node types and idents match. 129 // First pass at matching, where we check that node types and idents match.
128 match_state.attempt_match_node(&mut Phase::First, &pattern_tree, code)?; 130 match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
129 match_state.validate_range(&sema.original_range(code))?; 131 match_state.validate_range(&sema.original_range(code))?;
130 let mut the_match = Match { 132 let mut the_match = Match {
131 range: sema.original_range(code), 133 range: sema.original_range(code),
132 matched_node: code.clone(), 134 matched_node: code.clone(),
133 placeholder_values: FxHashMap::default(), 135 placeholder_values: FxHashMap::default(),
134 ignored_comments: Vec::new(), 136 ignored_comments: Vec::new(),
135 template: rule.template.clone(), 137 rule_index: rule.index,
138 depth: 0,
139 rendered_template_paths: FxHashMap::default(),
136 }; 140 };
137 // Second matching pass, where we record placeholder matches, ignored comments and maybe do 141 // Second matching pass, where we record placeholder matches, ignored comments and maybe do
138 // any other more expensive checks that we didn't want to do on the first pass. 142 // any other more expensive checks that we didn't want to do on the first pass.
139 match_state.attempt_match_node(&mut Phase::Second(&mut the_match), &pattern_tree, code)?; 143 match_state.attempt_match_node(
144 &mut Phase::Second(&mut the_match),
145 &rule.pattern.node,
146 code,
147 )?;
148 the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count();
149 if let Some(template) = &rule.template {
150 the_match.render_template_paths(template, sema)?;
151 }
140 Ok(the_match) 152 Ok(the_match)
141 } 153 }
142 154
@@ -177,10 +189,17 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
177 } 189 }
178 return Ok(()); 190 return Ok(());
179 } 191 }
180 // Non-placeholders. 192 // We allow a UFCS call to match a method call, provided they resolve to the same function.
193 if let Some(pattern_function) = self.rule.pattern.ufcs_function_calls.get(pattern) {
194 if let (Some(pattern), Some(code)) =
195 (ast::CallExpr::cast(pattern.clone()), ast::MethodCallExpr::cast(code.clone()))
196 {
197 return self.attempt_match_ufcs(phase, &pattern, &code, *pattern_function);
198 }
199 }
181 if pattern.kind() != code.kind() { 200 if pattern.kind() != code.kind() {
182 fail_match!( 201 fail_match!(
183 "Pattern had a `{}` ({:?}), code had `{}` ({:?})", 202 "Pattern had `{}` ({:?}), code had `{}` ({:?})",
184 pattern.text(), 203 pattern.text(),
185 pattern.kind(), 204 pattern.kind(),
186 code.text(), 205 code.text(),
@@ -194,6 +213,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
194 self.attempt_match_record_field_list(phase, pattern, code) 213 self.attempt_match_record_field_list(phase, pattern, code)
195 } 214 }
196 SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code), 215 SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
216 SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code),
197 _ => self.attempt_match_node_children(phase, pattern, code), 217 _ => self.attempt_match_node_children(phase, pattern, code),
198 } 218 }
199 } 219 }
@@ -310,6 +330,64 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
310 Ok(()) 330 Ok(())
311 } 331 }
312 332
333 /// Paths are matched based on whether they refer to the same thing, even if they're written
334 /// differently.
335 fn attempt_match_path(
336 &self,
337 phase: &mut Phase,
338 pattern: &SyntaxNode,
339 code: &SyntaxNode,
340 ) -> Result<(), MatchFailed> {
341 if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) {
342 let pattern_path = ast::Path::cast(pattern.clone()).unwrap();
343 let code_path = ast::Path::cast(code.clone()).unwrap();
344 if let (Some(pattern_segment), Some(code_segment)) =
345 (pattern_path.segment(), code_path.segment())
346 {
347 // Match everything within the segment except for the name-ref, which is handled
348 // separately via comparing what the path resolves to below.
349 self.attempt_match_opt(
350 phase,
351 pattern_segment.type_arg_list(),
352 code_segment.type_arg_list(),
353 )?;
354 self.attempt_match_opt(
355 phase,
356 pattern_segment.param_list(),
357 code_segment.param_list(),
358 )?;
359 }
360 if matches!(phase, Phase::Second(_)) {
361 let resolution = self
362 .sema
363 .resolve_path(&code_path)
364 .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?;
365 if pattern_resolved.resolution != resolution {
366 fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text());
367 }
368 }
369 } else {
370 return self.attempt_match_node_children(phase, pattern, code);
371 }
372 Ok(())
373 }
374
375 fn attempt_match_opt<T: AstNode>(
376 &self,
377 phase: &mut Phase,
378 pattern: Option<T>,
379 code: Option<T>,
380 ) -> Result<(), MatchFailed> {
381 match (pattern, code) {
382 (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()),
383 (None, None) => Ok(()),
384 (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
385 (None, Some(c)) => {
386 fail_match!("Nothing in pattern to match code `{}`", c.syntax().text())
387 }
388 }
389 }
390
313 /// We want to allow the records to match in any order, so we have special matching logic for 391 /// We want to allow the records to match in any order, so we have special matching logic for
314 /// them. 392 /// them.
315 fn attempt_match_record_field_list( 393 fn attempt_match_record_field_list(
@@ -443,9 +521,61 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
443 Ok(()) 521 Ok(())
444 } 522 }
445 523
524 fn attempt_match_ufcs(
525 &self,
526 phase: &mut Phase,
527 pattern: &ast::CallExpr,
528 code: &ast::MethodCallExpr,
529 pattern_function: hir::Function,
530 ) -> Result<(), MatchFailed> {
531 use ast::ArgListOwner;
532 let code_resolved_function = self
533 .sema
534 .resolve_method_call(code)
535 .ok_or_else(|| match_error!("Failed to resolve method call"))?;
536 if pattern_function != code_resolved_function {
537 fail_match!("Method call resolved to a different function");
538 }
539 // Check arguments.
540 let mut pattern_args = pattern
541 .arg_list()
542 .ok_or_else(|| match_error!("Pattern function call has no args"))?
543 .args();
544 self.attempt_match_opt(phase, pattern_args.next(), code.expr())?;
545 let mut code_args =
546 code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args();
547 loop {
548 match (pattern_args.next(), code_args.next()) {
549 (None, None) => return Ok(()),
550 (p, c) => self.attempt_match_opt(phase, p, c)?,
551 }
552 }
553 }
554
446 fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> { 555 fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
447 only_ident(element.clone()) 556 only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident))
448 .and_then(|ident| self.rule.pattern.placeholders_by_stand_in.get(ident.text())) 557 }
558}
559
560impl Match {
561 fn render_template_paths(
562 &mut self,
563 template: &ResolvedPattern,
564 sema: &Semantics<ra_ide_db::RootDatabase>,
565 ) -> Result<(), MatchFailed> {
566 let module = sema
567 .scope(&self.matched_node)
568 .module()
569 .ok_or_else(|| match_error!("Matched node isn't in a module"))?;
570 for (path, resolved_path) in &template.resolved_paths {
571 if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
572 let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {
573 match_error!("Failed to render template path `{}` at match location")
574 })?;
575 self.rendered_template_paths.insert(path.clone(), mod_path);
576 }
577 }
578 Ok(())
449 } 579 }
450} 580}
451 581
@@ -510,28 +640,6 @@ impl PlaceholderMatch {
510 } 640 }
511} 641}
512 642
513impl SsrPattern {
514 pub(crate) fn tree_for_kind(&self, kind: SyntaxKind) -> Result<&SyntaxNode, MatchFailed> {
515 let (tree, kind_name) = if ast::Expr::can_cast(kind) {
516 (&self.expr, "expression")
517 } else if ast::TypeRef::can_cast(kind) {
518 (&self.type_ref, "type reference")
519 } else if ast::ModuleItem::can_cast(kind) {
520 (&self.item, "item")
521 } else if ast::Path::can_cast(kind) {
522 (&self.path, "path")
523 } else if ast::Pat::can_cast(kind) {
524 (&self.pattern, "pattern")
525 } else {
526 fail_match!("Matching nodes of kind {:?} is not supported", kind);
527 };
528 match tree {
529 Some(tree) => Ok(tree),
530 None => fail_match!("Pattern cannot be parsed as a {}", kind_name),
531 }
532 }
533}
534
535impl NodeKind { 643impl NodeKind {
536 fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> { 644 fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
537 let ok = match self { 645 let ok = match self {
@@ -596,13 +704,12 @@ mod tests {
596 #[test] 704 #[test]
597 fn parse_match_replace() { 705 fn parse_match_replace() {
598 let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap(); 706 let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
599 let input = "fn foo() {} fn main() { foo(1+2); }"; 707 let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
600 708
601 use ra_db::fixture::WithFixture; 709 let (db, position) = crate::tests::single_file(input);
602 let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(input); 710 let mut match_finder = MatchFinder::in_context(&db, position);
603 let mut match_finder = MatchFinder::new(&db); 711 match_finder.add_rule(rule).unwrap();
604 match_finder.add_rule(rule); 712 let matches = match_finder.matches();
605 let matches = match_finder.find_matches_in_file(file_id);
606 assert_eq!(matches.matches.len(), 1); 713 assert_eq!(matches.matches.len(), 1);
607 assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)"); 714 assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
608 assert_eq!(matches.matches[0].placeholder_values.len(), 1); 715 assert_eq!(matches.matches[0].placeholder_values.len(), 1);
@@ -615,9 +722,11 @@ mod tests {
615 "1+2" 722 "1+2"
616 ); 723 );
617 724
618 let edit = crate::replacing::matches_to_edit(&matches, input); 725 let edits = match_finder.edits();
726 assert_eq!(edits.len(), 1);
727 let edit = &edits[0];
619 let mut after = input.to_string(); 728 let mut after = input.to_string();
620 edit.apply(&mut after); 729 edit.edit.apply(&mut after);
621 assert_eq!(after, "fn foo() {} fn main() { bar(1+2); }"); 730 assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
622 } 731 }
623} 732}
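The practical effect of `attempt_match_path` and `attempt_match_ufcs` above: a pattern path matches code that spells the path differently but resolves to the same item, and a UFCS call pattern matches method-call syntax. An illustrative sketch with hypothetical items (this mirrors the behaviour described in the ssr.rs doc comment, it is not a test from the diff):

```rust
// Rule pattern (UFCS): `foo::Bar::baz($s, $a)`
// The method call below matches because `baz` resolves to `foo::Bar::baz`;
// `$s` binds to `b` and `$a` binds to `42`.

mod foo {
    pub struct Bar;
    impl Bar {
        pub fn baz(&self, _x: i32) {}
    }
}

fn example(b: foo::Bar) {
    b.baz(42); // matched via `attempt_match_ufcs`
}
```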
diff --git a/crates/ra_ssr/src/nester.rs b/crates/ra_ssr/src/nester.rs
new file mode 100644
index 000000000..b3e20579b
--- /dev/null
+++ b/crates/ra_ssr/src/nester.rs
@@ -0,0 +1,98 @@
+//! Converts a flat collection of matches into a nested form suitable for replacement. When there
+//! are multiple matches for a node, or that overlap, priority is given to the earlier rule. Nested
+//! matches are only permitted if the inner match is contained entirely within a placeholder of an
+//! outer match.
+//!
+//! For example, if our search pattern is `foo(foo($a))` and the code had `foo(foo(foo(foo(42))))`,
+//! then we'll get 3 matches, however only the outermost and innermost matches can be accepted. The
+//! middle match would take the second `foo` from the outer match.
+
+use crate::{Match, SsrMatches};
+use ra_syntax::SyntaxNode;
+use rustc_hash::FxHashMap;
+
+pub(crate) fn nest_and_remove_collisions(
+    mut matches: Vec<Match>,
+    sema: &hir::Semantics<ra_ide_db::RootDatabase>,
+) -> SsrMatches {
+    // We sort the matches by depth then by rule index. Sorting by depth means that by the time we
+    // see a match, any parent matches or conflicting matches will have already been seen. Sorting
+    // by rule_index means that if there are two matches for the same node, the rule added first
+    // will take precedence.
+    matches.sort_by(|a, b| a.depth.cmp(&b.depth).then_with(|| a.rule_index.cmp(&b.rule_index)));
+    let mut collector = MatchCollector::default();
+    for m in matches {
+        collector.add_match(m, sema);
+    }
+    collector.into()
+}
+
+#[derive(Default)]
+struct MatchCollector {
+    matches_by_node: FxHashMap<SyntaxNode, Match>,
+}
+
+impl MatchCollector {
+    /// Attempts to add `m` to matches. If it conflicts with an existing match, it is discarded. If
+    /// it is entirely within the a placeholder of an existing match, then it is added as a child
+    /// match of the existing match.
+    fn add_match(&mut self, m: Match, sema: &hir::Semantics<ra_ide_db::RootDatabase>) {
+        let matched_node = m.matched_node.clone();
+        if let Some(existing) = self.matches_by_node.get_mut(&matched_node) {
+            try_add_sub_match(m, existing, sema);
+            return;
+        }
+        for ancestor in sema.ancestors_with_macros(m.matched_node.clone()) {
+            if let Some(existing) = self.matches_by_node.get_mut(&ancestor) {
+                try_add_sub_match(m, existing, sema);
+                return;
+            }
+        }
+        self.matches_by_node.insert(matched_node, m);
+    }
+}
+
+/// Attempts to add `m` as a sub-match of `existing`.
+fn try_add_sub_match(
+    m: Match,
+    existing: &mut Match,
+    sema: &hir::Semantics<ra_ide_db::RootDatabase>,
+) {
+    for p in existing.placeholder_values.values_mut() {
+        // Note, no need to check if p.range.file is equal to m.range.file, since we
+        // already know we're within `existing`.
+        if p.range.range.contains_range(m.range.range) {
+            // Convert the inner matches in `p` into a temporary MatchCollector. When
+            // we're done, we then convert it back into an SsrMatches. If we expected
+            // lots of inner matches, it might be worthwhile keeping a MatchCollector
+            // around for each placeholder match. However we expect most placeholder
+            // will have 0 and a few will have 1. More than that should hopefully be
+            // exceptional.
+            let mut collector = MatchCollector::default();
+            for m in std::mem::replace(&mut p.inner_matches.matches, Vec::new()) {
+                collector.matches_by_node.insert(m.matched_node.clone(), m);
+            }
+            collector.add_match(m, sema);
+            p.inner_matches = collector.into();
+            break;
+        }
+    }
+}
+
+impl From<MatchCollector> for SsrMatches {
+    fn from(mut match_collector: MatchCollector) -> Self {
+        let mut matches = SsrMatches::default();
+        for (_, m) in match_collector.matches_by_node.drain() {
+            matches.matches.push(m);
+        }
+        matches.matches.sort_by(|a, b| {
+            // Order matches by file_id then by start range. This should be sufficient since ranges
+            // shouldn't be overlapping.
+            a.range
+                .file_id
+                .cmp(&b.range.file_id)
+                .then_with(|| a.range.range.start().cmp(&b.range.range.start()))
+        });
+        matches
+    }
+}
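Tracing the module-level example above through `nest_and_remove_collisions`, a sketch rather than a test from the diff:

```rust
// pattern: `foo(foo($a))`      code: `foo(foo(foo(foo(42))))`
//
// Raw matches, sorted by depth (then rule index):
//   1. foo(foo(foo(foo(42))))   -- outermost, kept as a top-level match
//   2. foo(foo(foo(42)))        -- overlaps match 1 outside any placeholder: discarded
//   3. foo(foo(42))             -- lies entirely inside `$a` of match 1: nested as an inner match
//
// The resulting SsrMatches therefore contains match 1, with match 3 stored in the
// `inner_matches` of match 1's `$a` placeholder.
```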
diff --git a/crates/ra_ssr/src/parsing.rs b/crates/ra_ssr/src/parsing.rs
index 4aee97bb2..2d6f4e514 100644
--- a/crates/ra_ssr/src/parsing.rs
+++ b/crates/ra_ssr/src/parsing.rs
@@ -7,17 +7,19 @@
7 7
8use crate::errors::bail; 8use crate::errors::bail;
9use crate::{SsrError, SsrPattern, SsrRule}; 9use crate::{SsrError, SsrPattern, SsrRule};
10use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, T}; 10use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T};
11use rustc_hash::{FxHashMap, FxHashSet}; 11use rustc_hash::{FxHashMap, FxHashSet};
12use std::str::FromStr; 12use std::str::FromStr;
13 13
14#[derive(Clone, Debug)] 14#[derive(Debug)]
15pub(crate) struct SsrTemplate { 15pub(crate) struct ParsedRule {
16 pub(crate) tokens: Vec<PatternElement>, 16 pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
17 pub(crate) pattern: SyntaxNode,
18 pub(crate) template: Option<SyntaxNode>,
17} 19}
18 20
19#[derive(Debug)] 21#[derive(Debug)]
20pub(crate) struct RawSearchPattern { 22pub(crate) struct RawPattern {
21 tokens: Vec<PatternElement>, 23 tokens: Vec<PatternElement>,
22} 24}
23 25
@@ -54,6 +56,60 @@ pub(crate) struct Token {
54 pub(crate) text: SmolStr, 56 pub(crate) text: SmolStr,
55} 57}
56 58
59impl ParsedRule {
60 fn new(
61 pattern: &RawPattern,
62 template: Option<&RawPattern>,
63 ) -> Result<Vec<ParsedRule>, SsrError> {
64 let raw_pattern = pattern.as_rust_code();
65 let raw_template = template.map(|t| t.as_rust_code());
66 let raw_template = raw_template.as_ref().map(|s| s.as_str());
67 let mut builder = RuleBuilder {
68 placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
69 rules: Vec::new(),
70 };
71 builder.try_add(ast::Expr::parse(&raw_pattern), raw_template.map(ast::Expr::parse));
72 builder.try_add(ast::TypeRef::parse(&raw_pattern), raw_template.map(ast::TypeRef::parse));
73 builder.try_add(
74 ast::ModuleItem::parse(&raw_pattern),
75 raw_template.map(ast::ModuleItem::parse),
76 );
77 builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse));
78 builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse));
79 builder.build()
80 }
81}
82
83struct RuleBuilder {
84 placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
85 rules: Vec<ParsedRule>,
86}
87
88impl RuleBuilder {
89 fn try_add<T: AstNode>(&mut self, pattern: Result<T, ()>, template: Option<Result<T, ()>>) {
90 match (pattern, template) {
91 (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule {
92 placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
93 pattern: pattern.syntax().clone(),
94 template: Some(template.syntax().clone()),
95 }),
96 (Ok(pattern), None) => self.rules.push(ParsedRule {
97 placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
98 pattern: pattern.syntax().clone(),
99 template: None,
100 }),
101 _ => {}
102 }
103 }
104
105 fn build(self) -> Result<Vec<ParsedRule>, SsrError> {
106 if self.rules.is_empty() {
107 bail!("Not a valid Rust expression, type, item, path or pattern");
108 }
109 Ok(self.rules)
110 }
111}
112
57impl FromStr for SsrRule { 113impl FromStr for SsrRule {
58 type Err = SsrError; 114 type Err = SsrError;
59 115
@@ -68,21 +124,24 @@ impl FromStr for SsrRule {
68 if it.next().is_some() { 124 if it.next().is_some() {
69 return Err(SsrError("More than one delimiter found".into())); 125 return Err(SsrError("More than one delimiter found".into()));
70 } 126 }
71 let rule = SsrRule { pattern: pattern.parse()?, template: template.parse()? }; 127 let raw_pattern = pattern.parse()?;
128 let raw_template = template.parse()?;
129 let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?;
130 let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules };
72 validate_rule(&rule)?; 131 validate_rule(&rule)?;
73 Ok(rule) 132 Ok(rule)
74 } 133 }
75} 134}
76 135
77impl FromStr for RawSearchPattern { 136impl FromStr for RawPattern {
78 type Err = SsrError; 137 type Err = SsrError;
79 138
80 fn from_str(pattern_str: &str) -> Result<RawSearchPattern, SsrError> { 139 fn from_str(pattern_str: &str) -> Result<RawPattern, SsrError> {
81 Ok(RawSearchPattern { tokens: parse_pattern(pattern_str)? }) 140 Ok(RawPattern { tokens: parse_pattern(pattern_str)? })
82 } 141 }
83} 142}
84 143
85impl RawSearchPattern { 144impl RawPattern {
86 /// Returns this search pattern as Rust source code that we can feed to the Rust parser. 145 /// Returns this search pattern as Rust source code that we can feed to the Rust parser.
87 fn as_rust_code(&self) -> String { 146 fn as_rust_code(&self) -> String {
88 let mut res = String::new(); 147 let mut res = String::new();
@@ -95,7 +154,7 @@ impl RawSearchPattern {
95 res 154 res
96 } 155 }
97 156
98 fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> { 157 pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> {
99 let mut res = FxHashMap::default(); 158 let mut res = FxHashMap::default();
100 for t in &self.tokens { 159 for t in &self.tokens {
101 if let PatternElement::Placeholder(placeholder) = t { 160 if let PatternElement::Placeholder(placeholder) = t {
@@ -110,41 +169,9 @@ impl FromStr for SsrPattern {
110 type Err = SsrError; 169 type Err = SsrError;
111 170
112 fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> { 171 fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> {
113 let raw: RawSearchPattern = pattern_str.parse()?; 172 let raw_pattern = pattern_str.parse()?;
114 let raw_str = raw.as_rust_code(); 173 let parsed_rules = ParsedRule::new(&raw_pattern, None)?;
115 let res = SsrPattern { 174 Ok(SsrPattern { raw: raw_pattern, parsed_rules })
116 expr: ast::Expr::parse(&raw_str).ok().map(|n| n.syntax().clone()),
117 type_ref: ast::TypeRef::parse(&raw_str).ok().map(|n| n.syntax().clone()),
118 item: ast::ModuleItem::parse(&raw_str).ok().map(|n| n.syntax().clone()),
119 path: ast::Path::parse(&raw_str).ok().map(|n| n.syntax().clone()),
120 pattern: ast::Pat::parse(&raw_str).ok().map(|n| n.syntax().clone()),
121 placeholders_by_stand_in: raw.placeholders_by_stand_in(),
122 raw,
123 };
124 if res.expr.is_none()
125 && res.type_ref.is_none()
126 && res.item.is_none()
127 && res.path.is_none()
128 && res.pattern.is_none()
129 {
130 bail!("Pattern is not a valid Rust expression, type, item, path or pattern");
131 }
132 Ok(res)
133 }
134}
135
136impl FromStr for SsrTemplate {
137 type Err = SsrError;
138
139 fn from_str(pattern_str: &str) -> Result<SsrTemplate, SsrError> {
140 let tokens = parse_pattern(pattern_str)?;
141 // Validate that the template is a valid fragment of Rust code. We reuse the validation
142 // logic for search patterns since the only thing that differs is the error message.
143 if SsrPattern::from_str(pattern_str).is_err() {
144 bail!("Replacement is not a valid Rust expression, type, item, path or pattern");
145 }
146 // Our actual template needs to preserve whitespace, so we can't reuse `tokens`.
147 Ok(SsrTemplate { tokens })
148 } 175 }
149} 176}
150 177
@@ -173,7 +200,7 @@ fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
173/// pattern didn't define. 200/// pattern didn't define.
174fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> { 201fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
175 let mut defined_placeholders = FxHashSet::default(); 202 let mut defined_placeholders = FxHashSet::default();
176 for p in &rule.pattern.raw.tokens { 203 for p in &rule.pattern.tokens {
177 if let PatternElement::Placeholder(placeholder) = p { 204 if let PatternElement::Placeholder(placeholder) = p {
178 defined_placeholders.insert(&placeholder.ident); 205 defined_placeholders.insert(&placeholder.ident);
179 } 206 }
@@ -316,7 +343,7 @@ mod tests {
316 } 343 }
317 let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap(); 344 let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
318 assert_eq!( 345 assert_eq!(
319 result.pattern.raw.tokens, 346 result.pattern.tokens,
320 vec![ 347 vec![
321 token(SyntaxKind::IDENT, "foo"), 348 token(SyntaxKind::IDENT, "foo"),
322 token(T!['('], "("), 349 token(T!['('], "("),
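`ParsedRule::new` above feeds the same raw text to each fragment parser (expression, type, item, path and pattern) and keeps every parse that succeeds, so one textual rule can produce several `ParsedRule`s; the builder reports an error only if every parse fails. A rough, self-contained sketch of that control flow, with toy predicates standing in for the real `ast::*::parse` functions:

```rust
// Toy stand-ins for ast::Expr::parse, ast::Path::parse, etc. Only the control
// flow matters: collect every successful parse, error only if none succeed.
fn parse_as_fragments(input: &str) -> Result<Vec<&'static str>, String> {
    let parsers: [(&'static str, fn(&str) -> bool); 3] = [
        ("expression", |s| s.ends_with(')')),
        ("path", |s| s.contains("::")),
        ("type", |s| s.chars().next().map_or(false, char::is_uppercase)),
    ];
    let kinds: Vec<&'static str> =
        parsers.iter().filter(|(_, ok)| ok(input)).map(|(kind, _)| *kind).collect();
    if kinds.is_empty() {
        return Err("Not a valid Rust expression, type, item, path or pattern".to_string());
    }
    Ok(kinds)
}

fn main() {
    // `foo::bar()` satisfies both the expression and the path stand-ins here,
    // so two rules would be built; an empty pattern fails all of them.
    println!("{:?}", parse_as_fragments("foo::bar()"));
    println!("{:?}", parse_as_fragments(""));
}
```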
diff --git a/crates/ra_ssr/src/replacing.rs b/crates/ra_ssr/src/replacing.rs
index e43cc5167..4b3f5509c 100644
--- a/crates/ra_ssr/src/replacing.rs
+++ b/crates/ra_ssr/src/replacing.rs
@@ -1,66 +1,124 @@
1//! Code for applying replacement templates for matches that have previously been found. 1//! Code for applying replacement templates for matches that have previously been found.
2 2
3use crate::matching::Var; 3use crate::matching::Var;
4use crate::parsing::PatternElement; 4use crate::{resolving::ResolvedRule, Match, SsrMatches};
5use crate::{Match, SsrMatches}; 5use ra_syntax::ast::{self, AstToken};
6use ra_syntax::ast::AstToken; 6use ra_syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextSize};
7use ra_syntax::TextSize;
8use ra_text_edit::TextEdit; 7use ra_text_edit::TextEdit;
9 8
10/// Returns a text edit that will replace each match in `matches` with its corresponding replacement 9/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
11/// template. Placeholders in the template will have been substituted with whatever they matched to 10/// template. Placeholders in the template will have been substituted with whatever they matched to
12/// in the original code. 11/// in the original code.
13pub(crate) fn matches_to_edit(matches: &SsrMatches, file_src: &str) -> TextEdit { 12pub(crate) fn matches_to_edit(
14 matches_to_edit_at_offset(matches, file_src, 0.into()) 13 matches: &SsrMatches,
14 file_src: &str,
15 rules: &[ResolvedRule],
16) -> TextEdit {
17 matches_to_edit_at_offset(matches, file_src, 0.into(), rules)
15} 18}
16 19
17fn matches_to_edit_at_offset( 20fn matches_to_edit_at_offset(
18 matches: &SsrMatches, 21 matches: &SsrMatches,
19 file_src: &str, 22 file_src: &str,
20 relative_start: TextSize, 23 relative_start: TextSize,
24 rules: &[ResolvedRule],
21) -> TextEdit { 25) -> TextEdit {
22 let mut edit_builder = ra_text_edit::TextEditBuilder::default(); 26 let mut edit_builder = ra_text_edit::TextEditBuilder::default();
23 for m in &matches.matches { 27 for m in &matches.matches {
24 edit_builder.replace( 28 edit_builder.replace(
25 m.range.range.checked_sub(relative_start).unwrap(), 29 m.range.range.checked_sub(relative_start).unwrap(),
26 render_replace(m, file_src), 30 render_replace(m, file_src, rules),
27 ); 31 );
28 } 32 }
29 edit_builder.finish() 33 edit_builder.finish()
30} 34}
31 35
32fn render_replace(match_info: &Match, file_src: &str) -> String { 36struct ReplacementRenderer<'a> {
37 match_info: &'a Match,
38 file_src: &'a str,
39 rules: &'a [ResolvedRule],
40 rule: &'a ResolvedRule,
41}
42
43fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String {
33 let mut out = String::new(); 44 let mut out = String::new();
34 for r in &match_info.template.tokens { 45 let rule = &rules[match_info.rule_index];
35 match r { 46 let template = rule
36 PatternElement::Token(t) => out.push_str(t.text.as_str()), 47 .template
37 PatternElement::Placeholder(p) => { 48 .as_ref()
38 if let Some(placeholder_value) = 49 .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern");
39 match_info.placeholder_values.get(&Var(p.ident.to_string())) 50 let renderer = ReplacementRenderer { match_info, file_src, rules, rule };
40 { 51 renderer.render_node(&template.node, &mut out);
41 let range = &placeholder_value.range.range; 52 for comment in &match_info.ignored_comments {
42 let mut matched_text = 53 out.push_str(&comment.syntax().to_string());
43 file_src[usize::from(range.start())..usize::from(range.end())].to_owned(); 54 }
44 let edit = matches_to_edit_at_offset( 55 out
45 &placeholder_value.inner_matches, 56}
46 file_src, 57
47 range.start(), 58impl ReplacementRenderer<'_> {
48 ); 59 fn render_node_children(&self, node: &SyntaxNode, out: &mut String) {
49 edit.apply(&mut matched_text); 60 for node_or_token in node.children_with_tokens() {
50 out.push_str(&matched_text); 61 self.render_node_or_token(&node_or_token, out);
51 } else { 62 }
52 // We validated that all placeholder references were valid before we 63 }
53 // started, so this shouldn't happen. 64
54 panic!( 65 fn render_node_or_token(&self, node_or_token: &SyntaxElement, out: &mut String) {
55 "Internal error: replacement referenced unknown placeholder {}", 66 match node_or_token {
56 p.ident 67 SyntaxElement::Token(token) => {
57 ); 68 self.render_token(&token, out);
69 }
70 SyntaxElement::Node(child_node) => {
71 self.render_node(&child_node, out);
72 }
73 }
74 }
75
76 fn render_node(&self, node: &SyntaxNode, out: &mut String) {
77 use ra_syntax::ast::AstNode;
78 if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
79 out.push_str(&mod_path.to_string());
80 // Emit everything except for the segment's name-ref, since we already effectively
81 // emitted that as part of `mod_path`.
82 if let Some(path) = ast::Path::cast(node.clone()) {
83 if let Some(segment) = path.segment() {
84 for node_or_token in segment.syntax().children_with_tokens() {
85 if node_or_token.kind() != SyntaxKind::NAME_REF {
86 self.render_node_or_token(&node_or_token, out);
87 }
88 }
58 } 89 }
59 } 90 }
91 } else {
92 self.render_node_children(&node, out);
60 } 93 }
61 } 94 }
62 for comment in &match_info.ignored_comments { 95
63 out.push_str(&comment.syntax().to_string()); 96 fn render_token(&self, token: &SyntaxToken, out: &mut String) {
97 if let Some(placeholder) = self.rule.get_placeholder(&token) {
98 if let Some(placeholder_value) =
99 self.match_info.placeholder_values.get(&Var(placeholder.ident.to_string()))
100 {
101 let range = &placeholder_value.range.range;
102 let mut matched_text =
103 self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned();
104 let edit = matches_to_edit_at_offset(
105 &placeholder_value.inner_matches,
106 self.file_src,
107 range.start(),
108 self.rules,
109 );
110 edit.apply(&mut matched_text);
111 out.push_str(&matched_text);
112 } else {
113 // We validated that all placeholder references were valid before we
114 // started, so this shouldn't happen.
115 panic!(
116 "Internal error: replacement referenced unknown placeholder {}",
117 placeholder.ident
118 );
119 }
120 } else {
121 out.push_str(token.text().as_str());
122 }
64 } 123 }
65 out
66} 124}
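The renderer above walks the template's syntax tree: ordinary tokens are copied through, identifier tokens that are placeholder stand-ins are replaced by the text the placeholder matched (after recursively applying edits for any inner matches), and paths recorded in `rendered_template_paths` are emitted in their re-qualified form. A stripped-down sketch of just the placeholder substitution, with a flat token list and made-up stand-in names instead of the real syntax tree:

```rust
use std::collections::HashMap;

// Stripped-down substitution only; the real renderer walks a SyntaxNode and
// also handles resolved template paths and the match's trailing comments.
fn render(template_tokens: &[&str], placeholder_values: &HashMap<&str, &str>) -> String {
    let mut out = String::new();
    for token in template_tokens {
        match placeholder_values.get(token) {
            // A placeholder stand-in: splice in whatever the placeholder matched.
            Some(matched_text) => out.push_str(matched_text),
            // Any other token is emitted verbatim.
            None => out.push_str(token),
        }
    }
    out
}

fn main() {
    // For the rule `foo($a, $b) ==>> bar($b, $a)` matched against `foo(5, 42)`.
    // The stand-in identifiers are invented for this sketch.
    let values = HashMap::from([("stand_in_a", "5"), ("stand_in_b", "42")]);
    let template = ["bar", "(", "stand_in_b", ", ", "stand_in_a", ")"];
    assert_eq!(render(&template, &values), "bar(42, 5)");
}
```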
diff --git a/crates/ra_ssr/src/resolving.rs b/crates/ra_ssr/src/resolving.rs
new file mode 100644
index 000000000..75f556785
--- /dev/null
+++ b/crates/ra_ssr/src/resolving.rs
@@ -0,0 +1,173 @@
1//! This module is responsible for resolving paths within rules.
2
3use crate::errors::error;
4use crate::{parsing, SsrError};
5use parsing::Placeholder;
6use ra_syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken};
7use rustc_hash::{FxHashMap, FxHashSet};
8use test_utils::mark;
9
10pub(crate) struct ResolvedRule {
11 pub(crate) pattern: ResolvedPattern,
12 pub(crate) template: Option<ResolvedPattern>,
13 pub(crate) index: usize,
14}
15
16pub(crate) struct ResolvedPattern {
17 pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
18 pub(crate) node: SyntaxNode,
19 // Paths in `node` that we've resolved.
20 pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
21 pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, hir::Function>,
22}
23
24pub(crate) struct ResolvedPath {
25 pub(crate) resolution: hir::PathResolution,
26 /// The depth of the ast::Path that was resolved within the pattern.
27 pub(crate) depth: u32,
28}
29
30impl ResolvedRule {
31 pub(crate) fn new(
32 rule: parsing::ParsedRule,
33 scope: &hir::SemanticsScope,
34 hygiene: &hir::Hygiene,
35 index: usize,
36 ) -> Result<ResolvedRule, SsrError> {
37 let resolver =
38 Resolver { scope, hygiene, placeholders_by_stand_in: rule.placeholders_by_stand_in };
39 let resolved_template = if let Some(template) = rule.template {
40 Some(resolver.resolve_pattern_tree(template)?)
41 } else {
42 None
43 };
44 Ok(ResolvedRule {
45 pattern: resolver.resolve_pattern_tree(rule.pattern)?,
46 template: resolved_template,
47 index,
48 })
49 }
50
51 pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> {
52 if token.kind() != SyntaxKind::IDENT {
53 return None;
54 }
55 self.pattern.placeholders_by_stand_in.get(token.text())
56 }
57}
58
59struct Resolver<'a, 'db> {
60 scope: &'a hir::SemanticsScope<'db>,
61 hygiene: &'a hir::Hygiene,
62 placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
63}
64
65impl Resolver<'_, '_> {
66 fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
67 let mut resolved_paths = FxHashMap::default();
68 self.resolve(pattern.clone(), 0, &mut resolved_paths)?;
69 let ufcs_function_calls = resolved_paths
70 .iter()
71 .filter_map(|(path_node, resolved)| {
72 if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) {
73 if grandparent.kind() == SyntaxKind::CALL_EXPR {
74 if let hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) =
75 &resolved.resolution
76 {
77 return Some((grandparent, *function));
78 }
79 }
80 }
81 None
82 })
83 .collect();
84 Ok(ResolvedPattern {
85 node: pattern,
86 resolved_paths,
87 placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
88 ufcs_function_calls,
89 })
90 }
91
92 fn resolve(
93 &self,
94 node: SyntaxNode,
95 depth: u32,
96 resolved_paths: &mut FxHashMap<SyntaxNode, ResolvedPath>,
97 ) -> Result<(), SsrError> {
98 use ra_syntax::ast::AstNode;
99 if let Some(path) = ast::Path::cast(node.clone()) {
100 // Check if this is an appropriate place in the path to resolve. If the path is
101 // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains
102 // a placeholder, e.g. `a::$b::c`, then we want to resolve `a`.
103 if !path_contains_type_arguments(path.qualifier())
104 && !self.path_contains_placeholder(&path)
105 {
106 let resolution = self
107 .resolve_path(&path)
108 .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
109 resolved_paths.insert(node, ResolvedPath { resolution, depth });
110 return Ok(());
111 }
112 }
113 for node in node.children() {
114 self.resolve(node, depth + 1, resolved_paths)?;
115 }
116 Ok(())
117 }
118
119 /// Returns whether `path` contains a placeholder, but ignores any placeholders within type
120 /// arguments.
121 fn path_contains_placeholder(&self, path: &ast::Path) -> bool {
122 if let Some(segment) = path.segment() {
123 if let Some(name_ref) = segment.name_ref() {
124 if self.placeholders_by_stand_in.contains_key(name_ref.text()) {
125 return true;
126 }
127 }
128 }
129 if let Some(qualifier) = path.qualifier() {
130 return self.path_contains_placeholder(&qualifier);
131 }
132 false
133 }
134
135 fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
136 let hir_path = hir::Path::from_src(path.clone(), self.hygiene)?;
137 // First try resolving the whole path. This will work for things like
138 // `std::collections::HashMap`, but will fail for things like
139 // `std::collections::HashMap::new`.
140 if let Some(resolution) = self.scope.resolve_hir_path(&hir_path) {
141 return Some(resolution);
142 }
143 // Resolution failed; try resolving the qualifier (e.g. `std::collections::HashMap`) and, if
144 // that succeeds, iterate through the candidates on the resolved type with the provided
145 // name.
146 let resolved_qualifier = self.scope.resolve_hir_path_qualifier(&hir_path.qualifier()?)?;
147 if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
148 adt.ty(self.scope.db).iterate_path_candidates(
149 self.scope.db,
150 self.scope.module()?.krate(),
151 &FxHashSet::default(),
152 Some(hir_path.segments().last()?.name),
153 |_ty, assoc_item| Some(hir::PathResolution::AssocItem(assoc_item)),
154 )
155 } else {
156 None
157 }
158 }
159}
160
161/// Returns whether `path` or any of its qualifiers contains type arguments.
162fn path_contains_type_arguments(path: Option<ast::Path>) -> bool {
163 if let Some(path) = path {
164 if let Some(segment) = path.segment() {
165 if segment.type_arg_list().is_some() {
166 mark::hit!(type_arguments_within_path);
167 return true;
168 }
169 }
170 return path_contains_type_arguments(path.qualifier());
171 }
172 false
173}
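The key piece of the resolver above is `resolve_path`: it first tries to resolve the whole path, and only when that fails (as it will for associated functions such as `std::collections::HashMap::new`) does it resolve the qualifier and look the final segment up among the associated items of the resolved type. A schematic sketch of that fallback, with strings standing in for `hir::Path` and `hir::PathResolution`:

```rust
// Schematic only: strings stand in for hir::Path and hir::PathResolution.
fn resolve_path(
    path: &str,
    resolve: impl Fn(&str) -> Option<String>,
    assoc_item: impl Fn(&str, &str) -> Option<String>,
) -> Option<String> {
    // First try the whole path (works for e.g. `std::collections::HashMap`).
    if let Some(resolution) = resolve(path) {
        return Some(resolution);
    }
    // Otherwise resolve the qualifier and look the last segment up on the
    // result (works for e.g. `std::collections::HashMap::new`).
    let (qualifier, name) = path.rsplit_once("::")?;
    let qualifier_resolution = resolve(qualifier)?;
    assoc_item(&qualifier_resolution, name)
}

fn main() {
    let resolve =
        |p: &str| (p == "std::collections::HashMap").then(|| "struct HashMap".to_string());
    let assoc_item = |ty: &str, name: &str| Some(format!("{} -> fn {}", ty, name));
    println!("{:?}", resolve_path("std::collections::HashMap::new", resolve, assoc_item));
}
```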
diff --git a/crates/ra_ssr/src/search.rs b/crates/ra_ssr/src/search.rs
new file mode 100644
index 000000000..bcf0f0468
--- /dev/null
+++ b/crates/ra_ssr/src/search.rs
@@ -0,0 +1,232 @@
1//! Searching for matches.
2
3use crate::{
4 matching,
5 resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
6 Match, MatchFinder,
7};
8use ra_db::FileRange;
9use ra_ide_db::{
10 defs::Definition,
11 search::{Reference, SearchScope},
12};
13use ra_syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
14use test_utils::mark;
15
16/// A cache for the results of find_usages. This is for when we have multiple patterns that have the
17/// same path, e.g. a pattern like `foo::Bar` can parse as a path, an expression, a type
18/// and a pattern. In each case, the usages of `foo::Bar` are the same and we'd like to avoid finding
19/// them more than once.
20#[derive(Default)]
21pub(crate) struct UsageCache {
22 usages: Vec<(Definition, Vec<Reference>)>,
23}
24
25impl<'db> MatchFinder<'db> {
26 /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
27 /// replacement impossible, so further processing is required in order to properly nest matches
28 /// and remove overlapping matches. This is done in the `nester` module.
29 pub(crate) fn find_matches_for_rule(
30 &self,
31 rule: &ResolvedRule,
32 usage_cache: &mut UsageCache,
33 matches_out: &mut Vec<Match>,
34 ) {
35 if pick_path_for_usages(&rule.pattern).is_none() {
36 self.slow_scan(rule, matches_out);
37 return;
38 }
39 self.find_matches_for_pattern_tree(rule, &rule.pattern, usage_cache, matches_out);
40 }
41
42 fn find_matches_for_pattern_tree(
43 &self,
44 rule: &ResolvedRule,
45 pattern: &ResolvedPattern,
46 usage_cache: &mut UsageCache,
47 matches_out: &mut Vec<Match>,
48 ) {
49 if let Some(resolved_path) = pick_path_for_usages(pattern) {
50 let definition: Definition = resolved_path.resolution.clone().into();
51 for reference in self.find_usages(usage_cache, definition) {
52 if let Some(node_to_match) = self.find_node_to_match(resolved_path, reference) {
53 if !is_search_permitted_ancestors(&node_to_match) {
54 mark::hit!(use_declaration_with_braces);
55 continue;
56 }
57 if let Ok(m) =
58 matching::get_match(false, rule, &node_to_match, &None, &self.sema)
59 {
60 matches_out.push(m);
61 }
62 }
63 }
64 }
65 }
66
67 fn find_node_to_match(
68 &self,
69 resolved_path: &ResolvedPath,
70 reference: &Reference,
71 ) -> Option<SyntaxNode> {
72 let file = self.sema.parse(reference.file_range.file_id);
73 let depth = resolved_path.depth as usize;
74 let offset = reference.file_range.range.start();
75 if let Some(path) =
76 self.sema.find_node_at_offset_with_descend::<ast::Path>(file.syntax(), offset)
77 {
78 self.sema.ancestors_with_macros(path.syntax().clone()).skip(depth).next()
79 } else if let Some(path) =
80 self.sema.find_node_at_offset_with_descend::<ast::MethodCallExpr>(file.syntax(), offset)
81 {
82 // If the pattern contained a path and we found a reference to that path that wasn't
83 // itself a path, but was a method call, then we need to adjust how far up to try
84 // matching by how deep the path was within a CallExpr. The structure would have been
85 // CallExpr, PathExpr, Path - i.e. a depth offset of 2. We don't need to check if the
86 // path was part of a CallExpr because if it wasn't then all that will happen is we'll
87 // fail to match, which is the desired behavior.
88 const PATH_DEPTH_IN_CALL_EXPR: usize = 2;
89 if depth < PATH_DEPTH_IN_CALL_EXPR {
90 return None;
91 }
92 self.sema
93 .ancestors_with_macros(path.syntax().clone())
94 .skip(depth - PATH_DEPTH_IN_CALL_EXPR)
95 .next()
96 } else {
97 None
98 }
99 }
100
101 fn find_usages<'a>(
102 &self,
103 usage_cache: &'a mut UsageCache,
104 definition: Definition,
105 ) -> &'a [Reference] {
106 // Logically if a lookup succeeds we should just return it. Unfortunately returning it would
107 // extend the lifetime of the borrow, and then we wouldn't be able to do the insertion on a
108 // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
109 // lookups in the case of a cache hit.
110 if usage_cache.find(&definition).is_none() {
111 let usages = definition.find_usages(&self.sema, Some(self.search_scope()));
112 usage_cache.usages.push((definition, usages));
113 return &usage_cache.usages.last().unwrap().1;
114 }
115 usage_cache.find(&definition).unwrap()
116 }
117
118 /// Returns the scope within which we want to search. We don't want an unrestricted search
119 /// scope, since we don't want to find references in external dependencies.
120 fn search_scope(&self) -> SearchScope {
121 // FIXME: We should ideally have a test that checks that we edit local roots and not library
122 // roots. This probably would require some changes to fixtures, since currently everything
123 // seems to get put into a single source root.
124 use ra_db::SourceDatabaseExt;
125 use ra_ide_db::symbol_index::SymbolsDatabase;
126 let mut files = Vec::new();
127 for &root in self.sema.db.local_roots().iter() {
128 let sr = self.sema.db.source_root(root);
129 files.extend(sr.iter());
130 }
131 SearchScope::files(&files)
132 }
133
134 fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
135 use ra_db::SourceDatabaseExt;
136 use ra_ide_db::symbol_index::SymbolsDatabase;
137 for &root in self.sema.db.local_roots().iter() {
138 let sr = self.sema.db.source_root(root);
139 for file_id in sr.iter() {
140 let file = self.sema.parse(file_id);
141 let code = file.syntax();
142 self.slow_scan_node(code, rule, &None, matches_out);
143 }
144 }
145 }
146
147 fn slow_scan_node(
148 &self,
149 code: &SyntaxNode,
150 rule: &ResolvedRule,
151 restrict_range: &Option<FileRange>,
152 matches_out: &mut Vec<Match>,
153 ) {
154 if !is_search_permitted(code) {
155 return;
156 }
157 if let Ok(m) = matching::get_match(false, rule, &code, restrict_range, &self.sema) {
158 matches_out.push(m);
159 }
160 // If we've got a macro call, we already tried matching it pre-expansion, which is the only
161 // way to match the whole macro; now try expanding it and matching the expansion.
162 if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
163 if let Some(expanded) = self.sema.expand(&macro_call) {
164 if let Some(tt) = macro_call.token_tree() {
165 // When matching within a macro expansion, we only want to allow matches of
166 // nodes that originated entirely from within the token tree of the macro call.
167 // i.e. we don't want to match something that came from the macro itself.
168 self.slow_scan_node(
169 &expanded,
170 rule,
171 &Some(self.sema.original_range(tt.syntax())),
172 matches_out,
173 );
174 }
175 }
176 }
177 for child in code.children() {
178 self.slow_scan_node(&child, rule, restrict_range, matches_out);
179 }
180 }
181}
182
183/// Returns whether we support matching within `node` and all of its ancestors.
184fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool {
185 if let Some(parent) = node.parent() {
186 if !is_search_permitted_ancestors(&parent) {
187 return false;
188 }
189 }
190 is_search_permitted(node)
191}
192
193/// Returns whether we support matching within this kind of node.
194fn is_search_permitted(node: &SyntaxNode) -> bool {
195 // FIXME: Properly handle use declarations. At the moment, if our search pattern is `foo::bar`
196 // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`.
197 // However we'll then replace just the part we matched `bar`. We probably need to instead remove
198 // `bar` and insert a new use declaration.
199 node.kind() != SyntaxKind::USE_ITEM
200}
201
202impl UsageCache {
203 fn find(&mut self, definition: &Definition) -> Option<&[Reference]> {
204 // We expect a very small number of cache entries (generally 1), so a linear scan should be
205 // fast enough and avoids the need to implement Hash for Definition.
206 for (d, refs) in &self.usages {
207 if d == definition {
208 return Some(refs);
209 }
210 }
211 None
212 }
213}
214
215/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't
216/// something that we can find references to. We then somewhat arbitrarily pick the path that is the
217 /// longest, as it's hopefully less common, making its references faster to find.
218fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> {
219 // FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are
220 // private to the current module, then we definitely would want to pick them over say a path
221 // from std. Possibly we should go further than this and intersect the search scopes for all
222 // resolved paths then search only in that scope.
223 pattern
224 .resolved_paths
225 .iter()
226 .filter(|(_, p)| {
227 !matches!(p.resolution, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)))
228 })
229 .map(|(node, resolved)| (node.text().len(), resolved))
230 .max_by(|(a, _), (b, _)| a.cmp(b))
231 .map(|(_, resolved)| resolved)
232}
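The search strategy above in short: if the pattern contains at least one resolvable, non-builtin path, the finder looks up references to the longest such path and only attempts matching around those references; otherwise it falls back to `slow_scan`, which walks every syntax node in every local file. The path choice is a heuristic, sketched here with plain strings rather than `ResolvedPath`:

```rust
// Heuristic sketch of pick_path_for_usages: skip builtin types (they have no
// references to find) and prefer the textually longest path, which is
// hopefully the least common and therefore the cheapest to search for.
fn pick_path_for_usages(resolved_paths: &[(String, bool)]) -> Option<&str> {
    resolved_paths
        .iter()
        .filter(|(_, is_builtin)| !*is_builtin)
        .map(|(path, _)| path.as_str())
        .max_by_key(|path| path.len())
}

fn main() {
    // Candidate paths resolved from some pattern; the bool marks builtin types.
    let candidates = vec![
        ("i32".to_string(), true),
        ("collections".to_string(), false),
        ("collections::HashMap".to_string(), false),
    ];
    assert_eq!(pick_path_for_usages(&candidates), Some("collections::HashMap"));
}
```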
diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs
index f20ae2cdf..b38807c0f 100644
--- a/crates/ra_ssr/src/tests.rs
+++ b/crates/ra_ssr/src/tests.rs
@@ -1,5 +1,8 @@
1use crate::{MatchFinder, SsrRule}; 1use crate::{MatchFinder, SsrRule};
2use ra_db::{FileId, SourceDatabaseExt}; 2use expect::{expect, Expect};
3use ra_db::{salsa::Durability, FileId, FilePosition, SourceDatabaseExt};
4use rustc_hash::FxHashSet;
5use std::sync::Arc;
3use test_utils::mark; 6use test_utils::mark;
4 7
5fn parse_error_text(query: &str) -> String { 8fn parse_error_text(query: &str) -> String {
@@ -36,7 +39,7 @@ fn parser_repeated_name() {
36fn parser_invalid_pattern() { 39fn parser_invalid_pattern() {
37 assert_eq!( 40 assert_eq!(
38 parse_error_text(" ==>> ()"), 41 parse_error_text(" ==>> ()"),
39 "Parse error: Pattern is not a valid Rust expression, type, item, path or pattern" 42 "Parse error: Not a valid Rust expression, type, item, path or pattern"
40 ); 43 );
41} 44}
42 45
@@ -44,7 +47,7 @@ fn parser_invalid_pattern() {
44fn parser_invalid_template() { 47fn parser_invalid_template() {
45 assert_eq!( 48 assert_eq!(
46 parse_error_text("() ==>> )"), 49 parse_error_text("() ==>> )"),
47 "Parse error: Replacement is not a valid Rust expression, type, item, path or pattern" 50 "Parse error: Not a valid Rust expression, type, item, path or pattern"
48 ); 51 );
49} 52}
50 53
@@ -56,39 +59,44 @@ fn parser_undefined_placeholder_in_replacement() {
56 ); 59 );
57} 60}
58 61
59fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FileId) { 62/// `code` may optionally contain a cursor marker `<|>`. If it doesn't, then the position will be
63/// the start of the file.
64pub(crate) fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FilePosition) {
60 use ra_db::fixture::WithFixture; 65 use ra_db::fixture::WithFixture;
61 ra_ide_db::RootDatabase::with_single_file(code) 66 use ra_ide_db::symbol_index::SymbolsDatabase;
62} 67 let (mut db, position) = if code.contains(test_utils::CURSOR_MARKER) {
63 68 ra_ide_db::RootDatabase::with_position(code)
64fn assert_ssr_transform(rule: &str, input: &str, result: &str) { 69 } else {
65 assert_ssr_transforms(&[rule], input, result); 70 let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(code);
71 (db, FilePosition { file_id, offset: 0.into() })
72 };
73 let mut local_roots = FxHashSet::default();
74 local_roots.insert(ra_db::fixture::WORKSPACE);
75 db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
76 (db, position)
66} 77}
67 78
68fn normalize_code(code: &str) -> String { 79fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
69 let (db, file_id) = single_file(code); 80 assert_ssr_transforms(&[rule], input, expected);
70 db.file_text(file_id).to_string()
71} 81}
72 82
73fn assert_ssr_transforms(rules: &[&str], input: &str, result: &str) { 83fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
74 let (db, file_id) = single_file(input); 84 let (db, position) = single_file(input);
75 let mut match_finder = MatchFinder::new(&db); 85 let mut match_finder = MatchFinder::in_context(&db, position);
76 for rule in rules { 86 for rule in rules {
77 let rule: SsrRule = rule.parse().unwrap(); 87 let rule: SsrRule = rule.parse().unwrap();
78 match_finder.add_rule(rule); 88 match_finder.add_rule(rule).unwrap();
79 } 89 }
80 if let Some(edits) = match_finder.edits_for_file(file_id) { 90 let edits = match_finder.edits();
81 // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters 91 if edits.is_empty() {
82 // stuff.
83 let mut after = db.file_text(file_id).to_string();
84 edits.apply(&mut after);
85 // Likewise, we need to make sure that whatever transformations fixture parsing applies,
86 // also get applied to our expected result.
87 let result = normalize_code(result);
88 assert_eq!(after, result);
89 } else {
90 panic!("No edits were made"); 92 panic!("No edits were made");
91 } 93 }
94 assert_eq!(edits[0].file_id, position.file_id);
95 // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
96 // stuff.
97 let mut actual = db.file_text(position.file_id).to_string();
98 edits[0].edit.apply(&mut actual);
99 expected.assert_eq(&actual);
92} 100}
93 101
94fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet: &str) { 102fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet: &str) {
@@ -104,39 +112,34 @@ fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet:
104} 112}
105 113
106fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { 114fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
107 let (db, file_id) = single_file(code); 115 let (db, position) = single_file(code);
108 let mut match_finder = MatchFinder::new(&db); 116 let mut match_finder = MatchFinder::in_context(&db, position);
109 match_finder.add_search_pattern(pattern.parse().unwrap()); 117 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
110 let matched_strings: Vec<String> = match_finder 118 let matched_strings: Vec<String> =
111 .find_matches_in_file(file_id) 119 match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
112 .flattened()
113 .matches
114 .iter()
115 .map(|m| m.matched_text())
116 .collect();
117 if matched_strings != expected && !expected.is_empty() { 120 if matched_strings != expected && !expected.is_empty() {
118 print_match_debug_info(&match_finder, file_id, &expected[0]); 121 print_match_debug_info(&match_finder, position.file_id, &expected[0]);
119 } 122 }
120 assert_eq!(matched_strings, expected); 123 assert_eq!(matched_strings, expected);
121} 124}
122 125
123fn assert_no_match(pattern: &str, code: &str) { 126fn assert_no_match(pattern: &str, code: &str) {
124 let (db, file_id) = single_file(code); 127 let (db, position) = single_file(code);
125 let mut match_finder = MatchFinder::new(&db); 128 let mut match_finder = MatchFinder::in_context(&db, position);
126 match_finder.add_search_pattern(pattern.parse().unwrap()); 129 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
127 let matches = match_finder.find_matches_in_file(file_id).flattened().matches; 130 let matches = match_finder.matches().flattened().matches;
128 if !matches.is_empty() { 131 if !matches.is_empty() {
129 print_match_debug_info(&match_finder, file_id, &matches[0].matched_text()); 132 print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text());
130 panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches); 133 panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches);
131 } 134 }
132} 135}
133 136
134fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) { 137fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
135 let (db, file_id) = single_file(code); 138 let (db, position) = single_file(code);
136 let mut match_finder = MatchFinder::new(&db); 139 let mut match_finder = MatchFinder::in_context(&db, position);
137 match_finder.add_search_pattern(pattern.parse().unwrap()); 140 match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
138 let mut reasons = Vec::new(); 141 let mut reasons = Vec::new();
139 for d in match_finder.debug_where_text_equal(file_id, snippet) { 142 for d in match_finder.debug_where_text_equal(position.file_id, snippet) {
140 if let Some(reason) = d.match_failure_reason() { 143 if let Some(reason) = d.match_failure_reason() {
141 reasons.push(reason.to_owned()); 144 reasons.push(reason.to_owned());
142 } 145 }
@@ -149,7 +152,7 @@ fn ssr_function_to_method() {
149 assert_ssr_transform( 152 assert_ssr_transform(
150 "my_function($a, $b) ==>> ($a).my_method($b)", 153 "my_function($a, $b) ==>> ($a).my_method($b)",
151 "fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }", 154 "fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }",
152 "fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }", 155 expect![["fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }"]],
153 ) 156 )
154} 157}
155 158
@@ -157,8 +160,19 @@ fn ssr_function_to_method() {
157fn ssr_nested_function() { 160fn ssr_nested_function() {
158 assert_ssr_transform( 161 assert_ssr_transform(
159 "foo($a, $b, $c) ==>> bar($c, baz($a, $b))", 162 "foo($a, $b, $c) ==>> bar($c, baz($a, $b))",
160 "fn foo() {} fn main { foo (x + value.method(b), x+y-z, true && false) }", 163 r#"
161 "fn foo() {} fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }", 164 //- /lib.rs crate:foo
165 fn foo() {}
166 fn bar() {}
167 fn baz() {}
168 fn main { foo (x + value.method(b), x+y-z, true && false) }
169 "#,
170 expect![[r#"
171 fn foo() {}
172 fn bar() {}
173 fn baz() {}
174 fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }
175 "#]],
162 ) 176 )
163} 177}
164 178
@@ -167,7 +181,7 @@ fn ssr_expected_spacing() {
167 assert_ssr_transform( 181 assert_ssr_transform(
168 "foo($x) + bar() ==>> bar($x)", 182 "foo($x) + bar() ==>> bar($x)",
169 "fn foo() {} fn bar() {} fn main() { foo(5) + bar() }", 183 "fn foo() {} fn bar() {} fn main() { foo(5) + bar() }",
170 "fn foo() {} fn bar() {} fn main() { bar(5) }", 184 expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
171 ); 185 );
172} 186}
173 187
@@ -176,7 +190,7 @@ fn ssr_with_extra_space() {
176 assert_ssr_transform( 190 assert_ssr_transform(
177 "foo($x ) + bar() ==>> bar($x)", 191 "foo($x ) + bar() ==>> bar($x)",
178 "fn foo() {} fn bar() {} fn main() { foo( 5 ) +bar( ) }", 192 "fn foo() {} fn bar() {} fn main() { foo( 5 ) +bar( ) }",
179 "fn foo() {} fn bar() {} fn main() { bar(5) }", 193 expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
180 ); 194 );
181} 195}
182 196
@@ -184,8 +198,8 @@ fn ssr_with_extra_space() {
184fn ssr_keeps_nested_comment() { 198fn ssr_keeps_nested_comment() {
185 assert_ssr_transform( 199 assert_ssr_transform(
186 "foo($x) ==>> bar($x)", 200 "foo($x) ==>> bar($x)",
187 "fn foo() {} fn main() { foo(other(5 /* using 5 */)) }", 201 "fn foo() {} fn bar() {} fn main() { foo(other(5 /* using 5 */)) }",
188 "fn foo() {} fn main() { bar(other(5 /* using 5 */)) }", 202 expect![["fn foo() {} fn bar() {} fn main() { bar(other(5 /* using 5 */)) }"]],
189 ) 203 )
190} 204}
191 205
@@ -193,17 +207,25 @@ fn ssr_keeps_nested_comment() {
193fn ssr_keeps_comment() { 207fn ssr_keeps_comment() {
194 assert_ssr_transform( 208 assert_ssr_transform(
195 "foo($x) ==>> bar($x)", 209 "foo($x) ==>> bar($x)",
196 "fn foo() {} fn main() { foo(5 /* using 5 */) }", 210 "fn foo() {} fn bar() {} fn main() { foo(5 /* using 5 */) }",
197 "fn foo() {} fn main() { bar(5)/* using 5 */ }", 211 expect![["fn foo() {} fn bar() {} fn main() { bar(5)/* using 5 */ }"]],
198 ) 212 )
199} 213}
200 214
201#[test] 215#[test]
202fn ssr_struct_lit() { 216fn ssr_struct_lit() {
203 assert_ssr_transform( 217 assert_ssr_transform(
204 "foo{a: $a, b: $b} ==>> foo::new($a, $b)", 218 "Foo{a: $a, b: $b} ==>> Foo::new($a, $b)",
205 "fn foo() {} fn main() { foo{b:2, a:1} }", 219 r#"
206 "fn foo() {} fn main() { foo::new(1, 2) }", 220 struct Foo() {}
221 impl Foo { fn new() {} }
222 fn main() { Foo{b:2, a:1} }
223 "#,
224 expect![[r#"
225 struct Foo() {}
226 impl Foo { fn new() {} }
227 fn main() { Foo::new(1, 2) }
228 "#]],
207 ) 229 )
208} 230}
209 231
@@ -315,7 +337,7 @@ fn match_struct_instantiation() {
315fn match_path() { 337fn match_path() {
316 let code = r#" 338 let code = r#"
317 mod foo { 339 mod foo {
318 fn bar() {} 340 pub fn bar() {}
319 } 341 }
320 fn f() {foo::bar(42)}"#; 342 fn f() {foo::bar(42)}"#;
321 assert_matches("foo::bar", code, &["foo::bar"]); 343 assert_matches("foo::bar", code, &["foo::bar"]);
@@ -328,6 +350,60 @@ fn match_pattern() {
328 assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]); 350 assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]);
329} 351}
330 352
353// If our pattern has a full path, e.g. a::b::c() and the code has c(), but c resolves to
354// a::b::c, then we should match.
355#[test]
356fn match_fully_qualified_fn_path() {
357 let code = r#"
358 mod a {
359 pub mod b {
360 pub fn c(_: i32) {}
361 }
362 }
363 use a::b::c;
364 fn f1() {
365 c(42);
366 }
367 "#;
368 assert_matches("a::b::c($a)", code, &["c(42)"]);
369}
370
371#[test]
372fn match_resolved_type_name() {
373 let code = r#"
374 mod m1 {
375 pub mod m2 {
376 pub trait Foo<T> {}
377 }
378 }
379 mod m3 {
380 trait Foo<T> {}
381 fn f1(f: Option<&dyn Foo<bool>>) {}
382 }
383 mod m4 {
384 use crate::m1::m2::Foo;
385 fn f1(f: Option<&dyn Foo<i32>>) {}
386 }
387 "#;
388 assert_matches("m1::m2::Foo<$t>", code, &["Foo<i32>"]);
389}
390
391#[test]
392fn type_arguments_within_path() {
393 mark::check!(type_arguments_within_path);
394 let code = r#"
395 mod foo {
396 pub struct Bar<T> {t: T}
397 impl<T> Bar<T> {
398 pub fn baz() {}
399 }
400 }
401 fn f1() {foo::Bar::<i32>::baz();}
402 "#;
403 assert_no_match("foo::Bar::<i64>::baz()", code);
404 assert_matches("foo::Bar::<i32>::baz()", code, &["foo::Bar::<i32>::baz()"]);
405}
406
331#[test] 407#[test]
332fn literal_constraint() { 408fn literal_constraint() {
333 mark::check!(literal_constraint); 409 mark::check!(literal_constraint);
@@ -416,8 +492,8 @@ fn no_match_split_expression() {
416fn replace_function_call() { 492fn replace_function_call() {
417 assert_ssr_transform( 493 assert_ssr_transform(
418 "foo() ==>> bar()", 494 "foo() ==>> bar()",
419 "fn foo() {} fn f1() {foo(); foo();}", 495 "fn foo() {} fn bar() {} fn f1() {foo(); foo();}",
420 "fn foo() {} fn f1() {bar(); bar();}", 496 expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]],
421 ); 497 );
422} 498}
423 499
@@ -425,8 +501,8 @@ fn replace_function_call() {
425fn replace_function_call_with_placeholders() { 501fn replace_function_call_with_placeholders() {
426 assert_ssr_transform( 502 assert_ssr_transform(
427 "foo($a, $b) ==>> bar($b, $a)", 503 "foo($a, $b) ==>> bar($b, $a)",
428 "fn foo() {} fn f1() {foo(5, 42)}", 504 "fn foo() {} fn bar() {} fn f1() {foo(5, 42)}",
429 "fn foo() {} fn f1() {bar(42, 5)}", 505 expect![["fn foo() {} fn bar() {} fn f1() {bar(42, 5)}"]],
430 ); 506 );
431} 507}
432 508
@@ -434,26 +510,120 @@ fn replace_function_call_with_placeholders() {
434fn replace_nested_function_calls() { 510fn replace_nested_function_calls() {
435 assert_ssr_transform( 511 assert_ssr_transform(
436 "foo($a) ==>> bar($a)", 512 "foo($a) ==>> bar($a)",
437 "fn foo() {} fn f1() {foo(foo(42))}", 513 "fn foo() {} fn bar() {} fn f1() {foo(foo(42))}",
438 "fn foo() {} fn f1() {bar(bar(42))}", 514 expect![["fn foo() {} fn bar() {} fn f1() {bar(bar(42))}"]],
439 ); 515 );
440} 516}
441 517
442#[test] 518#[test]
443fn replace_type() { 519fn replace_associated_function_call() {
444 assert_ssr_transform( 520 assert_ssr_transform(
445 "Result<(), $a> ==>> Option<$a>", 521 "Foo::new() ==>> Bar::new()",
446 "struct Result<T, E> {} fn f1() -> Result<(), Vec<Error>> {foo()}", 522 r#"
447 "struct Result<T, E> {} fn f1() -> Option<Vec<Error>> {foo()}", 523 struct Foo {}
524 impl Foo { fn new() {} }
525 struct Bar {}
526 impl Bar { fn new() {} }
527 fn f1() {Foo::new();}
528 "#,
529 expect![[r#"
530 struct Foo {}
531 impl Foo { fn new() {} }
532 struct Bar {}
533 impl Bar { fn new() {} }
534 fn f1() {Bar::new();}
535 "#]],
536 );
537}
538
539#[test]
540fn replace_path_in_different_contexts() {
541 // Note the <|> inside module a::b which marks the point where the rule is interpreted. We
542 // replace foo with bar, but both need different path qualifiers in different contexts. In f4,
543 // foo is unqualified because of a use statement, however the replacement needs to be fully
544 // qualified.
545 assert_ssr_transform(
546 "c::foo() ==>> c::bar()",
547 r#"
548 mod a {
549 pub mod b {<|>
550 pub mod c {
551 pub fn foo() {}
552 pub fn bar() {}
553 fn f1() { foo() }
554 }
555 fn f2() { c::foo() }
556 }
557 fn f3() { b::c::foo() }
558 }
559 use a::b::c::foo;
560 fn f4() { foo() }
561 "#,
562 expect![[r#"
563 mod a {
564 pub mod b {
565 pub mod c {
566 pub fn foo() {}
567 pub fn bar() {}
568 fn f1() { bar() }
569 }
570 fn f2() { c::bar() }
571 }
572 fn f3() { b::c::bar() }
573 }
574 use a::b::c::foo;
575 fn f4() { a::b::c::bar() }
576 "#]],
448 ); 577 );
449} 578}
450 579
451#[test] 580#[test]
452fn replace_struct_init() { 581fn replace_associated_function_with_generics() {
453 assert_ssr_transform( 582 assert_ssr_transform(
454 "Foo {a: $a, b: $b} ==>> Foo::new($a, $b)", 583 "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()",
455 "struct Foo {} fn f1() {Foo{b: 1, a: 2}}", 584 r#"
456 "struct Foo {} fn f1() {Foo::new(2, 1)}", 585 mod c {
586 pub struct Foo<T> {v: T}
587 impl<T> Foo<T> { pub fn new() {} }
588 fn f1() {
589 Foo::<i32>::new();
590 }
591 }
592 mod d {
593 pub struct Bar<T> {v: T}
594 impl<T> Bar<T> { pub fn default() {} }
595 fn f1() {
596 super::c::Foo::<i32>::new();
597 }
598 }
599 "#,
600 expect![[r#"
601 mod c {
602 pub struct Foo<T> {v: T}
603 impl<T> Foo<T> { pub fn new() {} }
604 fn f1() {
605 crate::d::Bar::<i32>::default();
606 }
607 }
608 mod d {
609 pub struct Bar<T> {v: T}
610 impl<T> Bar<T> { pub fn default() {} }
611 fn f1() {
612 Bar::<i32>::default();
613 }
614 }
615 "#]],
616 );
617}
618
619#[test]
620fn replace_type() {
621 assert_ssr_transform(
622 "Result<(), $a> ==>> Option<$a>",
623 "struct Result<T, E> {} struct Option<T> {} fn f1() -> Result<(), Vec<Error>> {foo()}",
624 expect![[
625 "struct Result<T, E> {} struct Option<T> {} fn f1() -> Option<Vec<Error>> {foo()}"
626 ]],
457 ); 627 );
458} 628}
459 629
@@ -462,12 +632,12 @@ fn replace_macro_invocations() {
462 assert_ssr_transform( 632 assert_ssr_transform(
463 "try!($a) ==>> $a?", 633 "try!($a) ==>> $a?",
464 "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}", 634 "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}",
465 "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}", 635 expect![["macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]],
466 ); 636 );
467 assert_ssr_transform( 637 assert_ssr_transform(
468 "foo!($a($b)) ==>> foo($b, $a)", 638 "foo!($a($b)) ==>> foo($b, $a)",
469 "macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}", 639 "macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}",
470 "macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}", 640 expect![["macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}"]],
471 ); 641 );
472} 642}
473 643
@@ -476,12 +646,12 @@ fn replace_binary_op() {
476 assert_ssr_transform( 646 assert_ssr_transform(
477 "$a + $b ==>> $b + $a", 647 "$a + $b ==>> $b + $a",
478 "fn f() {2 * 3 + 4 * 5}", 648 "fn f() {2 * 3 + 4 * 5}",
479 "fn f() {4 * 5 + 2 * 3}", 649 expect![["fn f() {4 * 5 + 2 * 3}"]],
480 ); 650 );
481 assert_ssr_transform( 651 assert_ssr_transform(
482 "$a + $b ==>> $b + $a", 652 "$a + $b ==>> $b + $a",
483 "fn f() {1 + 2 + 3 + 4}", 653 "fn f() {1 + 2 + 3 + 4}",
484 "fn f() {4 + 3 + 2 + 1}", 654 expect![["fn f() {4 + 3 + 2 + 1}"]],
485 ); 655 );
486} 656}
487 657
@@ -494,8 +664,23 @@ fn match_binary_op() {
494fn multiple_rules() { 664fn multiple_rules() {
495 assert_ssr_transforms( 665 assert_ssr_transforms(
496 &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"], 666 &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"],
497 "fn f() -> i32 {3 + 2 + 1}", 667 "fn add() {} fn add_one() {} fn f() -> i32 {3 + 2 + 1}",
498 "fn f() -> i32 {add_one(add(3, 2))}", 668 expect![["fn add() {} fn add_one() {} fn f() -> i32 {add_one(add(3, 2))}"]],
669 )
670}
671
672#[test]
673fn multiple_rules_with_nested_matches() {
674 assert_ssr_transforms(
675 &["foo1($a) ==>> bar1($a)", "foo2($a) ==>> bar2($a)"],
676 r#"
677 fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
678 fn f() {foo1(foo2(foo1(foo2(foo1(42)))))}
679 "#,
680 expect![[r#"
681 fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
682 fn f() {bar1(bar2(bar1(bar2(bar1(42)))))}
683 "#]],
499 ) 684 )
500} 685}
501 686
@@ -527,12 +712,37 @@ fn replace_within_macro_expansion() {
527 macro_rules! macro1 { 712 macro_rules! macro1 {
528 ($a:expr) => {$a} 713 ($a:expr) => {$a}
529 } 714 }
530 fn f() {macro1!(5.x().foo().o2())}"#, 715 fn bar() {}
716 fn f() {macro1!(5.x().foo().o2())}
717 "#,
718 expect![[r#"
719 macro_rules! macro1 {
720 ($a:expr) => {$a}
721 }
722 fn bar() {}
723 fn f() {macro1!(bar(5.x()).o2())}
724 "#]],
725 )
726}
727
728#[test]
729fn replace_outside_and_within_macro_expansion() {
730 assert_ssr_transform(
731 "foo($a) ==>> bar($a)",
531 r#" 732 r#"
733 fn foo() {} fn bar() {}
734 macro_rules! macro1 {
735 ($a:expr) => {$a}
736 }
737 fn f() {foo(foo(macro1!(foo(foo(42)))))}
738 "#,
739 expect![[r#"
740 fn foo() {} fn bar() {}
532 macro_rules! macro1 { 741 macro_rules! macro1 {
533 ($a:expr) => {$a} 742 ($a:expr) => {$a}
534 } 743 }
535 fn f() {macro1!(bar(5.x()).o2())}"#, 744 fn f() {bar(bar(macro1!(bar(bar(42)))))}
745 "#]],
536 ) 746 )
537} 747}
538 748
@@ -544,12 +754,14 @@ fn preserves_whitespace_within_macro_expansion() {
544 macro_rules! macro1 { 754 macro_rules! macro1 {
545 ($a:expr) => {$a} 755 ($a:expr) => {$a}
546 } 756 }
547 fn f() {macro1!(1 * 2 + 3 + 4}"#, 757 fn f() {macro1!(1 * 2 + 3 + 4}
548 r#" 758 "#,
759 expect![[r#"
549 macro_rules! macro1 { 760 macro_rules! macro1 {
550 ($a:expr) => {$a} 761 ($a:expr) => {$a}
551 } 762 }
552 fn f() {macro1!(4 - 3 - 1 * 2}"#, 763 fn f() {macro1!(4 - 3 - 1 * 2}
764 "#]],
553 ) 765 )
554} 766}
555 767
@@ -580,3 +792,96 @@ fn match_failure_reasons() {
580 r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#, 792 r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#,
581 ); 793 );
582} 794}
795
796#[test]
797fn overlapping_possible_matches() {
798 // There are three possible matches here; however, the middle one, `foo(foo(foo(42)))`, shouldn't
799 // match because it overlaps with the outer match. The inner match is permitted since it is
800 // contained entirely within the placeholder of the outer match.
801 assert_matches(
802 "foo(foo($a))",
803 "fn foo() {} fn main() {foo(foo(foo(foo(42))))}",
804 &["foo(foo(42))", "foo(foo(foo(foo(42))))"],
805 );
806}
807
808#[test]
809fn use_declaration_with_braces() {
810 // It would be OK for a path rule to match and alter a use declaration. We shouldn't mess it up
811 // though. In particular, we must not change `use foo::{baz, bar}` to `use foo::{baz,
812 // foo2::bar2}`.
813 mark::check!(use_declaration_with_braces);
814 assert_ssr_transform(
815 "foo::bar ==>> foo2::bar2",
816 r#"
817 mod foo { pub fn bar() {} pub fn baz() {} }
818 mod foo2 { pub fn bar2() {} }
819 use foo::{baz, bar};
820 fn main() { bar() }
821 "#,
822 expect![["
823 mod foo { pub fn bar() {} pub fn baz() {} }
824 mod foo2 { pub fn bar2() {} }
825 use foo::{baz, bar};
826 fn main() { foo2::bar2() }
827 "]],
828 )
829}
830
831#[test]
832fn ufcs_matches_method_call() {
833 let code = r#"
834 struct Foo {}
835 impl Foo {
836 fn new(_: i32) -> Foo { Foo {} }
837 fn do_stuff(&self, _: i32) {}
838 }
839 struct Bar {}
840 impl Bar {
841 fn new(_: i32) -> Bar { Bar {} }
842 fn do_stuff(&self, v: i32) {}
843 }
844 fn main() {
845 let b = Bar {};
846 let f = Foo {};
847 b.do_stuff(1);
848 f.do_stuff(2);
849 Foo::new(4).do_stuff(3);
850 // Too many / too few args - should never match
851 f.do_stuff(2, 10);
852 f.do_stuff();
853 }
854 "#;
855 assert_matches("Foo::do_stuff($a, $b)", code, &["f.do_stuff(2)", "Foo::new(4).do_stuff(3)"]);
856 // The arguments need special handling in the case of a function call matching a method call
857 // where the first argument is different.
858 assert_matches("Foo::do_stuff($a, 2)", code, &["f.do_stuff(2)"]);
859 assert_matches("Foo::do_stuff(Foo::new(4), $b)", code, &["Foo::new(4).do_stuff(3)"]);
860
861 assert_ssr_transform(
862 "Foo::do_stuff(Foo::new($a), $b) ==>> Bar::new($b).do_stuff($a)",
863 code,
864 expect![[r#"
865 struct Foo {}
866 impl Foo {
867 fn new(_: i32) -> Foo { Foo {} }
868 fn do_stuff(&self, _: i32) {}
869 }
870 struct Bar {}
871 impl Bar {
872 fn new(_: i32) -> Bar { Bar {} }
873 fn do_stuff(&self, v: i32) {}
874 }
875 fn main() {
876 let b = Bar {};
877 let f = Foo {};
878 b.do_stuff(1);
879 f.do_stuff(2);
880 Bar::new(3).do_stuff(4);
881 // Too many / too few args - should never match
882 f.do_stuff(2, 10);
883 f.do_stuff();
884 }
885 "#]],
886 );
887}
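The tests were also migrated from comparing against a second, normalized fixture to `expect![[...]]` snapshot assertions (the `expect` crate imported at the top of the file), and the fixtures now declare every function and type that a rule refers to, since unresolvable paths in a rule are now an error. A minimal sketch of the snapshot-assert style, assuming only the `Expect::assert_eq` API already used above:

```rust
use expect::{expect, Expect};

// The literal inside expect![[...]] is the stored snapshot; the crate can
// rewrite it in place in its update mode rather than it being edited by hand.
fn check(actual: &str, expected: Expect) {
    expected.assert_eq(actual);
}

#[test]
fn snapshot_assertion_sketch() {
    check("fn f1() {bar(); bar();}", expect![["fn f1() {bar(); bar();}"]]);
}
```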
diff --git a/crates/rust-analyzer/src/cli/analysis_bench.rs b/crates/rust-analyzer/src/cli/analysis_bench.rs
index 9299879b7..076184ad6 100644
--- a/crates/rust-analyzer/src/cli/analysis_bench.rs
+++ b/crates/rust-analyzer/src/cli/analysis_bench.rs
@@ -70,7 +70,7 @@ pub fn analysis_bench(
70 match &what { 70 match &what {
71 BenchWhat::Highlight { .. } => { 71 BenchWhat::Highlight { .. } => {
72 let res = do_work(&mut host, file_id, |analysis| { 72 let res = do_work(&mut host, file_id, |analysis| {
73 analysis.diagnostics(file_id).unwrap(); 73 analysis.diagnostics(file_id, true).unwrap();
74 analysis.highlight_as_html(file_id, false).unwrap() 74 analysis.highlight_as_html(file_id, false).unwrap()
75 }); 75 });
76 if verbosity.is_verbose() { 76 if verbosity.is_verbose() {
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs
index 6f3c1c1f9..4ac8c8772 100644
--- a/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -47,7 +47,7 @@ pub fn diagnostics(
47 String::from("unknown") 47 String::from("unknown")
48 }; 48 };
49 println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id)); 49 println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id));
50 for diagnostic in analysis.diagnostics(file_id).unwrap() { 50 for diagnostic in analysis.diagnostics(file_id, true).unwrap() {
51 if matches!(diagnostic.severity, Severity::Error) { 51 if matches!(diagnostic.severity, Severity::Error) {
52 found_error = true; 52 found_error = true;
53 } 53 }
diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs
index 4fb829ea5..194bec008 100644
--- a/crates/rust-analyzer/src/cli/ssr.rs
+++ b/crates/rust-analyzer/src/cli/ssr.rs
@@ -1,27 +1,17 @@
1//! Applies structured search replace rules from the command line. 1//! Applies structured search replace rules from the command line.
2 2
3use crate::cli::{load_cargo::load_cargo, Result}; 3use crate::cli::{load_cargo::load_cargo, Result};
4use ra_ide::SourceFileEdit;
5use ra_ssr::{MatchFinder, SsrPattern, SsrRule}; 4use ra_ssr::{MatchFinder, SsrPattern, SsrRule};
6 5
7pub fn apply_ssr_rules(rules: Vec<SsrRule>) -> Result<()> { 6pub fn apply_ssr_rules(rules: Vec<SsrRule>) -> Result<()> {
8 use ra_db::SourceDatabaseExt; 7 use ra_db::SourceDatabaseExt;
9 use ra_ide_db::symbol_index::SymbolsDatabase;
10 let (host, vfs) = load_cargo(&std::env::current_dir()?, true, true)?; 8 let (host, vfs) = load_cargo(&std::env::current_dir()?, true, true)?;
11 let db = host.raw_database(); 9 let db = host.raw_database();
12 let mut match_finder = MatchFinder::new(db); 10 let mut match_finder = MatchFinder::at_first_file(db)?;
13 for rule in rules { 11 for rule in rules {
14 match_finder.add_rule(rule); 12 match_finder.add_rule(rule)?;
15 }
16 let mut edits = Vec::new();
17 for &root in db.local_roots().iter() {
18 let sr = db.source_root(root);
19 for file_id in sr.iter() {
20 if let Some(edit) = match_finder.edits_for_file(file_id) {
21 edits.push(SourceFileEdit { file_id, edit });
22 }
23 }
24 } 13 }
14 let edits = match_finder.edits();
25 for edit in edits { 15 for edit in edits {
26 if let Some(path) = vfs.file_path(edit.file_id).as_path() { 16 if let Some(path) = vfs.file_path(edit.file_id).as_path() {
27 let mut contents = db.file_text(edit.file_id).to_string(); 17 let mut contents = db.file_text(edit.file_id).to_string();
@@ -38,34 +28,27 @@ pub fn apply_ssr_rules(rules: Vec<SsrRule>) -> Result<()> {
38pub fn search_for_patterns(patterns: Vec<SsrPattern>, debug_snippet: Option<String>) -> Result<()> { 28pub fn search_for_patterns(patterns: Vec<SsrPattern>, debug_snippet: Option<String>) -> Result<()> {
39 use ra_db::SourceDatabaseExt; 29 use ra_db::SourceDatabaseExt;
40 use ra_ide_db::symbol_index::SymbolsDatabase; 30 use ra_ide_db::symbol_index::SymbolsDatabase;
41 let (host, vfs) = load_cargo(&std::env::current_dir()?, true, true)?; 31 let (host, _vfs) = load_cargo(&std::env::current_dir()?, true, true)?;
42 let db = host.raw_database(); 32 let db = host.raw_database();
43 let mut match_finder = MatchFinder::new(db); 33 let mut match_finder = MatchFinder::at_first_file(db)?;
44 for pattern in patterns { 34 for pattern in patterns {
45 match_finder.add_search_pattern(pattern); 35 match_finder.add_search_pattern(pattern)?;
46 } 36 }
47 for &root in db.local_roots().iter() { 37 if let Some(debug_snippet) = &debug_snippet {
48 let sr = db.source_root(root); 38 for &root in db.local_roots().iter() {
49 for file_id in sr.iter() { 39 let sr = db.source_root(root);
50 if let Some(debug_snippet) = &debug_snippet { 40 for file_id in sr.iter() {
51 for debug_info in match_finder.debug_where_text_equal(file_id, debug_snippet) { 41 for debug_info in match_finder.debug_where_text_equal(file_id, debug_snippet) {
52 println!("{:#?}", debug_info); 42 println!("{:#?}", debug_info);
53 } 43 }
54 } else {
55 let matches = match_finder.find_matches_in_file(file_id);
56 if !matches.matches.is_empty() {
57 let matches = matches.flattened().matches;
58 if let Some(path) = vfs.file_path(file_id).as_path() {
59 println!("{} matches in '{}'", matches.len(), path.to_string_lossy());
60 }
61 // We could possibly at some point do something more useful than just printing
62 // the matched text. For now though, that's the easiest thing to do.
63 for m in matches {
64 println!("{}", m.matched_text());
65 }
66 }
67 } 44 }
68 } 45 }
46 } else {
47 for m in match_finder.matches().flattened().matches {
48 // We could possibly at some point do something more useful than just printing
49 // the matched text. For now though, that's the easiest thing to do.
50 println!("{}", m.matched_text());
51 }
69 } 52 }
70 Ok(()) 53 Ok(())
71} 54}
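For reference, a minimal sketch of the reworked ra_ssr entry points as used above, assuming (as the CLI argument parsing implies) that SsrRule can still be parsed from a string:

    // Build a MatchFinder whose resolution context is the first local file, add a
    // rule (now fallible because paths in the rule are resolved eagerly), and
    // collect edits across all local source roots.
    let mut match_finder = MatchFinder::at_first_file(db)?;
    match_finder.add_rule("foo($a) ==>> bar($a)".parse()?)?;
    for edit in match_finder.edits() {
        // each edit carries the FileId it applies to plus the text edit itself
        let _ = edit.file_id;
    }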
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 8947ccf07..e11c8b909 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -23,6 +23,7 @@ pub struct Config {
23 pub client_caps: ClientCapsConfig, 23 pub client_caps: ClientCapsConfig,
24 24
25 pub publish_diagnostics: bool, 25 pub publish_diagnostics: bool,
26 pub experimental_diagnostics: bool,
26 pub diagnostics: DiagnosticsConfig, 27 pub diagnostics: DiagnosticsConfig,
27 pub lru_capacity: Option<usize>, 28 pub lru_capacity: Option<usize>,
28 pub proc_macro_srv: Option<(PathBuf, Vec<OsString>)>, 29 pub proc_macro_srv: Option<(PathBuf, Vec<OsString>)>,
@@ -137,6 +138,7 @@ impl Config {
137 138
138 with_sysroot: true, 139 with_sysroot: true,
139 publish_diagnostics: true, 140 publish_diagnostics: true,
141 experimental_diagnostics: true,
140 diagnostics: DiagnosticsConfig::default(), 142 diagnostics: DiagnosticsConfig::default(),
141 lru_capacity: None, 143 lru_capacity: None,
142 proc_macro_srv: None, 144 proc_macro_srv: None,
@@ -187,6 +189,7 @@ impl Config {
187 189
188 self.with_sysroot = data.withSysroot; 190 self.with_sysroot = data.withSysroot;
189 self.publish_diagnostics = data.diagnostics_enable; 191 self.publish_diagnostics = data.diagnostics_enable;
192 self.experimental_diagnostics = data.diagnostics_enableExperimental;
190 self.diagnostics = DiagnosticsConfig { 193 self.diagnostics = DiagnosticsConfig {
191 warnings_as_info: data.diagnostics_warningsAsInfo, 194 warnings_as_info: data.diagnostics_warningsAsInfo,
192 warnings_as_hint: data.diagnostics_warningsAsHint, 195 warnings_as_hint: data.diagnostics_warningsAsHint,
@@ -405,6 +408,7 @@ config_data! {
405 completion_postfix_enable: bool = true, 408 completion_postfix_enable: bool = true,
406 409
407 diagnostics_enable: bool = true, 410 diagnostics_enable: bool = true,
411 diagnostics_enableExperimental: bool = true,
408 diagnostics_warningsAsHint: Vec<String> = Vec::new(), 412 diagnostics_warningsAsHint: Vec<String> = Vec::new(),
409 diagnostics_warningsAsInfo: Vec<String> = Vec::new(), 413 diagnostics_warningsAsInfo: Vec<String> = Vec::new(),
410 414
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index 8d8c9442b..cd309ed74 100644
--- a/crates/rust-analyzer/src/handlers.rs
+++ b/crates/rust-analyzer/src/handlers.rs
@@ -774,7 +774,7 @@ fn handle_fixes(
774 None => {} 774 None => {}
775 }; 775 };
776 776
777 let diagnostics = snap.analysis.diagnostics(file_id)?; 777 let diagnostics = snap.analysis.diagnostics(file_id, snap.config.experimental_diagnostics)?;
778 778
779 let fixes_from_diagnostics = diagnostics 779 let fixes_from_diagnostics = diagnostics
780 .into_iter() 780 .into_iter()
@@ -1026,8 +1026,9 @@ pub(crate) fn handle_ssr(
1026 params: lsp_ext::SsrParams, 1026 params: lsp_ext::SsrParams,
1027) -> Result<lsp_types::WorkspaceEdit> { 1027) -> Result<lsp_types::WorkspaceEdit> {
1028 let _p = profile("handle_ssr"); 1028 let _p = profile("handle_ssr");
1029 let position = from_proto::file_position(&snap, params.position)?;
1029 let source_change = 1030 let source_change =
1030 snap.analysis.structural_search_replace(&params.query, params.parse_only)??; 1031 snap.analysis.structural_search_replace(&params.query, params.parse_only, position)??;
1031 to_proto::workspace_edit(&snap, source_change) 1032 to_proto::workspace_edit(&snap, source_change)
1032} 1033}
1033 1034
@@ -1039,7 +1040,7 @@ pub(crate) fn publish_diagnostics(
1039 let line_index = snap.analysis.file_line_index(file_id)?; 1040 let line_index = snap.analysis.file_line_index(file_id)?;
1040 let diagnostics: Vec<Diagnostic> = snap 1041 let diagnostics: Vec<Diagnostic> = snap
1041 .analysis 1042 .analysis
1042 .diagnostics(file_id)? 1043 .diagnostics(file_id, snap.config.experimental_diagnostics)?
1043 .into_iter() 1044 .into_iter()
1044 .map(|d| Diagnostic { 1045 .map(|d| Diagnostic {
1045 range: to_proto::range(&line_index, d.range), 1046 range: to_proto::range(&line_index, d.range),
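Both diagnostics call sites now thread the configured flag through, and handle_ssr gains a resolution context. A sketch of the updated ide-level call as inferred from the call site above (the actual signature lives in ra_ide and is not shown in this hunk; `file_id` and `offset` are assumed bindings):

    // The FilePosition records where the SSR query was written, so paths in the
    // query resolve in that scope; the outer `?` handles cancellation, the inner
    // one the SSR parse/resolve error.
    let position = ra_ide::FilePosition { file_id, offset };
    let source_change = analysis.structural_search_replace(&query, false, position)??;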
diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs
index 13ebb18fb..113e0e070 100644
--- a/crates/rust-analyzer/src/lsp_ext.rs
+++ b/crates/rust-analyzer/src/lsp_ext.rs
@@ -216,6 +216,11 @@ impl Request for Ssr {
216pub struct SsrParams { 216pub struct SsrParams {
217 pub query: String, 217 pub query: String,
218 pub parse_only: bool, 218 pub parse_only: bool,
219
220 /// File position where SSR was invoked. Paths in `query` will be resolved relative to this
221 /// position.
222 #[serde(flatten)]
223 pub position: lsp_types::TextDocumentPositionParams,
219} 224}
220 225
221pub enum StatusNotification {} 226pub enum StatusNotification {}
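Because the new field is marked #[serde(flatten)], clients send the position fields at the top level of the request rather than under a nested key. A small illustration, assuming SsrParams keeps the crate's usual camelCase serialization (the helper below is hypothetical, not part of the patch):

    use lsp_types::{Position, TextDocumentIdentifier, TextDocumentPositionParams, Url};

    // Builds an SsrParams value whose JSON wire shape would be roughly:
    // { "query": "foo($a) ==>> bar($a)", "parseOnly": false,
    //   "textDocument": { "uri": "file:///project/src/lib.rs" },
    //   "position": { "line": 3, "character": 7 } }
    fn example_ssr_params() -> SsrParams {
        SsrParams {
            query: "foo($a) ==>> bar($a)".to_string(),
            parse_only: false,
            position: TextDocumentPositionParams {
                text_document: TextDocumentIdentifier {
                    uri: Url::parse("file:///project/src/lib.rs").unwrap(),
                },
                position: Position { line: 3, character: 7 },
            },
        }
    }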