path: root/crates
Diffstat (limited to 'crates')
-rw-r--r--  crates/expect/src/lib.rs  97
-rw-r--r--  crates/flycheck/src/lib.rs  1
-rw-r--r--  crates/ra_assists/src/handlers/auto_import.rs  12
-rw-r--r--  crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs  2
-rw-r--r--  crates/ra_assists/src/handlers/inline_local_variable.rs  2
-rw-r--r--  crates/ra_db/src/input.rs  17
-rw-r--r--  crates/ra_hir/src/code_model.rs  26
-rw-r--r--  crates/ra_hir/src/db.rs  8
-rw-r--r--  crates/ra_hir/src/semantics.rs  253
-rw-r--r--  crates/ra_hir_def/src/item_tree/lower.rs  2
-rw-r--r--  crates/ra_hir_ty/src/autoderef.rs  15
-rw-r--r--  crates/ra_hir_ty/src/db.rs  13
-rw-r--r--  crates/ra_hir_ty/src/infer/unify.rs  55
-rw-r--r--  crates/ra_hir_ty/src/lib.rs  22
-rw-r--r--  crates/ra_hir_ty/src/method_resolution.rs  235
-rw-r--r--  crates/ra_hir_ty/src/tests.rs  24
-rw-r--r--  crates/ra_hir_ty/src/tests/traits.rs  18
-rw-r--r--  crates/ra_hir_ty/src/traits.rs  16
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk.rs  10
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk/mapping.rs  43
-rw-r--r--  crates/ra_ide/src/call_hierarchy.rs  40
-rw-r--r--  crates/ra_ide/src/completion.rs  12
-rw-r--r--  crates/ra_ide/src/completion/complete_keyword.rs  6
-rw-r--r--  crates/ra_ide/src/completion/presentation.rs  50
-rw-r--r--  crates/ra_ide/src/diagnostics.rs  42
-rw-r--r--  crates/ra_ide/src/display/structure.rs  428
-rw-r--r--  crates/ra_ide/src/expand_macro.rs  241
-rw-r--r--  crates/ra_ide/src/folding_ranges.rs  161
-rw-r--r--  crates/ra_ide/src/goto_implementation.rs  4
-rw-r--r--  crates/ra_ide/src/goto_type_definition.rs  14
-rw-r--r--  crates/ra_ide/src/hover.rs  72
-rw-r--r--  crates/ra_ide/src/lib.rs  6
-rw-r--r--  crates/ra_ide/src/references.rs  9
-rw-r--r--  crates/ra_ide/src/references/rename.rs  44
-rw-r--r--  crates/ra_ide/src/syntax_highlighting/tests.rs  22
-rw-r--r--  crates/ra_ide/test_data/highlight_doctest.html (renamed from crates/ra_ide/src/snapshots/highlight_doctest.html)  0
-rw-r--r--  crates/ra_ide/test_data/highlight_injection.html (renamed from crates/ra_ide/src/snapshots/highlight_injection.html)  0
-rw-r--r--  crates/ra_ide/test_data/highlight_strings.html (renamed from crates/ra_ide/src/snapshots/highlight_strings.html)  0
-rw-r--r--  crates/ra_ide/test_data/highlight_unsafe.html (renamed from crates/ra_ide/src/snapshots/highlight_unsafe.html)  0
-rw-r--r--  crates/ra_ide/test_data/highlighting.html (renamed from crates/ra_ide/src/snapshots/highlighting.html)  0
-rw-r--r--  crates/ra_ide/test_data/rainbow_highlighting.html (renamed from crates/ra_ide/src/snapshots/rainbow_highlighting.html)  0
-rw-r--r--  crates/ra_ide_db/src/change.rs  5
-rw-r--r--  crates/ra_ide_db/src/imports_locator.rs  88
-rw-r--r--  crates/ra_ide_db/src/lib.rs  8
-rw-r--r--  crates/ra_ide_db/src/search.rs  12
-rw-r--r--  crates/ra_proc_macro_srv/src/tests/mod.rs  6
-rw-r--r--  crates/ra_proc_macro_srv/src/tests/utils.rs  4
-rw-r--r--  crates/ra_project_model/src/project_json.rs  6
-rw-r--r--  crates/ra_syntax/src/tests.rs  100
-rw-r--r--  crates/rust-analyzer/src/config.rs  2
-rw-r--r--  crates/rust-analyzer/src/global_state.rs  2
-rw-r--r--  crates/rust-analyzer/src/handlers.rs  6
-rw-r--r--  crates/rust-analyzer/src/lsp_ext.rs  6
-rw-r--r--  crates/rust-analyzer/src/main_loop.rs  42
-rw-r--r--  crates/rust-analyzer/src/reload.rs  5
-rw-r--r--  crates/rust-analyzer/src/to_proto.rs  31
-rw-r--r--  crates/test_utils/src/lib.rs  133
57 files changed, 1444 insertions, 1034 deletions
diff --git a/crates/expect/src/lib.rs b/crates/expect/src/lib.rs
index dd7b96aab..a5e26fade 100644
--- a/crates/expect/src/lib.rs
+++ b/crates/expect/src/lib.rs
@@ -2,7 +2,7 @@
2//! https://github.com/rust-analyzer/rust-analyzer/pull/5101 2//! https://github.com/rust-analyzer/rust-analyzer/pull/5101
3use std::{ 3use std::{
4 collections::HashMap, 4 collections::HashMap,
5 env, fmt, fs, 5 env, fmt, fs, mem,
6 ops::Range, 6 ops::Range,
7 panic, 7 panic,
8 path::{Path, PathBuf}, 8 path::{Path, PathBuf},
@@ -14,7 +14,7 @@ use once_cell::sync::Lazy;
14use stdx::{lines_with_ends, trim_indent}; 14use stdx::{lines_with_ends, trim_indent};
15 15
16const HELP: &str = " 16const HELP: &str = "
17You can update all `expect![[]]` tests by: 17You can update all `expect![[]]` tests by running:
18 18
19 env UPDATE_EXPECT=1 cargo test 19 env UPDATE_EXPECT=1 cargo test
20 20
@@ -25,24 +25,48 @@ fn update_expect() -> bool {
25 env::var("UPDATE_EXPECT").is_ok() 25 env::var("UPDATE_EXPECT").is_ok()
26} 26}
27 27
28/// expect![[""]] 28/// expect![[r#"inline snapshot"#]]
29#[macro_export] 29#[macro_export]
30macro_rules! expect { 30macro_rules! expect {
31 [[$lit:literal]] => {$crate::Expect { 31 [[$data:literal]] => {$crate::Expect {
32 file: file!(), 32 position: $crate::Position {
33 line: line!(), 33 file: file!(),
34 column: column!(), 34 line: line!(),
35 data: $lit, 35 column: column!(),
36 },
37 data: $data,
36 }}; 38 }};
37 [[]] => { $crate::expect![[""]] }; 39 [[]] => { $crate::expect![[""]] };
38} 40}
39 41
42/// expect_file!["/crates/foo/test_data/bar.html"]
43#[macro_export]
44macro_rules! expect_file {
45 [$path:literal] => {$crate::ExpectFile { path: $path }};
46}
47
40#[derive(Debug)] 48#[derive(Debug)]
41pub struct Expect { 49pub struct Expect {
50 pub position: Position,
51 pub data: &'static str,
52}
53
54#[derive(Debug)]
55pub struct ExpectFile {
56 pub path: &'static str,
57}
58
59#[derive(Debug)]
60pub struct Position {
42 pub file: &'static str, 61 pub file: &'static str,
43 pub line: u32, 62 pub line: u32,
44 pub column: u32, 63 pub column: u32,
45 pub data: &'static str, 64}
65
66impl fmt::Display for Position {
67 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
68 write!(f, "{}:{}:{}", self.file, self.line, self.column)
69 }
46} 70}
47 71
48impl Expect { 72impl Expect {
@@ -51,7 +75,7 @@ impl Expect {
51 if &trimmed == actual { 75 if &trimmed == actual {
52 return; 76 return;
53 } 77 }
54 Runtime::fail(self, &trimmed, actual); 78 Runtime::fail_expect(self, &trimmed, actual);
55 } 79 }
56 pub fn assert_debug_eq(&self, actual: &impl fmt::Debug) { 80 pub fn assert_debug_eq(&self, actual: &impl fmt::Debug) {
57 let actual = format!("{:#?}\n", actual); 81 let actual = format!("{:#?}\n", actual);
@@ -69,7 +93,7 @@ impl Expect {
69 let mut target_line = None; 93 let mut target_line = None;
70 let mut line_start = 0; 94 let mut line_start = 0;
71 for (i, line) in lines_with_ends(file).enumerate() { 95 for (i, line) in lines_with_ends(file).enumerate() {
72 if i == self.line as usize - 1 { 96 if i == self.position.line as usize - 1 {
73 let pat = "expect![["; 97 let pat = "expect![[";
74 let offset = line.find(pat).unwrap(); 98 let offset = line.find(pat).unwrap();
75 let literal_start = line_start + offset + pat.len(); 99 let literal_start = line_start + offset + pat.len();
@@ -87,6 +111,25 @@ impl Expect {
87 } 111 }
88} 112}
89 113
114impl ExpectFile {
115 pub fn assert_eq(&self, actual: &str) {
116 let expected = self.read();
117 if actual == expected {
118 return;
119 }
120 Runtime::fail_file(self, &expected, actual);
121 }
122 fn read(&self) -> String {
123 fs::read_to_string(self.abs_path()).unwrap_or_default().replace("\r\n", "\n")
124 }
125 fn write(&self, contents: &str) {
126 fs::write(self.abs_path(), contents).unwrap()
127 }
128 fn abs_path(&self) -> PathBuf {
129 workspace_root().join(self.path)
130 }
131}
132
90#[derive(Default)] 133#[derive(Default)]
91struct Runtime { 134struct Runtime {
92 help_printed: bool, 135 help_printed: bool,
@@ -95,27 +138,39 @@ struct Runtime {
95static RT: Lazy<Mutex<Runtime>> = Lazy::new(Default::default); 138static RT: Lazy<Mutex<Runtime>> = Lazy::new(Default::default);
96 139
97impl Runtime { 140impl Runtime {
98 fn fail(expect: &Expect, expected: &str, actual: &str) { 141 fn fail_expect(expect: &Expect, expected: &str, actual: &str) {
99 let mut rt = RT.lock().unwrap_or_else(|poisoned| poisoned.into_inner()); 142 let mut rt = RT.lock().unwrap_or_else(|poisoned| poisoned.into_inner());
100 let mut updated = "";
101 if update_expect() { 143 if update_expect() {
102 updated = " (updated)"; 144 println!("\x1b[1m\x1b[92mupdating\x1b[0m: {}", expect.position);
103 rt.per_file 145 rt.per_file
104 .entry(expect.file) 146 .entry(expect.position.file)
105 .or_insert_with(|| FileRuntime::new(expect)) 147 .or_insert_with(|| FileRuntime::new(expect))
106 .update(expect, actual); 148 .update(expect, actual);
149 return;
107 } 150 }
108 let print_help = !rt.help_printed && !update_expect(); 151 rt.panic(expect.position.to_string(), expected, actual);
109 rt.help_printed = true; 152 }
153
154 fn fail_file(expect: &ExpectFile, expected: &str, actual: &str) {
155 let mut rt = RT.lock().unwrap_or_else(|poisoned| poisoned.into_inner());
156 if update_expect() {
157 println!("\x1b[1m\x1b[92mupdating\x1b[0m: {}", expect.path);
158 expect.write(actual);
159 return;
160 }
161 rt.panic(expect.path.to_string(), expected, actual);
162 }
110 163
164 fn panic(&mut self, position: String, expected: &str, actual: &str) {
165 let print_help = !mem::replace(&mut self.help_printed, true);
111 let help = if print_help { HELP } else { "" }; 166 let help = if print_help { HELP } else { "" };
112 167
113 let diff = Changeset::new(actual, expected, "\n"); 168 let diff = Changeset::new(actual, expected, "\n");
114 169
115 println!( 170 println!(
116 "\n 171 "\n
117\x1b[1m\x1b[91merror\x1b[97m: expect test failed\x1b[0m{} 172\x1b[1m\x1b[91merror\x1b[97m: expect test failed\x1b[0m
118 \x1b[1m\x1b[34m-->\x1b[0m {}:{}:{} 173 \x1b[1m\x1b[34m-->\x1b[0m {}
119{} 174{}
120\x1b[1mExpect\x1b[0m: 175\x1b[1mExpect\x1b[0m:
121---- 176----
@@ -132,7 +187,7 @@ impl Runtime {
132{} 187{}
133---- 188----
134", 189",
135 updated, expect.file, expect.line, expect.column, help, expected, actual, diff 190 position, help, expected, actual, diff
136 ); 191 );
137 // Use resume_unwind instead of panic!() to prevent a backtrace, which is unnecessary noise. 192 // Use resume_unwind instead of panic!() to prevent a backtrace, which is unnecessary noise.
138 panic::resume_unwind(Box::new(())); 193 panic::resume_unwind(Box::new(()));
@@ -147,7 +202,7 @@ struct FileRuntime {
147 202
148impl FileRuntime { 203impl FileRuntime {
149 fn new(expect: &Expect) -> FileRuntime { 204 fn new(expect: &Expect) -> FileRuntime {
150 let path = workspace_root().join(expect.file); 205 let path = workspace_root().join(expect.position.file);
151 let original_text = fs::read_to_string(&path).unwrap(); 206 let original_text = fs::read_to_string(&path).unwrap();
152 let patchwork = Patchwork::new(original_text.clone()); 207 let patchwork = Patchwork::new(original_text.clone());
153 FileRuntime { path, original_text, patchwork } 208 FileRuntime { path, original_text, patchwork }
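
The reworked `expect` crate above moves the call-site fields into a dedicated `Position` struct and adds `expect_file!` for snapshots kept on disk; both failure paths now funnel through a shared `Runtime::panic` that prints a `-->` location line. As a rough standalone sketch of the position-capture mechanism (toy code, not the crate itself): `file!`, `line!` and `column!` expanded from inside the macro report the invocation site, which is what lets the test locate and patch its own literal.

    use std::fmt;

    // Mirrors the new `Position` struct: where in the source the macro was invoked.
    #[derive(Debug)]
    struct Position {
        file: &'static str,
        line: u32,
        column: u32,
    }

    impl fmt::Display for Position {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "{}:{}:{}", self.file, self.line, self.column)
        }
    }

    // `file!`/`line!`/`column!` report the macro's invocation site, which is how
    // `expect![[...]]` knows where its snapshot literal lives.
    macro_rules! capture_position {
        () => {
            Position { file: file!(), line: line!(), column: column!() }
        };
    }

    fn main() {
        let pos = capture_position!();
        // Prints e.g. `src/main.rs:27:15`, matching the `--> file:line:column`
        // arrow line in the reworked failure output.
        println!("--> {}", pos);
        // Snapshot updating stays opt-in, exactly as the HELP text describes.
        let update_mode = std::env::var("UPDATE_EXPECT").is_ok();
        println!("update mode: {}", update_mode);
    }
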
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index 1023d3040..844b093d4 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -132,6 +132,7 @@ impl FlycheckActor {
132 self.cancel_check_process(); 132 self.cancel_check_process();
133 133
134 let mut command = self.check_command(); 134 let mut command = self.check_command();
135 log::info!("restart flycheck {:?}", command);
135 command.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null()); 136 command.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null());
136 if let Ok(child) = command.spawn().map(JodChild) { 137 if let Ok(child) = command.spawn().map(JodChild) {
137 self.cargo_handle = Some(CargoHandle::spawn(child)); 138 self.cargo_handle = Some(CargoHandle::spawn(child));
diff --git a/crates/ra_assists/src/handlers/auto_import.rs b/crates/ra_assists/src/handlers/auto_import.rs
index e8060a491..7b6499a08 100644
--- a/crates/ra_assists/src/handlers/auto_import.rs
+++ b/crates/ra_assists/src/handlers/auto_import.rs
@@ -5,7 +5,7 @@ use hir::{
5 AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, Semantics, Trait, 5 AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, Semantics, Trait,
6 Type, 6 Type,
7}; 7};
8use ra_ide_db::{imports_locator::ImportsLocator, RootDatabase}; 8use ra_ide_db::{imports_locator, RootDatabase};
9use ra_prof::profile; 9use ra_prof::profile;
10use ra_syntax::{ 10use ra_syntax::{
11 ast::{self, AstNode}, 11 ast::{self, AstNode},
@@ -35,8 +35,8 @@ use crate::{utils::insert_use_statement, AssistContext, AssistId, Assists, Group
35// # pub mod std { pub mod collections { pub struct HashMap { } } } 35// # pub mod std { pub mod collections { pub struct HashMap { } } }
36// ``` 36// ```
37pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 37pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
38 let auto_import_assets = AutoImportAssets::new(&ctx)?; 38 let auto_import_assets = AutoImportAssets::new(ctx)?;
39 let proposed_imports = auto_import_assets.search_for_imports(ctx.db()); 39 let proposed_imports = auto_import_assets.search_for_imports(ctx);
40 if proposed_imports.is_empty() { 40 if proposed_imports.is_empty() {
41 return None; 41 return None;
42 } 42 }
@@ -127,11 +127,11 @@ impl AutoImportAssets {
127 GroupLabel(name) 127 GroupLabel(name)
128 } 128 }
129 129
130 fn search_for_imports(&self, db: &RootDatabase) -> BTreeSet<ModPath> { 130 fn search_for_imports(&self, ctx: &AssistContext) -> BTreeSet<ModPath> {
131 let _p = profile("auto_import::search_for_imports"); 131 let _p = profile("auto_import::search_for_imports");
132 let db = ctx.db();
132 let current_crate = self.module_with_name_to_import.krate(); 133 let current_crate = self.module_with_name_to_import.krate();
133 ImportsLocator::new(db, current_crate) 134 imports_locator::find_imports(&ctx.sema, current_crate, &self.get_search_query())
134 .find_imports(&self.get_search_query())
135 .into_iter() 135 .into_iter()
136 .filter_map(|candidate| match &self.import_candidate { 136 .filter_map(|candidate| match &self.import_candidate {
137 ImportCandidate::TraitAssocItem(assoc_item_type, _) => { 137 ImportCandidate::TraitAssocItem(assoc_item_type, _) => {
diff --git a/crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs
index bdf9d7ae2..ca19cf198 100644
--- a/crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -53,7 +53,7 @@ pub(crate) fn extract_struct_from_enum_variant(
53 target, 53 target,
54 |builder| { 54 |builder| {
55 let definition = Definition::ModuleDef(ModuleDef::EnumVariant(variant_hir)); 55 let definition = Definition::ModuleDef(ModuleDef::EnumVariant(variant_hir));
56 let res = definition.find_usages(&ctx.db(), None); 56 let res = definition.find_usages(&ctx.sema, None);
57 let start_offset = variant.parent_enum().syntax().text_range().start(); 57 let start_offset = variant.parent_enum().syntax().text_range().start();
58 let mut visited_modules_set = FxHashSet::default(); 58 let mut visited_modules_set = FxHashSet::default();
59 visited_modules_set.insert(current_module); 59 visited_modules_set.insert(current_module);
diff --git a/crates/ra_assists/src/handlers/inline_local_variable.rs b/crates/ra_assists/src/handlers/inline_local_variable.rs
index f4fb0056b..259839535 100644
--- a/crates/ra_assists/src/handlers/inline_local_variable.rs
+++ b/crates/ra_assists/src/handlers/inline_local_variable.rs
@@ -44,7 +44,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext) -> O
44 44
45 let def = ctx.sema.to_def(&bind_pat)?; 45 let def = ctx.sema.to_def(&bind_pat)?;
46 let def = Definition::Local(def); 46 let def = Definition::Local(def);
47 let refs = def.find_usages(ctx.db(), None); 47 let refs = def.find_usages(&ctx.sema, None);
48 if refs.is_empty() { 48 if refs.is_empty() {
49 mark::hit!(test_not_applicable_if_variable_unused); 49 mark::hit!(test_not_applicable_if_variable_unused);
50 return None; 50 return None;
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs
index 445a1ee48..aaa492759 100644
--- a/crates/ra_db/src/input.rs
+++ b/crates/ra_db/src/input.rs
@@ -197,6 +197,23 @@ impl CrateGraph {
197 self.arena.keys().copied() 197 self.arena.keys().copied()
198 } 198 }
199 199
200 /// Returns an iterator over all transitive dependencies of the given crate.
201 pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> + '_ {
202 let mut worklist = vec![of];
203 let mut deps = FxHashSet::default();
204
205 while let Some(krate) = worklist.pop() {
206 if !deps.insert(krate) {
207 continue;
208 }
209
210 worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id));
211 }
212
213 deps.remove(&of);
214 deps.into_iter()
215 }
216
200 // FIXME: this only finds one crate with the given root; we could have multiple 217 // FIXME: this only finds one crate with the given root; we could have multiple
201 pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> { 218 pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
202 let (&crate_id, _) = 219 let (&crate_id, _) =
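
For context, the new `CrateGraph::transitive_deps` is a worklist traversal whose visited set both deduplicates work and terminates on cycles, with the starting crate removed at the end. A standalone sketch of the same logic, using plain `u32` ids and std collections instead of `CrateId` and `FxHashSet`:

    use std::collections::{HashMap, HashSet};

    // Toy crate graph: crate id -> direct dependencies.
    fn transitive_deps(graph: &HashMap<u32, Vec<u32>>, of: u32) -> HashSet<u32> {
        let mut worklist = vec![of];
        let mut deps = HashSet::new();

        while let Some(krate) = worklist.pop() {
            // `insert` returns false for already-visited crates, which both
            // deduplicates and terminates on cyclic or diamond-shaped graphs.
            if !deps.insert(krate) {
                continue;
            }
            if let Some(direct) = graph.get(&krate) {
                worklist.extend(direct.iter().copied());
            }
        }

        // A crate is not counted among its own transitive dependencies.
        deps.remove(&of);
        deps
    }

    fn main() {
        let mut graph = HashMap::new();
        graph.insert(0, vec![1, 2]);
        graph.insert(1, vec![2]);
        graph.insert(2, vec![]);

        let expected: HashSet<u32> = [1, 2].iter().copied().collect();
        assert_eq!(transitive_deps(&graph, 0), expected);
    }
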
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index e09eb77c2..1b3525011 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -1053,12 +1053,14 @@ pub struct ImplDef {
1053 1053
1054impl ImplDef { 1054impl ImplDef {
1055 pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<ImplDef> { 1055 pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<ImplDef> {
1056 let impls = db.impls_in_crate(krate.id); 1056 let inherent = db.inherent_impls_in_crate(krate.id);
1057 impls.all_impls().map(Self::from).collect() 1057 let trait_ = db.trait_impls_in_crate(krate.id);
1058
1059 inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect()
1058 } 1060 }
1059 pub fn for_trait(db: &dyn HirDatabase, krate: Crate, trait_: Trait) -> Vec<ImplDef> { 1061 pub fn for_trait(db: &dyn HirDatabase, krate: Crate, trait_: Trait) -> Vec<ImplDef> {
1060 let impls = db.impls_in_crate(krate.id); 1062 let impls = db.trait_impls_in_crate(krate.id);
1061 impls.lookup_impl_defs_for_trait(trait_.id).map(Self::from).collect() 1063 impls.for_trait(trait_.id).map(Self::from).collect()
1062 } 1064 }
1063 1065
1064 pub fn target_trait(self, db: &dyn HirDatabase) -> Option<TypeRef> { 1066 pub fn target_trait(self, db: &dyn HirDatabase) -> Option<TypeRef> {
@@ -1187,7 +1189,7 @@ impl Type {
1187 None => return false, 1189 None => return false,
1188 }; 1190 };
1189 1191
1190 let canonical_ty = Canonical { value: self.ty.value.clone(), num_vars: 0 }; 1192 let canonical_ty = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
1191 method_resolution::implements_trait( 1193 method_resolution::implements_trait(
1192 &canonical_ty, 1194 &canonical_ty,
1193 db, 1195 db,
@@ -1211,7 +1213,7 @@ impl Type {
1211 self.ty.environment.clone(), 1213 self.ty.environment.clone(),
1212 hir_ty::Obligation::Trait(trait_ref), 1214 hir_ty::Obligation::Trait(trait_ref),
1213 ), 1215 ),
1214 num_vars: 0, 1216 kinds: Arc::new([]),
1215 }; 1217 };
1216 1218
1217 db.trait_solve(self.krate, goal).is_some() 1219 db.trait_solve(self.krate, goal).is_some()
@@ -1286,7 +1288,7 @@ impl Type {
1286 pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a { 1288 pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
1287 // There should be no inference vars in types passed here 1289 // There should be no inference vars in types passed here
1288 // FIXME check that? 1290 // FIXME check that?
1289 let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 }; 1291 let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
1290 let environment = self.ty.environment.clone(); 1292 let environment = self.ty.environment.clone();
1291 let ty = InEnvironment { value: canonical, environment }; 1293 let ty = InEnvironment { value: canonical, environment };
1292 autoderef(db, Some(self.krate), ty) 1294 autoderef(db, Some(self.krate), ty)
@@ -1303,10 +1305,10 @@ impl Type {
1303 mut callback: impl FnMut(AssocItem) -> Option<T>, 1305 mut callback: impl FnMut(AssocItem) -> Option<T>,
1304 ) -> Option<T> { 1306 ) -> Option<T> {
1305 for krate in self.ty.value.def_crates(db, krate.id)? { 1307 for krate in self.ty.value.def_crates(db, krate.id)? {
1306 let impls = db.impls_in_crate(krate); 1308 let impls = db.inherent_impls_in_crate(krate);
1307 1309
1308 for impl_def in impls.lookup_impl_defs(&self.ty.value) { 1310 for impl_def in impls.for_self_ty(&self.ty.value) {
1309 for &item in db.impl_data(impl_def).items.iter() { 1311 for &item in db.impl_data(*impl_def).items.iter() {
1310 if let Some(result) = callback(item.into()) { 1312 if let Some(result) = callback(item.into()) {
1311 return Some(result); 1313 return Some(result);
1312 } 1314 }
@@ -1327,7 +1329,7 @@ impl Type {
1327 // There should be no inference vars in types passed here 1329 // There should be no inference vars in types passed here
1328 // FIXME check that? 1330 // FIXME check that?
1329 // FIXME replace Unknown by bound vars here 1331 // FIXME replace Unknown by bound vars here
1330 let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 }; 1332 let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
1331 1333
1332 let env = self.ty.environment.clone(); 1334 let env = self.ty.environment.clone();
1333 let krate = krate.id; 1335 let krate = krate.id;
@@ -1358,7 +1360,7 @@ impl Type {
1358 // There should be no inference vars in types passed here 1360 // There should be no inference vars in types passed here
1359 // FIXME check that? 1361 // FIXME check that?
1360 // FIXME replace Unknown by bound vars here 1362 // FIXME replace Unknown by bound vars here
1361 let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 }; 1363 let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
1362 1364
1363 let env = self.ty.environment.clone(); 1365 let env = self.ty.environment.clone();
1364 let krate = krate.id; 1366 let krate = krate.id;
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index bb67952de..cb48ca065 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -16,10 +16,10 @@ pub use hir_expand::db::{
16pub use hir_ty::db::{ 16pub use hir_ty::db::{
17 AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, FieldTypesQuery, 17 AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, FieldTypesQuery,
18 GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery, HirDatabase, 18 GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery, HirDatabase,
19 HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, ImplsFromDepsQuery, 19 HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, InferQueryQuery,
20 ImplsInCrateQuery, InferQueryQuery, InternAssocTyValueQuery, InternChalkImplQuery, 20 InherentImplsInCrateQuery, InternAssocTyValueQuery, InternChalkImplQuery, InternTypeCtorQuery,
21 InternTypeCtorQuery, InternTypeParamIdQuery, ReturnTypeImplTraitsQuery, StructDatumQuery, 21 InternTypeParamIdQuery, ReturnTypeImplTraitsQuery, StructDatumQuery, TraitDatumQuery,
22 TraitDatumQuery, TraitSolveQuery, TyQuery, ValueTyQuery, 22 TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery, ValueTyQuery,
23}; 23};
24 24
25#[test] 25#[test]
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 810c49d6f..3d78f71c1 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -83,6 +83,11 @@ impl PathResolution {
83/// Primary API to get semantic information, like types, from syntax trees. 83/// Primary API to get semantic information, like types, from syntax trees.
84pub struct Semantics<'db, DB> { 84pub struct Semantics<'db, DB> {
85 pub db: &'db DB, 85 pub db: &'db DB,
86 imp: SemanticsImpl<'db>,
87}
88
89pub struct SemanticsImpl<'db> {
90 pub db: &'db dyn HirDatabase,
86 s2d_cache: RefCell<SourceToDefCache>, 91 s2d_cache: RefCell<SourceToDefCache>,
87 cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>, 92 cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
88} 93}
@@ -95,20 +100,180 @@ impl<DB> fmt::Debug for Semantics<'_, DB> {
95 100
96impl<'db, DB: HirDatabase> Semantics<'db, DB> { 101impl<'db, DB: HirDatabase> Semantics<'db, DB> {
97 pub fn new(db: &DB) -> Semantics<DB> { 102 pub fn new(db: &DB) -> Semantics<DB> {
98 Semantics { db, s2d_cache: Default::default(), cache: Default::default() } 103 let impl_ = SemanticsImpl::new(db);
104 Semantics { db, imp: impl_ }
99 } 105 }
100 106
101 pub fn parse(&self, file_id: FileId) -> ast::SourceFile { 107 pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
102 let tree = self.db.parse(file_id).tree(); 108 self.imp.parse(file_id)
103 self.cache(tree.syntax().clone(), file_id.into());
104 tree
105 } 109 }
106 110
107 pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST { 111 pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST {
108 let file_id = d.source().file_id; 112 let file_id = d.source().file_id;
109 let root = self.db.parse_or_expand(file_id).unwrap(); 113 let root = self.db.parse_or_expand(file_id).unwrap();
110 self.cache(root, file_id); 114 self.imp.cache(root, file_id);
111 d.ast(self.db) 115 d.ast(self.db.upcast())
116 }
117
118 pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
119 self.imp.expand(macro_call)
120 }
121
122 pub fn expand_hypothetical(
123 &self,
124 actual_macro_call: &ast::MacroCall,
125 hypothetical_args: &ast::TokenTree,
126 token_to_map: SyntaxToken,
127 ) -> Option<(SyntaxNode, SyntaxToken)> {
128 self.imp.expand_hypothetical(actual_macro_call, hypothetical_args, token_to_map)
129 }
130
131 pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
132 self.imp.descend_into_macros(token)
133 }
134
135 pub fn descend_node_at_offset<N: ast::AstNode>(
136 &self,
137 node: &SyntaxNode,
138 offset: TextSize,
139 ) -> Option<N> {
140 self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
141 }
142
143 pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
144 self.imp.original_range(node)
145 }
146
147 pub fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
148 self.imp.diagnostics_range(diagnostics)
149 }
150
151 pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
152 self.imp.ancestors_with_macros(node)
153 }
154
155 pub fn ancestors_at_offset_with_macros(
156 &self,
157 node: &SyntaxNode,
158 offset: TextSize,
159 ) -> impl Iterator<Item = SyntaxNode> + '_ {
160 self.imp.ancestors_at_offset_with_macros(node, offset)
161 }
162
163 /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
164 /// search up until it is of the target AstNode type
165 pub fn find_node_at_offset_with_macros<N: AstNode>(
166 &self,
167 node: &SyntaxNode,
168 offset: TextSize,
169 ) -> Option<N> {
170 self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
171 }
172
173 /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
174 /// descend it and find again
175 pub fn find_node_at_offset_with_descend<N: AstNode>(
176 &self,
177 node: &SyntaxNode,
178 offset: TextSize,
179 ) -> Option<N> {
180 if let Some(it) = find_node_at_offset(&node, offset) {
181 return Some(it);
182 }
183
184 self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
185 }
186
187 pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
188 self.imp.type_of_expr(expr)
189 }
190
191 pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
192 self.imp.type_of_pat(pat)
193 }
194
195 pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
196 self.imp.resolve_method_call(call)
197 }
198
199 pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
200 self.imp.resolve_field(field)
201 }
202
203 pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<(Field, Option<Local>)> {
204 self.imp.resolve_record_field(field)
205 }
206
207 pub fn resolve_record_field_pat(&self, field: &ast::RecordFieldPat) -> Option<Field> {
208 self.imp.resolve_record_field_pat(field)
209 }
210
211 pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
212 self.imp.resolve_macro_call(macro_call)
213 }
214
215 pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
216 self.imp.resolve_path(path)
217 }
218
219 pub fn resolve_variant(&self, record_lit: ast::RecordLit) -> Option<VariantId> {
220 self.imp.resolve_variant(record_lit)
221 }
222
223 pub fn lower_path(&self, path: &ast::Path) -> Option<Path> {
224 self.imp.lower_path(path)
225 }
226
227 pub fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef> {
228 self.imp.resolve_bind_pat_to_const(pat)
229 }
230
231 // FIXME: use this instead?
232 // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
233
234 pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> {
235 self.imp.record_literal_missing_fields(literal)
236 }
237
238 pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
239 self.imp.record_pattern_missing_fields(pattern)
240 }
241
242 pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
243 let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned();
244 T::to_def(&self.imp, src)
245 }
246
247 pub fn to_module_def(&self, file: FileId) -> Option<Module> {
248 self.imp.to_module_def(file)
249 }
250
251 pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
252 self.imp.scope(node)
253 }
254
255 pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
256 self.imp.scope_at_offset(node, offset)
257 }
258
259 pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
260 self.imp.scope_for_def(def)
261 }
262
263 pub fn assert_contains_node(&self, node: &SyntaxNode) {
264 self.imp.assert_contains_node(node)
265 }
266}
267
268impl<'db> SemanticsImpl<'db> {
269 pub fn new(db: &'db dyn HirDatabase) -> Self {
270 Self { db, s2d_cache: Default::default(), cache: Default::default() }
271 }
272
273 pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
274 let tree = self.db.parse(file_id).tree();
275 self.cache(tree.syntax().clone(), file_id.into());
276 tree
112 } 277 }
113 278
114 pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { 279 pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
@@ -130,9 +295,15 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
130 self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call); 295 self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
131 let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); 296 let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
132 let krate = sa.resolver.krate()?; 297 let krate = sa.resolver.krate()?;
133 let macro_call_id = macro_call 298 let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
134 .as_call_id(self.db, krate, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?; 299 sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
135 hir_expand::db::expand_hypothetical(self.db, macro_call_id, hypothetical_args, token_to_map) 300 })?;
301 hir_expand::db::expand_hypothetical(
302 self.db.upcast(),
303 macro_call_id,
304 hypothetical_args,
305 token_to_map,
306 )
136 } 307 }
137 308
138 pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { 309 pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
@@ -147,7 +318,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
147 return None; 318 return None;
148 } 319 }
149 let file_id = sa.expand(self.db, token.with_value(&macro_call))?; 320 let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
150 let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?; 321 let token = file_id.expansion_info(self.db.upcast())?.map_token_down(token.as_ref())?;
151 322
152 self.cache(find_root(&token.value.parent()), token.file_id); 323 self.cache(find_root(&token.value.parent()), token.file_id);
153 324
@@ -159,15 +330,16 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
159 token.value 330 token.value
160 } 331 }
161 332
162 pub fn descend_node_at_offset<N: ast::AstNode>( 333 pub fn descend_node_at_offset(
163 &self, 334 &self,
164 node: &SyntaxNode, 335 node: &SyntaxNode,
165 offset: TextSize, 336 offset: TextSize,
166 ) -> Option<N> { 337 ) -> impl Iterator<Item = SyntaxNode> + '_ {
167 // Handle macro token cases 338 // Handle macro token cases
168 node.token_at_offset(offset) 339 node.token_at_offset(offset)
169 .map(|token| self.descend_into_macros(token)) 340 .map(|token| self.descend_into_macros(token))
170 .find_map(|it| self.ancestors_with_macros(it.parent()).find_map(N::cast)) 341 .map(|it| self.ancestors_with_macros(it.parent()))
342 .flatten()
171 } 343 }
172 344
173 pub fn original_range(&self, node: &SyntaxNode) -> FileRange { 345 pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
@@ -184,7 +356,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
184 356
185 pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ { 357 pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
186 let node = self.find_file(node); 358 let node = self.find_file(node);
187 node.ancestors_with_macros(self.db).map(|it| it.value) 359 node.ancestors_with_macros(self.db.upcast()).map(|it| it.value)
188 } 360 }
189 361
190 pub fn ancestors_at_offset_with_macros( 362 pub fn ancestors_at_offset_with_macros(
@@ -197,29 +369,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
197 .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) 369 .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
198 } 370 }
199 371
200 /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
201 /// search up until it is of the target AstNode type
202 pub fn find_node_at_offset_with_macros<N: AstNode>(
203 &self,
204 node: &SyntaxNode,
205 offset: TextSize,
206 ) -> Option<N> {
207 self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
208 }
209
210 /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
211 /// descend it and find again
212 pub fn find_node_at_offset_with_descend<N: AstNode>(
213 &self,
214 node: &SyntaxNode,
215 offset: TextSize,
216 ) -> Option<N> {
217 if let Some(it) = find_node_at_offset(&node, offset) {
218 return Some(it);
219 }
220 self.descend_node_at_offset(&node, offset)
221 }
222
223 pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { 372 pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
224 self.analyze(expr.syntax()).type_of(self.db, &expr) 373 self.analyze(expr.syntax()).type_of(self.db, &expr)
225 } 374 }
@@ -267,9 +416,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
267 self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) 416 self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
268 } 417 }
269 418
270 // FIXME: use this instead?
271 // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
272
273 pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> { 419 pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> {
274 self.analyze(literal.syntax()) 420 self.analyze(literal.syntax())
275 .record_literal_missing_fields(self.db, literal) 421 .record_literal_missing_fields(self.db, literal)
@@ -282,11 +428,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
282 .unwrap_or_default() 428 .unwrap_or_default()
283 } 429 }
284 430
285 pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
286 let src = self.find_file(src.syntax().clone()).with_value(src).cloned();
287 T::to_def(self, src)
288 }
289
290 fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T { 431 fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T {
291 let mut cache = self.s2d_cache.borrow_mut(); 432 let mut cache = self.s2d_cache.borrow_mut();
292 let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache }; 433 let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
@@ -310,7 +451,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
310 } 451 }
311 452
312 pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { 453 pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
313 let resolver = def.id.resolver(self.db); 454 let resolver = def.id.resolver(self.db.upcast());
314 SemanticsScope { db: self.db, resolver } 455 SemanticsScope { db: self.db, resolver }
315 } 456 }
316 457
@@ -331,17 +472,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
331 ChildContainer::DefWithBodyId(def) => { 472 ChildContainer::DefWithBodyId(def) => {
332 return SourceAnalyzer::new_for_body(self.db, def, src, offset) 473 return SourceAnalyzer::new_for_body(self.db, def, src, offset)
333 } 474 }
334 ChildContainer::TraitId(it) => it.resolver(self.db), 475 ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
335 ChildContainer::ImplId(it) => it.resolver(self.db), 476 ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
336 ChildContainer::ModuleId(it) => it.resolver(self.db), 477 ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
337 ChildContainer::EnumId(it) => it.resolver(self.db), 478 ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
338 ChildContainer::VariantId(it) => it.resolver(self.db), 479 ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
339 ChildContainer::GenericDefId(it) => it.resolver(self.db), 480 ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
340 }; 481 };
341 SourceAnalyzer::new_for_resolver(resolver, src) 482 SourceAnalyzer::new_for_resolver(resolver, src)
342 } 483 }
343 484
344 fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) { 485 pub fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
345 assert!(root_node.parent().is_none()); 486 assert!(root_node.parent().is_none());
346 let mut cache = self.cache.borrow_mut(); 487 let mut cache = self.cache.borrow_mut();
347 let prev = cache.insert(root_node, file_id); 488 let prev = cache.insert(root_node, file_id);
@@ -357,7 +498,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
357 cache.get(root_node).copied() 498 cache.get(root_node).copied()
358 } 499 }
359 500
360 fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> { 501 pub fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
361 let root_node = find_root(&node); 502 let root_node = find_root(&node);
362 let file_id = self.lookup(&root_node).unwrap_or_else(|| { 503 let file_id = self.lookup(&root_node).unwrap_or_else(|| {
363 panic!( 504 panic!(
@@ -382,14 +523,14 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
382pub trait ToDef: AstNode + Clone { 523pub trait ToDef: AstNode + Clone {
383 type Def; 524 type Def;
384 525
385 fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def>; 526 fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def>;
386} 527}
387 528
388macro_rules! to_def_impls { 529macro_rules! to_def_impls {
389 ($(($def:path, $ast:path, $meth:ident)),* ,) => {$( 530 ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
390 impl ToDef for $ast { 531 impl ToDef for $ast {
391 type Def = $def; 532 type Def = $def;
392 fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def> { 533 fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> {
393 sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from) 534 sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
394 } 535 }
395 } 536 }
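
The bulk of the `semantics.rs` diff introduces a delegation split: the generic `Semantics<'db, DB>` becomes a thin facade forwarding to the new non-generic `SemanticsImpl<'db>`, which works with `&dyn HirDatabase` (hence the `self.db.upcast()` calls). A minimal sketch of that pattern with made-up names and a toy database trait, assuming the intent is to keep the per-`DB` generic surface small while the real logic is compiled once against the trait object:

    // Toy stand-in for `HirDatabase`.
    trait Database {
        fn source_text(&self, file_id: u32) -> String;
    }

    // Generic facade, analogous to `Semantics<'db, DB>`: it only forwards.
    struct Facade<'db, DB> {
        pub db: &'db DB,
        imp: FacadeImpl<'db>,
    }

    // Non-generic worker, analogous to `SemanticsImpl<'db>`: holds `&dyn Database`
    // so its methods are compiled once, not once per concrete database type.
    struct FacadeImpl<'db> {
        db: &'db dyn Database,
    }

    impl<'db, DB: Database> Facade<'db, DB> {
        fn new(db: &'db DB) -> Facade<'db, DB> {
            Facade { db, imp: FacadeImpl::new(db) }
        }
        fn parse(&self, file_id: u32) -> String {
            self.imp.parse(file_id)
        }
    }

    impl<'db> FacadeImpl<'db> {
        fn new(db: &'db dyn Database) -> FacadeImpl<'db> {
            FacadeImpl { db }
        }
        // All of the actual work lives on the non-generic type.
        fn parse(&self, file_id: u32) -> String {
            self.db.source_text(file_id)
        }
    }

    struct InMemoryDb;

    impl Database for InMemoryDb {
        fn source_text(&self, _file_id: u32) -> String {
            "fn main() {}".to_string()
        }
    }

    fn main() {
        let db = InMemoryDb;
        let sema = Facade::new(&db);
        assert_eq!(sema.parse(0), "fn main() {}");
        // The concrete database stays reachable through the facade when needed.
        let _concrete: &InMemoryDb = sema.db;
    }
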
diff --git a/crates/ra_hir_def/src/item_tree/lower.rs b/crates/ra_hir_def/src/item_tree/lower.rs
index 5149dd141..06743d7fc 100644
--- a/crates/ra_hir_def/src/item_tree/lower.rs
+++ b/crates/ra_hir_def/src/item_tree/lower.rs
@@ -211,7 +211,7 @@ impl Ctx {
211 fn lower_record_field(&mut self, field: &ast::RecordFieldDef) -> Option<Field> { 211 fn lower_record_field(&mut self, field: &ast::RecordFieldDef) -> Option<Field> {
212 let name = field.name()?.as_name(); 212 let name = field.name()?.as_name();
213 let visibility = self.lower_visibility(field); 213 let visibility = self.lower_visibility(field);
214 let type_ref = self.lower_type_ref(&field.ascribed_type()?); 214 let type_ref = self.lower_type_ref_opt(field.ascribed_type());
215 let res = Field { name, type_ref, visibility }; 215 let res = Field { name, type_ref, visibility };
216 Some(res) 216 Some(res)
217 } 217 }
diff --git a/crates/ra_hir_ty/src/autoderef.rs b/crates/ra_hir_ty/src/autoderef.rs
index 1b0f84c5c..c727012c6 100644
--- a/crates/ra_hir_ty/src/autoderef.rs
+++ b/crates/ra_hir_ty/src/autoderef.rs
@@ -37,7 +37,7 @@ pub(crate) fn deref(
37 ty: InEnvironment<&Canonical<Ty>>, 37 ty: InEnvironment<&Canonical<Ty>>,
38) -> Option<Canonical<Ty>> { 38) -> Option<Canonical<Ty>> {
39 if let Some(derefed) = ty.value.value.builtin_deref() { 39 if let Some(derefed) = ty.value.value.builtin_deref() {
40 Some(Canonical { value: derefed, num_vars: ty.value.num_vars }) 40 Some(Canonical { value: derefed, kinds: ty.value.kinds.clone() })
41 } else { 41 } else {
42 deref_by_trait(db, krate, ty) 42 deref_by_trait(db, krate, ty)
43 } 43 }
@@ -68,8 +68,8 @@ fn deref_by_trait(
68 68
69 // Check that the type implements Deref at all 69 // Check that the type implements Deref at all
70 let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() }; 70 let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() };
71 let implements_goal = super::Canonical { 71 let implements_goal = Canonical {
72 num_vars: ty.value.num_vars, 72 kinds: ty.value.kinds.clone(),
73 value: InEnvironment { 73 value: InEnvironment {
74 value: Obligation::Trait(trait_ref), 74 value: Obligation::Trait(trait_ref),
75 environment: ty.environment.clone(), 75 environment: ty.environment.clone(),
@@ -81,7 +81,7 @@ fn deref_by_trait(
81 81
82 // Now do the assoc type projection 82 // Now do the assoc type projection
83 let projection = super::traits::ProjectionPredicate { 83 let projection = super::traits::ProjectionPredicate {
84 ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.num_vars)), 84 ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.kinds.len())),
85 projection_ty: super::ProjectionTy { associated_ty: target, parameters }, 85 projection_ty: super::ProjectionTy { associated_ty: target, parameters },
86 }; 86 };
87 87
@@ -89,7 +89,8 @@ fn deref_by_trait(
89 89
90 let in_env = InEnvironment { value: obligation, environment: ty.environment }; 90 let in_env = InEnvironment { value: obligation, environment: ty.environment };
91 91
92 let canonical = super::Canonical { num_vars: 1 + ty.value.num_vars, value: in_env }; 92 let canonical =
93 Canonical::new(in_env, ty.value.kinds.iter().copied().chain(Some(super::TyKind::General)));
93 94
94 let solution = db.trait_solve(krate, canonical)?; 95 let solution = db.trait_solve(krate, canonical)?;
95 96
@@ -110,7 +111,7 @@ fn deref_by_trait(
110 // assumptions will be broken. We would need to properly introduce 111 // assumptions will be broken. We would need to properly introduce
111 // new variables in that case 112 // new variables in that case
112 113
113 for i in 1..vars.0.num_vars { 114 for i in 1..vars.0.kinds.len() {
114 if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) 115 if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1))
115 { 116 {
116 warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution); 117 warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution);
@@ -119,7 +120,7 @@ fn deref_by_trait(
119 } 120 }
120 Some(Canonical { 121 Some(Canonical {
121 value: vars.0.value[vars.0.value.len() - 1].clone(), 122 value: vars.0.value[vars.0.value.len() - 1].clone(),
122 num_vars: vars.0.num_vars, 123 kinds: vars.0.kinds.clone(),
123 }) 124 })
124 } 125 }
125 Solution::Ambig(_) => { 126 Solution::Ambig(_) => {
diff --git a/crates/ra_hir_ty/src/db.rs b/crates/ra_hir_ty/src/db.rs
index cad553273..dc06c0ee7 100644
--- a/crates/ra_hir_ty/src/db.rs
+++ b/crates/ra_hir_ty/src/db.rs
@@ -11,7 +11,7 @@ use ra_db::{impl_intern_key, salsa, CrateId, Upcast};
11use ra_prof::profile; 11use ra_prof::profile;
12 12
13use crate::{ 13use crate::{
14 method_resolution::CrateImplDefs, 14 method_resolution::{InherentImpls, TraitImpls},
15 traits::{chalk, AssocTyValue, Impl}, 15 traits::{chalk, AssocTyValue, Impl},
16 Binders, CallableDef, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig, 16 Binders, CallableDef, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig,
17 ReturnTypeImplTraits, TraitRef, Ty, TyDefId, TypeCtor, ValueTyDefId, 17 ReturnTypeImplTraits, TraitRef, Ty, TyDefId, TypeCtor, ValueTyDefId,
@@ -67,11 +67,14 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
67 #[salsa::invoke(crate::lower::generic_defaults_query)] 67 #[salsa::invoke(crate::lower::generic_defaults_query)]
68 fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>; 68 fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>;
69 69
70 #[salsa::invoke(crate::method_resolution::CrateImplDefs::impls_in_crate_query)] 70 #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
71 fn impls_in_crate(&self, krate: CrateId) -> Arc<CrateImplDefs>; 71 fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
72 72
73 #[salsa::invoke(crate::method_resolution::CrateImplDefs::impls_from_deps_query)] 73 #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
74 fn impls_from_deps(&self, krate: CrateId) -> Arc<CrateImplDefs>; 74 fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
75
76 #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
77 fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<TraitImpls>;
75 78
76 // Interned IDs for Chalk integration 79 // Interned IDs for Chalk integration
77 #[salsa::interned] 80 #[salsa::interned]
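
The old `impls_in_crate`/`impls_from_deps` queries are split into `inherent_impls_in_crate` plus `trait_impls_in_crate`/`trait_impls_in_deps`, backed by the separate `InherentImpls` and `TraitImpls` structures shown later in `method_resolution.rs`. A toy model of the two lookup shapes (plain integers and strings stand in for `ImplId`, `TraitId` and `TyFingerprint`), including the `None` bucket for impls whose self type has no fingerprint and may therefore apply to any self type:

    use std::collections::HashMap;

    type ImplId = u32;
    type TraitId = &'static str;
    // Stand-in for `TyFingerprint`: a coarse key derived from the impl's self type.
    type Fingerprint = &'static str;

    // Inherent impls are keyed by self type only (the `InherentImpls` shape).
    struct InherentImpls {
        map: HashMap<Fingerprint, Vec<ImplId>>,
    }

    // Trait impls are keyed by trait, then by optional self-type fingerprint
    // (the `TraitImpls` shape).
    struct TraitImpls {
        map: HashMap<TraitId, HashMap<Option<Fingerprint>, Vec<ImplId>>>,
    }

    impl TraitImpls {
        fn for_trait_and_self_ty(&self, trait_: TraitId, self_ty: Fingerprint) -> Vec<ImplId> {
            self.map
                .get(&trait_)
                .into_iter()
                // Consult both the "any self type" bucket and the exact fingerprint.
                .flat_map(move |by_fp| by_fp.get(&None).into_iter().chain(by_fp.get(&Some(self_ty))))
                .flat_map(|v| v.iter().copied())
                .collect()
        }
    }

    fn main() {
        let inherent = InherentImpls { map: HashMap::from([("Foo", vec![0])]) };

        let mut clone_impls = HashMap::new();
        clone_impls.insert(Some("Foo"), vec![1]);
        clone_impls.insert(None, vec![2]); // an impl without a self-type fingerprint
        let trait_impls = TraitImpls { map: HashMap::from([("Clone", clone_impls)]) };

        assert_eq!(inherent.map["Foo"], vec![0]);
        assert_eq!(trait_impls.for_trait_and_self_ty("Clone", "Foo"), vec![2, 1]);
    }
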
diff --git a/crates/ra_hir_ty/src/infer/unify.rs b/crates/ra_hir_ty/src/infer/unify.rs
index 269495ca0..2e895d911 100644
--- a/crates/ra_hir_ty/src/infer/unify.rs
+++ b/crates/ra_hir_ty/src/infer/unify.rs
@@ -9,7 +9,7 @@ use test_utils::mark;
9use super::{InferenceContext, Obligation}; 9use super::{InferenceContext, Obligation};
10use crate::{ 10use crate::{
11 BoundVar, Canonical, DebruijnIndex, GenericPredicate, InEnvironment, InferTy, Substs, Ty, 11 BoundVar, Canonical, DebruijnIndex, GenericPredicate, InEnvironment, InferTy, Substs, Ty,
12 TypeCtor, TypeWalk, 12 TyKind, TypeCtor, TypeWalk,
13}; 13};
14 14
15impl<'a> InferenceContext<'a> { 15impl<'a> InferenceContext<'a> {
@@ -86,10 +86,20 @@ where
86 } 86 }
87 87
88 fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> { 88 fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> {
89 Canonicalized { 89 let kinds = self
90 value: Canonical { value: result, num_vars: self.free_vars.len() }, 90 .free_vars
91 free_vars: self.free_vars, 91 .iter()
92 } 92 .map(|v| match v {
93 // mapping MaybeNeverTypeVar to the same kind as general ones
94 // should be fine, because as opposed to int or float type vars,
95 // they don't restrict what kind of type can go into them, they
96 // just affect fallback.
97 InferTy::TypeVar(_) | InferTy::MaybeNeverTypeVar(_) => TyKind::General,
98 InferTy::IntVar(_) => TyKind::Integer,
99 InferTy::FloatVar(_) => TyKind::Float,
100 })
101 .collect();
102 Canonicalized { value: Canonical { value: result, kinds }, free_vars: self.free_vars }
93 } 103 }
94 104
95 pub(crate) fn canonicalize_ty(mut self, ty: Ty) -> Canonicalized<Ty> { 105 pub(crate) fn canonicalize_ty(mut self, ty: Ty) -> Canonicalized<Ty> {
@@ -131,26 +141,41 @@ impl<T> Canonicalized<T> {
131 ty 141 ty
132 } 142 }
133 143
134 pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Vec<Ty>>) { 144 pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Substs>) {
135 // the solution may contain new variables, which we need to convert to new inference vars 145 // the solution may contain new variables, which we need to convert to new inference vars
136 let new_vars = Substs((0..solution.num_vars).map(|_| ctx.table.new_type_var()).collect()); 146 let new_vars = Substs(
147 solution
148 .kinds
149 .iter()
150 .map(|k| match k {
151 TyKind::General => ctx.table.new_type_var(),
152 TyKind::Integer => ctx.table.new_integer_var(),
153 TyKind::Float => ctx.table.new_float_var(),
154 })
155 .collect(),
156 );
137 for (i, ty) in solution.value.into_iter().enumerate() { 157 for (i, ty) in solution.value.into_iter().enumerate() {
138 let var = self.free_vars[i]; 158 let var = self.free_vars[i];
139 // eagerly replace projections in the type; we may be getting types 159 // eagerly replace projections in the type; we may be getting types
140 // e.g. from where clauses where this hasn't happened yet 160 // e.g. from where clauses where this hasn't happened yet
141 let ty = ctx.normalize_associated_types_in(ty.subst_bound_vars(&new_vars)); 161 let ty = ctx.normalize_associated_types_in(ty.clone().subst_bound_vars(&new_vars));
142 ctx.table.unify(&Ty::Infer(var), &ty); 162 ctx.table.unify(&Ty::Infer(var), &ty);
143 } 163 }
144 } 164 }
145} 165}
146 166
147pub fn unify(ty1: &Canonical<Ty>, ty2: &Canonical<Ty>) -> Option<Substs> { 167pub fn unify(tys: &Canonical<(Ty, Ty)>) -> Option<Substs> {
148 let mut table = InferenceTable::new(); 168 let mut table = InferenceTable::new();
149 let num_vars = ty1.num_vars.max(ty2.num_vars); 169 let vars = Substs(
150 let vars = 170 tys.kinds
151 Substs::builder(num_vars).fill(std::iter::repeat_with(|| table.new_type_var())).build(); 171 .iter()
152 let ty1_with_vars = ty1.value.clone().subst_bound_vars(&vars); 172 // we always use type vars here because we want everything to
153 let ty2_with_vars = ty2.value.clone().subst_bound_vars(&vars); 173 // fallback to Unknown in the end (kind of hacky, as below)
174 .map(|_| table.new_type_var())
175 .collect(),
176 );
177 let ty1_with_vars = tys.value.0.clone().subst_bound_vars(&vars);
178 let ty2_with_vars = tys.value.1.clone().subst_bound_vars(&vars);
154 if !table.unify(&ty1_with_vars, &ty2_with_vars) { 179 if !table.unify(&ty1_with_vars, &ty2_with_vars) {
155 return None; 180 return None;
156 } 181 }
@@ -162,7 +187,7 @@ pub fn unify(ty1: &Canonical<Ty>, ty2: &Canonical<Ty>) -> Option<Substs> {
162 } 187 }
163 } 188 }
164 Some( 189 Some(
165 Substs::builder(ty1.num_vars) 190 Substs::builder(tys.kinds.len())
166 .fill(vars.iter().map(|v| table.resolve_ty_completely(v.clone()))) 191 .fill(vars.iter().map(|v| table.resolve_ty_completely(v.clone())))
167 .build(), 192 .build(),
168 ) 193 )
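
With `num_vars` replaced by per-variable kinds, canonicalization now records a `TyKind` for every free inference variable, and `apply_solution` (and the rewritten `unify`) create fresh inference variables of the matching kind. A self-contained toy version of that mapping (illustrative names, not the crate's real types):

    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    enum TyKind {
        General,
        Integer,
        Float,
    }

    #[derive(Debug)]
    enum InferVar {
        Type(u32),
        Int(u32),
        Float(u32),
    }

    struct Table {
        next: u32,
    }

    impl Table {
        // Corresponds to `new_type_var` / `new_integer_var` / `new_float_var`.
        fn fresh(&mut self, kind: TyKind) -> InferVar {
            let id = self.next;
            self.next += 1;
            match kind {
                TyKind::General => InferVar::Type(id),
                TyKind::Integer => InferVar::Int(id),
                TyKind::Float => InferVar::Float(id),
            }
        }
    }

    fn main() {
        // Kinds recorded for a canonicalized value with three bound variables.
        let kinds = [TyKind::General, TyKind::Integer, TyKind::Float];
        let mut table = Table { next: 0 };

        // One fresh variable per binder, of the matching kind, as `apply_solution`
        // now does when substituting a trait-solver result back in.
        let vars: Vec<InferVar> = kinds.iter().map(|&k| table.fresh(k)).collect();
        println!("{:?}", vars);
    }
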
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs
index c9513b752..7f3f5e771 100644
--- a/crates/ra_hir_ty/src/lib.rs
+++ b/crates/ra_hir_ty/src/lib.rs
@@ -662,13 +662,27 @@ impl TypeWalk for GenericPredicate {
662 662
663/// Basically a claim (currently not validated / checked) that the contained 663/// Basically a claim (currently not validated / checked) that the contained
664/// type / trait ref contains no inference variables; any inference variables it 664/// type / trait ref contains no inference variables; any inference variables it
665/// contained have been replaced by bound variables, and `num_vars` tells us how 665/// contained have been replaced by bound variables, and `kinds` tells us how
666/// many there are. This is used to erase irrelevant differences between types 666/// many there are and whether they were normal or float/int variables. This is
667/// before using them in queries. 667/// used to erase irrelevant differences between types before using them in
668/// queries.
668#[derive(Debug, Clone, PartialEq, Eq, Hash)] 669#[derive(Debug, Clone, PartialEq, Eq, Hash)]
669pub struct Canonical<T> { 670pub struct Canonical<T> {
670 pub value: T, 671 pub value: T,
671 pub num_vars: usize, 672 pub kinds: Arc<[TyKind]>,
673}
674
675impl<T> Canonical<T> {
676 pub fn new(value: T, kinds: impl IntoIterator<Item = TyKind>) -> Self {
677 Self { value, kinds: kinds.into_iter().collect() }
678 }
679}
680
681#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
682pub enum TyKind {
683 General,
684 Integer,
685 Float,
672} 686}
673 687
674/// A function signature as seen by type inference: Several parameter types and 688/// A function signature as seen by type inference: Several parameter types and
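
To make the new shape concrete, here is a stripped-down sketch of `Canonical` with `kinds` in place of `num_vars` (simplified, with `&str` standing in for a real type so it runs on its own); the empty-kinds case corresponds to the `Arc::new([])` literals used throughout `code_model.rs` above:

    use std::sync::Arc;

    // Stripped-down `Canonical`: the bare `num_vars: usize` becomes a list of
    // per-variable kinds.
    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
    struct Canonical<T> {
        value: T,
        kinds: Arc<[TyKind]>,
    }

    #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
    enum TyKind {
        General,
        Integer,
        Float,
    }

    impl<T> Canonical<T> {
        fn new(value: T, kinds: impl IntoIterator<Item = TyKind>) -> Self {
            Self { value, kinds: kinds.into_iter().collect() }
        }
    }

    fn main() {
        // The old `num_vars: 0` becomes an empty kind list (`Arc::new([])` in the
        // diff; an empty iterator collects to the same thing).
        let no_vars = Canonical::new("Foo", std::iter::empty::<TyKind>());
        assert_eq!(no_vars.kinds.len(), 0);

        // The old `num_vars: 3` becomes three kinds, which can now also record
        // that a bound variable came from an integer or float inference variable.
        let with_vars =
            Canonical::new("(?0, ?1, ?2)", vec![TyKind::General, TyKind::Integer, TyKind::Float]);
        assert_eq!(with_vars.kinds.len(), 3);
    }
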
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs
index c19519cf1..a45febbf7 100644
--- a/crates/ra_hir_ty/src/method_resolution.rs
+++ b/crates/ra_hir_ty/src/method_resolution.rs
@@ -2,7 +2,7 @@
2//! For details about how this works in rustc, see the method lookup page in the 2//! For details about how this works in rustc, see the method lookup page in the
3//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html) 3//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
4//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs. 4//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs.
5use std::sync::Arc; 5use std::{iter, sync::Arc};
6 6
7use arrayvec::ArrayVec; 7use arrayvec::ArrayVec;
8use hir_def::{ 8use hir_def::{
@@ -17,7 +17,8 @@ use rustc_hash::{FxHashMap, FxHashSet};
17use super::Substs; 17use super::Substs;
18use crate::{ 18use crate::{
19 autoderef, db::HirDatabase, primitive::FloatBitness, utils::all_super_traits, ApplicationTy, 19 autoderef, db::HirDatabase, primitive::FloatBitness, utils::all_super_traits, ApplicationTy,
20 Canonical, DebruijnIndex, InEnvironment, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk, 20 Canonical, DebruijnIndex, InEnvironment, TraitEnvironment, TraitRef, Ty, TyKind, TypeCtor,
21 TypeWalk,
21}; 22};
22 23
23/// This is used as a key for indexing impls. 24/// This is used as a key for indexing impls.
@@ -38,136 +39,131 @@ impl TyFingerprint {
38 } 39 }
39} 40}
40 41
41/// A queryable and mergeable collection of impls. 42/// Trait impls defined or available in some crate.
42#[derive(Debug, PartialEq, Eq)] 43#[derive(Debug, Eq, PartialEq)]
43pub struct CrateImplDefs { 44pub struct TraitImpls {
44 inherent_impls: FxHashMap<TyFingerprint, Vec<ImplId>>, 45 // If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type.
45 impls_by_trait: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>, 46 map: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>,
46} 47}
47 48
48impl CrateImplDefs { 49impl TraitImpls {
49 pub(crate) fn impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<CrateImplDefs> { 50 pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
50 let _p = profile("impls_in_crate_query"); 51 let _p = profile("trait_impls_in_crate_query");
51 let mut res = CrateImplDefs { 52 let mut impls = Self { map: FxHashMap::default() };
52 inherent_impls: FxHashMap::default(),
53 impls_by_trait: FxHashMap::default(),
54 };
55 res.fill(db, krate);
56 53
57 Arc::new(res) 54 let crate_def_map = db.crate_def_map(krate);
55 for (_module_id, module_data) in crate_def_map.modules.iter() {
56 for impl_id in module_data.scope.impls() {
57 let target_trait = match db.impl_trait(impl_id) {
58 Some(tr) => tr.value.trait_,
59 None => continue,
60 };
61 let self_ty = db.impl_self_ty(impl_id);
62 let self_ty_fp = TyFingerprint::for_impl(&self_ty.value);
63 impls
64 .map
65 .entry(target_trait)
66 .or_default()
67 .entry(self_ty_fp)
68 .or_default()
69 .push(impl_id);
70 }
71 }
72
73 Arc::new(impls)
58 } 74 }
59 75
60 /// Collects all impls from transitive dependencies of `krate` that may be used by `krate`. 76 pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
61 /// 77 let _p = profile("trait_impls_in_deps_query");
62 /// The full set of impls that can be used by `krate` is the returned map plus all the impls
63 /// from `krate` itself.
64 pub(crate) fn impls_from_deps_query(
65 db: &dyn HirDatabase,
66 krate: CrateId,
67 ) -> Arc<CrateImplDefs> {
68 let _p = profile("impls_from_deps_query");
69 let crate_graph = db.crate_graph(); 78 let crate_graph = db.crate_graph();
70 let mut res = CrateImplDefs { 79 let mut res = Self { map: FxHashMap::default() };
71 inherent_impls: FxHashMap::default(),
72 impls_by_trait: FxHashMap::default(),
73 };
74 80
75 // For each dependency, calculate `impls_from_deps` recursively, then add its own 81 for krate in crate_graph.transitive_deps(krate) {
76 // `impls_in_crate`. 82 res.merge(&db.trait_impls_in_crate(krate));
77 // As we might visit crates multiple times, `merge` has to deduplicate impls to avoid
78 // wasting memory.
79 for dep in &crate_graph[krate].dependencies {
80 res.merge(&db.impls_from_deps(dep.crate_id));
81 res.merge(&db.impls_in_crate(dep.crate_id));
82 } 83 }
83 84
84 Arc::new(res) 85 Arc::new(res)
85 } 86 }
86 87
87 fn fill(&mut self, db: &dyn HirDatabase, krate: CrateId) {
88 let crate_def_map = db.crate_def_map(krate);
89 for (_module_id, module_data) in crate_def_map.modules.iter() {
90 for impl_id in module_data.scope.impls() {
91 match db.impl_trait(impl_id) {
92 Some(tr) => {
93 let self_ty = db.impl_self_ty(impl_id);
94 let self_ty_fp = TyFingerprint::for_impl(&self_ty.value);
95 self.impls_by_trait
96 .entry(tr.value.trait_)
97 .or_default()
98 .entry(self_ty_fp)
99 .or_default()
100 .push(impl_id);
101 }
102 None => {
103 let self_ty = db.impl_self_ty(impl_id);
104 if let Some(self_ty_fp) = TyFingerprint::for_impl(&self_ty.value) {
105 self.inherent_impls.entry(self_ty_fp).or_default().push(impl_id);
106 }
107 }
108 }
109 }
110 }
111 }
112
113 fn merge(&mut self, other: &Self) { 88 fn merge(&mut self, other: &Self) {
114 for (fp, impls) in &other.inherent_impls { 89 for (trait_, other_map) in &other.map {
115 let vec = self.inherent_impls.entry(*fp).or_default(); 90 let map = self.map.entry(*trait_).or_default();
116 vec.extend(impls);
117 vec.sort();
118 vec.dedup();
119 }
120
121 for (trait_, other_map) in &other.impls_by_trait {
122 let map = self.impls_by_trait.entry(*trait_).or_default();
123 for (fp, impls) in other_map { 91 for (fp, impls) in other_map {
124 let vec = map.entry(*fp).or_default(); 92 let vec = map.entry(*fp).or_default();
125 vec.extend(impls); 93 vec.extend(impls);
126 vec.sort();
127 vec.dedup();
128 } 94 }
129 } 95 }
130 } 96 }
131 97
132 pub fn lookup_impl_defs(&self, ty: &Ty) -> impl Iterator<Item = ImplId> + '_ { 98 /// Queries all impls of the given trait.
133 let fingerprint = TyFingerprint::for_impl(ty); 99 pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ {
134 fingerprint.and_then(|f| self.inherent_impls.get(&f)).into_iter().flatten().copied() 100 self.map
135 } 101 .get(&trait_)
136
137 pub fn lookup_impl_defs_for_trait(&self, tr: TraitId) -> impl Iterator<Item = ImplId> + '_ {
138 self.impls_by_trait
139 .get(&tr)
140 .into_iter() 102 .into_iter()
141 .flat_map(|m| m.values().flat_map(|v| v.iter().copied())) 103 .flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
142 } 104 }
143 105
144 pub fn lookup_impl_defs_for_trait_and_ty( 106 /// Queries all impls of `trait_` that may apply to `self_ty`.
107 pub fn for_trait_and_self_ty(
145 &self, 108 &self,
146 tr: TraitId, 109 trait_: TraitId,
147 fp: TyFingerprint, 110 self_ty: TyFingerprint,
148 ) -> impl Iterator<Item = ImplId> + '_ { 111 ) -> impl Iterator<Item = ImplId> + '_ {
149 self.impls_by_trait 112 self.map
150 .get(&tr) 113 .get(&trait_)
151 .and_then(|m| m.get(&Some(fp)))
152 .into_iter() 114 .into_iter()
153 .flatten() 115 .flat_map(move |map| map.get(&None).into_iter().chain(map.get(&Some(self_ty))))
154 .copied() 116 .flat_map(|v| v.iter().copied())
155 .chain( 117 }
156 self.impls_by_trait 118
157 .get(&tr) 119 pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
158 .and_then(|m| m.get(&None)) 120 self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
159 .into_iter() 121 }
160 .flatten() 122}
161 .copied(), 123
162 ) 124/// Inherent impls defined in some crate.
125///
126/// Inherent impls can only be defined in the crate that also defines the self type of the impl
127/// (note that some primitives are considered to be defined by both libcore and liballoc).
128///
129/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a
130/// single crate.
131#[derive(Debug, Eq, PartialEq)]
132pub struct InherentImpls {
133 map: FxHashMap<TyFingerprint, Vec<ImplId>>,
134}
135
136impl InherentImpls {
137 pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
138 let mut map: FxHashMap<_, Vec<_>> = FxHashMap::default();
139
140 let crate_def_map = db.crate_def_map(krate);
141 for (_module_id, module_data) in crate_def_map.modules.iter() {
142 for impl_id in module_data.scope.impls() {
143 let data = db.impl_data(impl_id);
144 if data.target_trait.is_some() {
145 continue;
146 }
147
148 let self_ty = db.impl_self_ty(impl_id);
149 if let Some(fp) = TyFingerprint::for_impl(&self_ty.value) {
150 map.entry(fp).or_default().push(impl_id);
151 }
152 }
153 }
154
155 Arc::new(Self { map })
156 }
157
158 pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] {
159 match TyFingerprint::for_impl(self_ty) {
160 Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]),
161 None => &[],
162 }
163 } 163 }
164 164
165 pub fn all_impls<'a>(&'a self) -> impl Iterator<Item = ImplId> + 'a { 165 pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
166 self.inherent_impls 166 self.map.values().flat_map(|v| v.iter().copied())
167 .values()
168 .chain(self.impls_by_trait.values().flat_map(|m| m.values()))
169 .flatten()
170 .copied()
171 } 167 }
172} 168}
173 169
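The restructuring above splits the old `CrateImplDefs` into two purpose-built maps: `TraitImpls`, keyed first by trait and then by an optional `TyFingerprint` of the self type (a `None` fingerprint marks blanket impls that may apply to any self type), and `InherentImpls`, keyed by fingerprint alone since inherent impls live in the crate that defines the self type. The following is a minimal, self-contained sketch of the trait-impl lookup semantics; the id aliases and the `insert` helper are simplified stand-ins for illustration, not the real `ra_hir_ty` types.

use std::collections::HashMap;

// Simplified stand-ins (not the real ra_hir_ty ids): impls are keyed by trait
// and, optionally, by a fingerprint of their self type. A `None` fingerprint
// models a blanket impl that may apply to any self type.
type TraitId = u32;
type ImplId = u32;
type TyFingerprint = &'static str;

#[derive(Default)]
struct TraitImpls {
    map: HashMap<TraitId, HashMap<Option<TyFingerprint>, Vec<ImplId>>>,
}

impl TraitImpls {
    fn insert(&mut self, trait_: TraitId, fp: Option<TyFingerprint>, impl_: ImplId) {
        self.map.entry(trait_).or_default().entry(fp).or_default().push(impl_);
    }

    /// Impls of `trait_` that may apply to a self type with fingerprint `fp`:
    /// the blanket impls (keyed by `None`) plus the ones registered for `fp`.
    fn for_trait_and_self_ty(&self, trait_: TraitId, fp: TyFingerprint) -> Vec<ImplId> {
        self.map
            .get(&trait_)
            .into_iter()
            .flat_map(|m| m.get(&None).into_iter().chain(m.get(&Some(fp))))
            .flat_map(|v| v.iter().copied())
            .collect()
    }
}

fn main() {
    let mut impls = TraitImpls::default();
    impls.insert(0, Some("Foo"), 1); // impl Trait for Foo
    impls.insert(0, None, 2);        // impl<T> Trait for T (blanket impl)
    impls.insert(0, Some("Bar"), 3); // impl Trait for Bar
    assert_eq!(impls.for_trait_and_self_ty(0, "Foo"), vec![2, 1]);
}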
@@ -377,7 +373,7 @@ fn iterate_method_candidates_with_autoref(
377 return true; 373 return true;
378 } 374 }
379 let refed = Canonical { 375 let refed = Canonical {
380 num_vars: deref_chain[0].num_vars, 376 kinds: deref_chain[0].kinds.clone(),
381 value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()), 377 value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()),
382 }; 378 };
383 if iterate_method_candidates_by_receiver( 379 if iterate_method_candidates_by_receiver(
@@ -393,7 +389,7 @@ fn iterate_method_candidates_with_autoref(
393 return true; 389 return true;
394 } 390 }
395 let ref_muted = Canonical { 391 let ref_muted = Canonical {
396 num_vars: deref_chain[0].num_vars, 392 kinds: deref_chain[0].kinds.clone(),
397 value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()), 393 value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()),
398 }; 394 };
399 if iterate_method_candidates_by_receiver( 395 if iterate_method_candidates_by_receiver(
@@ -524,9 +520,9 @@ fn iterate_inherent_methods(
524 None => return false, 520 None => return false,
525 }; 521 };
526 for krate in def_crates { 522 for krate in def_crates {
527 let impls = db.impls_in_crate(krate); 523 let impls = db.inherent_impls_in_crate(krate);
528 524
529 for impl_def in impls.lookup_impl_defs(&self_ty.value) { 525 for &impl_def in impls.for_self_ty(&self_ty.value) {
530 for &item in db.impl_data(impl_def).items.iter() { 526 for &item in db.impl_data(impl_def).items.iter() {
531 if !is_valid_candidate(db, name, receiver_ty, item, self_ty) { 527 if !is_valid_candidate(db, name, receiver_ty, item, self_ty) {
532 continue; 528 continue;
@@ -612,18 +608,19 @@ pub(crate) fn inherent_impl_substs(
612 // we create a var for each type parameter of the impl; we need to keep in 608 // we create a var for each type parameter of the impl; we need to keep in
613 // mind here that `self_ty` might have vars of its own 609 // mind here that `self_ty` might have vars of its own
614 let vars = Substs::build_for_def(db, impl_id) 610 let vars = Substs::build_for_def(db, impl_id)
615 .fill_with_bound_vars(DebruijnIndex::INNERMOST, self_ty.num_vars) 611 .fill_with_bound_vars(DebruijnIndex::INNERMOST, self_ty.kinds.len())
616 .build(); 612 .build();
617 let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars); 613 let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars);
618 let self_ty_with_vars = 614 let mut kinds = self_ty.kinds.to_vec();
619 Canonical { num_vars: vars.len() + self_ty.num_vars, value: self_ty_with_vars }; 615 kinds.extend(iter::repeat(TyKind::General).take(vars.len()));
620 let substs = super::infer::unify(&self_ty_with_vars, self_ty); 616 let tys = Canonical { kinds: kinds.into(), value: (self_ty_with_vars, self_ty.value.clone()) };
617 let substs = super::infer::unify(&tys);
621 // We only want the substs for the vars we added, not the ones from self_ty. 618 // We only want the substs for the vars we added, not the ones from self_ty.
622 // Also, if any of the vars we added are still in there, we replace them by 619 // Also, if any of the vars we added are still in there, we replace them by
623 // Unknown. I think this can only really happen if self_ty contained 620 // Unknown. I think this can only really happen if self_ty contained
624 // Unknown, and in that case we want the result to contain Unknown in those 621 // Unknown, and in that case we want the result to contain Unknown in those
625 // places again. 622 // places again.
626 substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.num_vars)) 623 substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.kinds.len()))
627} 624}
628 625
629/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past 626/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past
@@ -683,15 +680,15 @@ fn generic_implements_goal(
683 trait_: TraitId, 680 trait_: TraitId,
684 self_ty: Canonical<Ty>, 681 self_ty: Canonical<Ty>,
685) -> Canonical<InEnvironment<super::Obligation>> { 682) -> Canonical<InEnvironment<super::Obligation>> {
686 let num_vars = self_ty.num_vars; 683 let mut kinds = self_ty.kinds.to_vec();
687 let substs = super::Substs::build_for_def(db, trait_) 684 let substs = super::Substs::build_for_def(db, trait_)
688 .push(self_ty.value) 685 .push(self_ty.value)
689 .fill_with_bound_vars(DebruijnIndex::INNERMOST, num_vars) 686 .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
690 .build(); 687 .build();
691 let num_vars = substs.len() - 1 + self_ty.num_vars; 688 kinds.extend(iter::repeat(TyKind::General).take(substs.len() - 1));
692 let trait_ref = TraitRef { trait_, substs }; 689 let trait_ref = TraitRef { trait_, substs };
693 let obligation = super::Obligation::Trait(trait_ref); 690 let obligation = super::Obligation::Trait(trait_ref);
694 Canonical { num_vars, value: InEnvironment::new(env, obligation) } 691 Canonical { kinds: kinds.into(), value: InEnvironment::new(env, obligation) }
695} 692}
696 693
697fn autoderef_method_receiver( 694fn autoderef_method_receiver(
@@ -704,9 +701,9 @@ fn autoderef_method_receiver(
704 if let Some(Ty::Apply(ApplicationTy { ctor: TypeCtor::Array, parameters })) = 701 if let Some(Ty::Apply(ApplicationTy { ctor: TypeCtor::Array, parameters })) =
705 deref_chain.last().map(|ty| &ty.value) 702 deref_chain.last().map(|ty| &ty.value)
706 { 703 {
707 let num_vars = deref_chain.last().unwrap().num_vars; 704 let kinds = deref_chain.last().unwrap().kinds.clone();
708 let unsized_ty = Ty::apply(TypeCtor::Slice, parameters.clone()); 705 let unsized_ty = Ty::apply(TypeCtor::Slice, parameters.clone());
709 deref_chain.push(Canonical { value: unsized_ty, num_vars }) 706 deref_chain.push(Canonical { value: unsized_ty, kinds })
710 } 707 }
711 deref_chain 708 deref_chain
712} 709}
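The later hunks in this file follow from `Canonical` now recording a `TyKind` per bound variable instead of a bare `num_vars` count: wherever fresh variables are introduced, as in `inherent_impl_substs` and `generic_implements_goal`, the kinds list is extended with `TyKind::General`. Below is a small self-contained model of that pattern, using stand-in definitions rather than the real `ra_hir_ty` types.

use std::iter;

// Stand-in definitions for illustration only: a canonicalized value carries
// one kind per bound variable, so integer/float inference variables keep
// their kind through canonicalization.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum TyKind {
    General,
    Integer,
    Float,
}

struct Canonical<T> {
    kinds: Vec<TyKind>,
    value: T,
}

// Mirrors the pattern in `generic_implements_goal`: keep the self type's
// kinds and append one `General` kind for every fresh variable introduced
// for the trait's remaining parameters.
fn add_fresh_vars<T>(canonical: Canonical<T>, fresh_vars: usize) -> Canonical<T> {
    let mut kinds = canonical.kinds;
    kinds.extend(iter::repeat(TyKind::General).take(fresh_vars));
    Canonical { kinds, value: canonical.value }
}

fn main() {
    let self_ty = Canonical { kinds: vec![TyKind::Integer], value: "?int" };
    let goal = add_fresh_vars(self_ty, 2);
    assert_eq!(goal.kinds, vec![TyKind::Integer, TyKind::General, TyKind::General]);
}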
diff --git a/crates/ra_hir_ty/src/tests.rs b/crates/ra_hir_ty/src/tests.rs
index 9084c3bed..eeac34d14 100644
--- a/crates/ra_hir_ty/src/tests.rs
+++ b/crates/ra_hir_ty/src/tests.rs
@@ -508,6 +508,30 @@ fn no_such_field_with_feature_flag_diagnostics_on_struct_fields() {
508} 508}
509 509
510#[test] 510#[test]
511fn no_such_field_with_type_macro() {
512 let diagnostics = TestDB::with_files(
513 r"
514 macro_rules! Type {
515 () => { u32 };
516 }
517
518 struct Foo {
519 bar: Type![],
520 }
521 impl Foo {
522 fn new() -> Self {
523 Foo { bar: 0 }
524 }
525 }
526 ",
527 )
528 .diagnostics()
529 .0;
530
531 assert_snapshot!(diagnostics, @r###""###);
532}
533
534#[test]
511fn missing_record_pat_field_diagnostic() { 535fn missing_record_pat_field_diagnostic() {
512 let diagnostics = TestDB::with_files( 536 let diagnostics = TestDB::with_files(
513 r" 537 r"
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs
index 01c919a7e..766790576 100644
--- a/crates/ra_hir_ty/src/tests/traits.rs
+++ b/crates/ra_hir_ty/src/tests/traits.rs
@@ -3029,3 +3029,21 @@ fn infer_dyn_fn_output() {
3029 "### 3029 "###
3030 ); 3030 );
3031} 3031}
3032
3033#[test]
3034fn variable_kinds() {
3035 check_types(
3036 r#"
3037trait Trait<T> { fn get(self, t: T) -> T; }
3038struct S;
3039impl Trait<u128> for S {}
3040impl Trait<f32> for S {}
3041fn test() {
3042 S.get(1);
3043 //^^^^^^^^ u128
3044 S.get(1.);
3045 //^^^^^^^^ f32
3046}
3047 "#,
3048 );
3049}
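The new `variable_kinds` test is the reason each canonical variable's kind now has to be tracked: an unsuffixed `1` is an integer inference variable and `1.` a float one, and only with that distinction can trait selection pick `Trait<u128>` versus `Trait<f32>` for `S`. Below is a standalone version of the same scenario with the method bodies filled in so that it compiles under plain rustc; it mirrors the test fixture, not rust-analyzer internals.

// Because `1` can only be an integer type and `1.` only a float type, each
// call has exactly one applicable impl, and inference settles on u128 / f32.
trait Trait<T> {
    fn get(self, t: T) -> T;
}

struct S;

impl Trait<u128> for S {
    fn get(self, t: u128) -> u128 {
        t
    }
}

impl Trait<f32> for S {
    fn get(self, t: f32) -> f32 {
        t
    }
}

fn main() {
    let a = S.get(1); // inferred as u128
    let b = S.get(1.); // inferred as f32
    assert_eq!(a, 1u128);
    assert_eq!(b, 1.0f32);
}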
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs
index 6f43c3a22..2a6d7faef 100644
--- a/crates/ra_hir_ty/src/traits.rs
+++ b/crates/ra_hir_ty/src/traits.rs
@@ -1,5 +1,5 @@
1//! Trait solving using Chalk. 1//! Trait solving using Chalk.
2use std::{panic, sync::Arc}; 2use std::sync::Arc;
3 3
4use chalk_ir::cast::Cast; 4use chalk_ir::cast::Cast;
5use hir_def::{ 5use hir_def::{
@@ -8,7 +8,7 @@ use hir_def::{
8use ra_db::{impl_intern_key, salsa, CrateId}; 8use ra_db::{impl_intern_key, salsa, CrateId};
9use ra_prof::profile; 9use ra_prof::profile;
10 10
11use crate::{db::HirDatabase, DebruijnIndex}; 11use crate::{db::HirDatabase, DebruijnIndex, Substs};
12 12
13use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk}; 13use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk};
14 14
@@ -190,15 +190,7 @@ fn solution_from_chalk(
190 solution: chalk_solve::Solution<Interner>, 190 solution: chalk_solve::Solution<Interner>,
191) -> Solution { 191) -> Solution {
192 let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| { 192 let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| {
193 let value = subst 193 let result = from_chalk(db, subst);
194 .value
195 .iter(&Interner)
196 .map(|p| match p.ty(&Interner) {
197 Some(ty) => from_chalk(db, ty.clone()),
198 None => unimplemented!(),
199 })
200 .collect();
201 let result = Canonical { value, num_vars: subst.binders.len(&Interner) };
202 SolutionVariables(result) 194 SolutionVariables(result)
203 }; 195 };
204 match solution { 196 match solution {
@@ -222,7 +214,7 @@ fn solution_from_chalk(
222} 214}
223 215
224#[derive(Clone, Debug, PartialEq, Eq)] 216#[derive(Clone, Debug, PartialEq, Eq)]
225pub struct SolutionVariables(pub Canonical<Vec<Ty>>); 217pub struct SolutionVariables(pub Canonical<Substs>);
226 218
227#[derive(Clone, Debug, PartialEq, Eq)] 219#[derive(Clone, Debug, PartialEq, Eq)]
228/// A (possible) solution for a proposed goal. 220/// A (possible) solution for a proposed goal.
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs
index 8ef4941c0..c97b81d57 100644
--- a/crates/ra_hir_ty/src/traits/chalk.rs
+++ b/crates/ra_hir_ty/src/traits/chalk.rs
@@ -77,8 +77,8 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
77 // Note: Since we're using impls_for_trait, only impls where the trait 77 // Note: Since we're using impls_for_trait, only impls where the trait
78 // can be resolved should ever reach Chalk. `impl_datum` relies on that 78 // can be resolved should ever reach Chalk. `impl_datum` relies on that
79 // and will panic if the trait can't be resolved. 79 // and will panic if the trait can't be resolved.
80 let in_deps = self.db.impls_from_deps(self.krate); 80 let in_deps = self.db.trait_impls_in_deps(self.krate);
81 let in_self = self.db.impls_in_crate(self.krate); 81 let in_self = self.db.trait_impls_in_crate(self.krate);
82 let impl_maps = [in_deps, in_self]; 82 let impl_maps = [in_deps, in_self];
83 83
84 let id_to_chalk = |id: hir_def::ImplId| Impl::ImplDef(id).to_chalk(self.db); 84 let id_to_chalk = |id: hir_def::ImplId| Impl::ImplDef(id).to_chalk(self.db);
@@ -87,14 +87,12 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
87 Some(fp) => impl_maps 87 Some(fp) => impl_maps
88 .iter() 88 .iter()
89 .flat_map(|crate_impl_defs| { 89 .flat_map(|crate_impl_defs| {
90 crate_impl_defs.lookup_impl_defs_for_trait_and_ty(trait_, fp).map(id_to_chalk) 90 crate_impl_defs.for_trait_and_self_ty(trait_, fp).map(id_to_chalk)
91 }) 91 })
92 .collect(), 92 .collect(),
93 None => impl_maps 93 None => impl_maps
94 .iter() 94 .iter()
95 .flat_map(|crate_impl_defs| { 95 .flat_map(|crate_impl_defs| crate_impl_defs.for_trait(trait_).map(id_to_chalk))
96 crate_impl_defs.lookup_impl_defs_for_trait(trait_).map(id_to_chalk)
97 })
98 .collect(), 96 .collect(),
99 }; 97 };
100 98
diff --git a/crates/ra_hir_ty/src/traits/chalk/mapping.rs b/crates/ra_hir_ty/src/traits/chalk/mapping.rs
index ac82ea831..433d6aa03 100644
--- a/crates/ra_hir_ty/src/traits/chalk/mapping.rs
+++ b/crates/ra_hir_ty/src/traits/chalk/mapping.rs
@@ -17,7 +17,7 @@ use crate::{
17 primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness}, 17 primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness},
18 traits::{builtin, AssocTyValue, Canonical, Impl, Obligation}, 18 traits::{builtin, AssocTyValue, Canonical, Impl, Obligation},
19 ApplicationTy, CallableDef, GenericPredicate, InEnvironment, OpaqueTy, OpaqueTyId, 19 ApplicationTy, CallableDef, GenericPredicate, InEnvironment, OpaqueTy, OpaqueTyId,
20 ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, 20 ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TyKind, TypeCtor,
21}; 21};
22 22
23use super::interner::*; 23use super::interner::*;
@@ -555,22 +555,39 @@ where
555 type Chalk = chalk_ir::Canonical<T::Chalk>; 555 type Chalk = chalk_ir::Canonical<T::Chalk>;
556 556
557 fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> { 557 fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> {
558 let parameter = chalk_ir::CanonicalVarKind::new( 558 let kinds = self
559 chalk_ir::VariableKind::Ty(chalk_ir::TyKind::General), 559 .kinds
560 chalk_ir::UniverseIndex::ROOT, 560 .iter()
561 ); 561 .map(|k| match k {
562 TyKind::General => chalk_ir::TyKind::General,
563 TyKind::Integer => chalk_ir::TyKind::Integer,
564 TyKind::Float => chalk_ir::TyKind::Float,
565 })
566 .map(|tk| {
567 chalk_ir::CanonicalVarKind::new(
568 chalk_ir::VariableKind::Ty(tk),
569 chalk_ir::UniverseIndex::ROOT,
570 )
571 });
562 let value = self.value.to_chalk(db); 572 let value = self.value.to_chalk(db);
563 chalk_ir::Canonical { 573 chalk_ir::Canonical { value, binders: chalk_ir::CanonicalVarKinds::from(&Interner, kinds) }
564 value,
565 binders: chalk_ir::CanonicalVarKinds::from(&Interner, vec![parameter; self.num_vars]),
566 }
567 } 574 }
568 575
569 fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> { 576 fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> {
570 Canonical { 577 let kinds = canonical
571 num_vars: canonical.binders.len(&Interner), 578 .binders
572 value: from_chalk(db, canonical.value), 579 .iter(&Interner)
573 } 580 .map(|k| match k.kind {
581 chalk_ir::VariableKind::Ty(tk) => match tk {
582 chalk_ir::TyKind::General => TyKind::General,
583 chalk_ir::TyKind::Integer => TyKind::Integer,
584 chalk_ir::TyKind::Float => TyKind::Float,
585 },
586 chalk_ir::VariableKind::Lifetime => panic!("unexpected lifetime from Chalk"),
587 chalk_ir::VariableKind::Const(_) => panic!("unexpected const from Chalk"),
588 })
589 .collect();
590 Canonical { kinds, value: from_chalk(db, canonical.value) }
574 } 591 }
575} 592}
576 593
diff --git a/crates/ra_ide/src/call_hierarchy.rs b/crates/ra_ide/src/call_hierarchy.rs
index 1e3a31602..884353808 100644
--- a/crates/ra_ide/src/call_hierarchy.rs
+++ b/crates/ra_ide/src/call_hierarchy.rs
@@ -39,10 +39,11 @@ pub(crate) fn call_hierarchy(
39 39
40pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> { 40pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
41 let sema = Semantics::new(db); 41 let sema = Semantics::new(db);
42
42 // 1. Find all refs 43 // 1. Find all refs
43 // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply. 44 // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply.
44 // 3. Add ranges relative to the start of the fndef. 45 // 3. Add ranges relative to the start of the fndef.
45 let refs = references::find_all_refs(db, position, None)?; 46 let refs = references::find_all_refs(&sema, position, None)?;
46 47
47 let mut calls = CallLocations::default(); 48 let mut calls = CallLocations::default();
48 49
@@ -355,4 +356,41 @@ fn caller3() {
355 &["caller3 FN_DEF FileId(1) 66..83 69..76 : [52..59]"], 356 &["caller3 FN_DEF FileId(1) 66..83 69..76 : [52..59]"],
356 ); 357 );
357 } 358 }
359
360 #[test]
361 fn test_call_hierarchy_issue_5103() {
362 check_hierarchy(
363 r#"
364fn a() {
365 b()
366}
367
368fn b() {}
369
370fn main() {
371 a<|>()
372}
373"#,
374 "a FN_DEF FileId(1) 0..18 3..4",
375 &["main FN_DEF FileId(1) 31..52 34..38 : [47..48]"],
376 &["b FN_DEF FileId(1) 20..29 23..24 : [13..14]"],
377 );
378
379 check_hierarchy(
380 r#"
381fn a() {
382 b<|>()
383}
384
385fn b() {}
386
387fn main() {
388 a()
389}
390"#,
391 "b FN_DEF FileId(1) 20..29 23..24",
392 &["a FN_DEF FileId(1) 0..18 3..4 : [13..14]"],
393 &[],
394 );
395 }
358} 396}
diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs
index e1fcf379d..9ebb8ebb7 100644
--- a/crates/ra_ide/src/completion.rs
+++ b/crates/ra_ide/src/completion.rs
@@ -63,11 +63,11 @@ pub use crate::completion::{
63// There also snippet completions: 63// There also snippet completions:
64// 64//
65// .Expressions 65// .Expressions
66// - `pd` -> `println!("{:?}")` 66// - `pd` -> `eprintln!(" = {:?}", );")`
67// - `ppd` -> `println!("{:#?}")` 67// - `ppd` -> `eprintln!(" = {:#?}", );`
68// 68//
69// .Items 69// .Items
70// - `tfn` -> `#[test] fn f(){}` 70// - `tfn` -> `#[test] fn feature(){}`
71// - `tmod` -> 71// - `tmod` ->
72// ```rust 72// ```rust
73// #[cfg(test)] 73// #[cfg(test)]
@@ -75,7 +75,7 @@ pub use crate::completion::{
75// use super::*; 75// use super::*;
76// 76//
77// #[test] 77// #[test]
78// fn test_fn() {} 78// fn test_name() {}
79// } 79// }
80// ``` 80// ```
81 81
@@ -137,8 +137,8 @@ mod tests {
137 documentation: &'a str, 137 documentation: &'a str,
138 } 138 }
139 139
140 fn check_detail_and_documentation(fixture: &str, expected: DetailAndDocumentation) { 140 fn check_detail_and_documentation(ra_fixture: &str, expected: DetailAndDocumentation) {
141 let (analysis, position) = analysis_and_position(fixture); 141 let (analysis, position) = analysis_and_position(ra_fixture);
142 let config = CompletionConfig::default(); 142 let config = CompletionConfig::default();
143 let completions = analysis.completions(&config, position).unwrap().unwrap(); 143 let completions = analysis.completions(&config, position).unwrap().unwrap();
144 for item in completions { 144 for item in completions {
diff --git a/crates/ra_ide/src/completion/complete_keyword.rs b/crates/ra_ide/src/completion/complete_keyword.rs
index 3b174f916..e599cc3d1 100644
--- a/crates/ra_ide/src/completion/complete_keyword.rs
+++ b/crates/ra_ide/src/completion/complete_keyword.rs
@@ -1,6 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use ra_syntax::ast; 3use ra_syntax::{ast, SyntaxKind};
4 4
5use crate::completion::{ 5use crate::completion::{
6 CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions, 6 CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions,
@@ -37,6 +37,10 @@ pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionC
37} 37}
38 38
39pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) { 39pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) {
40 if ctx.token.kind() == SyntaxKind::COMMENT {
41 return;
42 }
43
40 let has_trait_or_impl_parent = ctx.has_impl_parent || ctx.has_trait_parent; 44 let has_trait_or_impl_parent = ctx.has_impl_parent || ctx.has_trait_parent;
41 if ctx.trait_as_prev_sibling || ctx.impl_as_prev_sibling { 45 if ctx.trait_as_prev_sibling || ctx.impl_as_prev_sibling {
42 add_keyword(ctx, acc, "where", "where "); 46 add_keyword(ctx, acc, "where", "where ");
diff --git a/crates/ra_ide/src/completion/presentation.rs b/crates/ra_ide/src/completion/presentation.rs
index 4fdc2f0bb..b18279746 100644
--- a/crates/ra_ide/src/completion/presentation.rs
+++ b/crates/ra_ide/src/completion/presentation.rs
@@ -1516,4 +1516,54 @@ mod tests {
1516 "### 1516 "###
1517 ); 1517 );
1518 } 1518 }
1519
1520 #[test]
1521 fn no_keyword_autocompletion_on_line_comments() {
1522 assert_debug_snapshot!(
1523 do_completion(
1524 r"
1525 fn test() {
1526 let x = 2; // A comment<|>
1527 }
1528 ",
1529 CompletionKind::Keyword
1530 ),
1531 @r###"
1532 []
1533 "###
1534 );
1535 }
1536
1537 #[test]
1538 fn no_keyword_autocompletion_on_multi_line_comments() {
1539 assert_debug_snapshot!(
1540 do_completion(
1541 r"
1542 /*
1543 Some multi-line comment<|>
1544 */
1545 ",
1546 CompletionKind::Keyword
1547 ),
1548 @r###"
1549 []
1550 "###
1551 );
1552 }
1553
1554 #[test]
1555 fn no_keyword_autocompletion_on_doc_comments() {
1556 assert_debug_snapshot!(
1557 do_completion(
1558 r"
1559 /// Some doc comment
1560 /// let test<|> = 1
1561 ",
1562 CompletionKind::Keyword
1563 ),
1564 @r###"
1565 []
1566 "###
1567 );
1568 }
1519} 1569}
diff --git a/crates/ra_ide/src/diagnostics.rs b/crates/ra_ide/src/diagnostics.rs
index 05fb799d6..46f8c31c7 100644
--- a/crates/ra_ide/src/diagnostics.rs
+++ b/crates/ra_ide/src/diagnostics.rs
@@ -324,10 +324,10 @@ mod tests {
324 /// * a diagnostic is produced 324 /// * a diagnostic is produced
325 /// * this diagnostic touches the input cursor position 325 /// * this diagnostic touches the input cursor position
326 /// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied 326 /// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied
327 fn check_apply_diagnostic_fix_from_position(fixture: &str, after: &str) { 327 fn check_apply_diagnostic_fix_from_position(ra_fixture: &str, after: &str) {
328 let after = trim_indent(after); 328 let after = trim_indent(after);
329 329
330 let (analysis, file_position) = analysis_and_position(fixture); 330 let (analysis, file_position) = analysis_and_position(ra_fixture);
331 let diagnostic = analysis.diagnostics(file_position.file_id).unwrap().pop().unwrap(); 331 let diagnostic = analysis.diagnostics(file_position.file_id).unwrap().pop().unwrap();
332 let mut fix = diagnostic.fix.unwrap(); 332 let mut fix = diagnostic.fix.unwrap();
333 let edit = fix.source_change.source_file_edits.pop().unwrap().edit; 333 let edit = fix.source_change.source_file_edits.pop().unwrap().edit;
@@ -365,14 +365,14 @@ mod tests {
365 365
366 /// Takes a multi-file input fixture with annotated cursor position and checks that no diagnostics 366 /// Takes a multi-file input fixture with annotated cursor position and checks that no diagnostics
367 /// apply to the file containing the cursor. 367 /// apply to the file containing the cursor.
368 fn check_no_diagnostic_for_target_file(fixture: &str) { 368 fn check_no_diagnostic_for_target_file(ra_fixture: &str) {
369 let (analysis, file_position) = analysis_and_position(fixture); 369 let (analysis, file_position) = analysis_and_position(ra_fixture);
370 let diagnostics = analysis.diagnostics(file_position.file_id).unwrap(); 370 let diagnostics = analysis.diagnostics(file_position.file_id).unwrap();
371 assert_eq!(diagnostics.len(), 0); 371 assert_eq!(diagnostics.len(), 0);
372 } 372 }
373 373
374 fn check_no_diagnostic(content: &str) { 374 fn check_no_diagnostic(ra_fixture: &str) {
375 let (analysis, file_id) = single_file(content); 375 let (analysis, file_id) = single_file(ra_fixture);
376 let diagnostics = analysis.diagnostics(file_id).unwrap(); 376 let diagnostics = analysis.diagnostics(file_id).unwrap();
377 assert_eq!(diagnostics.len(), 0, "expected no diagnostic, found one"); 377 assert_eq!(diagnostics.len(), 0, "expected no diagnostic, found one");
378 } 378 }
@@ -473,7 +473,8 @@ mod tests {
473 473
474 #[test] 474 #[test]
475 fn test_wrap_return_type_not_applicable_when_expr_type_does_not_match_ok_type() { 475 fn test_wrap_return_type_not_applicable_when_expr_type_does_not_match_ok_type() {
476 let content = r#" 476 check_no_diagnostic_for_target_file(
477 r"
477 //- /main.rs 478 //- /main.rs
478 use core::result::Result::{self, Ok, Err}; 479 use core::result::Result::{self, Ok, Err};
479 480
@@ -485,13 +486,14 @@ mod tests {
485 pub mod result { 486 pub mod result {
486 pub enum Result<T, E> { Ok(T), Err(E) } 487 pub enum Result<T, E> { Ok(T), Err(E) }
487 } 488 }
488 "#; 489 ",
489 check_no_diagnostic_for_target_file(content); 490 );
490 } 491 }
491 492
492 #[test] 493 #[test]
493 fn test_wrap_return_type_not_applicable_when_return_type_is_not_result() { 494 fn test_wrap_return_type_not_applicable_when_return_type_is_not_result() {
494 let content = r#" 495 check_no_diagnostic_for_target_file(
496 r"
495 //- /main.rs 497 //- /main.rs
496 use core::result::Result::{self, Ok, Err}; 498 use core::result::Result::{self, Ok, Err};
497 499
@@ -508,8 +510,8 @@ mod tests {
508 pub mod result { 510 pub mod result {
509 pub enum Result<T, E> { Ok(T), Err(E) } 511 pub enum Result<T, E> { Ok(T), Err(E) }
510 } 512 }
511 "#; 513 ",
512 check_no_diagnostic_for_target_file(content); 514 );
513 } 515 }
514 516
515 #[test] 517 #[test]
@@ -618,7 +620,8 @@ mod tests {
618 620
619 #[test] 621 #[test]
620 fn test_fill_struct_fields_no_diagnostic() { 622 fn test_fill_struct_fields_no_diagnostic() {
621 let content = r" 623 check_no_diagnostic(
624 r"
622 struct TestStruct { 625 struct TestStruct {
623 one: i32, 626 one: i32,
624 two: i64, 627 two: i64,
@@ -628,14 +631,14 @@ mod tests {
628 let one = 1; 631 let one = 1;
629 let s = TestStruct{ one, two: 2 }; 632 let s = TestStruct{ one, two: 2 };
630 } 633 }
631 "; 634 ",
632 635 );
633 check_no_diagnostic(content);
634 } 636 }
635 637
636 #[test] 638 #[test]
637 fn test_fill_struct_fields_no_diagnostic_on_spread() { 639 fn test_fill_struct_fields_no_diagnostic_on_spread() {
638 let content = r" 640 check_no_diagnostic(
641 r"
639 struct TestStruct { 642 struct TestStruct {
640 one: i32, 643 one: i32,
641 two: i64, 644 two: i64,
@@ -645,9 +648,8 @@ mod tests {
645 let one = 1; 648 let one = 1;
646 let s = TestStruct{ ..a }; 649 let s = TestStruct{ ..a };
647 } 650 }
648 "; 651 ",
649 652 );
650 check_no_diagnostic(content);
651 } 653 }
652 654
653 #[test] 655 #[test]
diff --git a/crates/ra_ide/src/display/structure.rs b/crates/ra_ide/src/display/structure.rs
index aad5a8e4d..c22a5d17b 100644
--- a/crates/ra_ide/src/display/structure.rs
+++ b/crates/ra_ide/src/display/structure.rs
@@ -173,12 +173,19 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
173 173
174#[cfg(test)] 174#[cfg(test)]
175mod tests { 175mod tests {
176 use expect::{expect, Expect};
177
176 use super::*; 178 use super::*;
177 use insta::assert_debug_snapshot; 179
180 fn check(ra_fixture: &str, expect: Expect) {
181 let file = SourceFile::parse(ra_fixture).ok().unwrap();
182 let structure = file_structure(&file);
183 expect.assert_debug_eq(&structure)
184 }
178 185
179 #[test] 186 #[test]
180 fn test_file_structure() { 187 fn test_file_structure() {
181 let file = SourceFile::parse( 188 check(
182 r#" 189 r#"
183struct Foo { 190struct Foo {
184 x: i32 191 x: i32
@@ -223,216 +230,211 @@ fn obsolete() {}
223#[deprecated(note = "for awhile")] 230#[deprecated(note = "for awhile")]
224fn very_obsolete() {} 231fn very_obsolete() {}
225"#, 232"#,
226 ) 233 expect![[r#"
227 .ok() 234 [
228 .unwrap(); 235 StructureNode {
229 let structure = file_structure(&file); 236 parent: None,
230 assert_debug_snapshot!(structure, 237 label: "Foo",
231 @r###" 238 navigation_range: 8..11,
232 [ 239 node_range: 1..26,
233 StructureNode { 240 kind: STRUCT_DEF,
234 parent: None, 241 detail: None,
235 label: "Foo", 242 deprecated: false,
236 navigation_range: 8..11, 243 },
237 node_range: 1..26, 244 StructureNode {
238 kind: STRUCT_DEF, 245 parent: Some(
239 detail: None, 246 0,
240 deprecated: false, 247 ),
241 }, 248 label: "x",
242 StructureNode { 249 navigation_range: 18..19,
243 parent: Some( 250 node_range: 18..24,
244 0, 251 kind: RECORD_FIELD_DEF,
245 ), 252 detail: Some(
246 label: "x", 253 "i32",
247 navigation_range: 18..19, 254 ),
248 node_range: 18..24, 255 deprecated: false,
249 kind: RECORD_FIELD_DEF, 256 },
250 detail: Some( 257 StructureNode {
251 "i32", 258 parent: None,
252 ), 259 label: "m",
253 deprecated: false, 260 navigation_range: 32..33,
254 }, 261 node_range: 28..158,
255 StructureNode { 262 kind: MODULE,
256 parent: None, 263 detail: None,
257 label: "m", 264 deprecated: false,
258 navigation_range: 32..33, 265 },
259 node_range: 28..158, 266 StructureNode {
260 kind: MODULE, 267 parent: Some(
261 detail: None, 268 2,
262 deprecated: false, 269 ),
263 }, 270 label: "bar1",
264 StructureNode { 271 navigation_range: 43..47,
265 parent: Some( 272 node_range: 40..52,
266 2, 273 kind: FN_DEF,
267 ), 274 detail: Some(
268 label: "bar1", 275 "fn()",
269 navigation_range: 43..47, 276 ),
270 node_range: 40..52, 277 deprecated: false,
271 kind: FN_DEF, 278 },
272 detail: Some( 279 StructureNode {
273 "fn()", 280 parent: Some(
274 ), 281 2,
275 deprecated: false, 282 ),
276 }, 283 label: "bar2",
277 StructureNode { 284 navigation_range: 60..64,
278 parent: Some( 285 node_range: 57..81,
279 2, 286 kind: FN_DEF,
280 ), 287 detail: Some(
281 label: "bar2", 288 "fn<T>(t: T) -> T",
282 navigation_range: 60..64, 289 ),
283 node_range: 57..81, 290 deprecated: false,
284 kind: FN_DEF, 291 },
285 detail: Some( 292 StructureNode {
286 "fn<T>(t: T) -> T", 293 parent: Some(
287 ), 294 2,
288 deprecated: false, 295 ),
289 }, 296 label: "bar3",
290 StructureNode { 297 navigation_range: 89..93,
291 parent: Some( 298 node_range: 86..156,
292 2, 299 kind: FN_DEF,
293 ), 300 detail: Some(
294 label: "bar3", 301 "fn<A, B>(a: A, b: B) -> Vec< u32 >",
295 navigation_range: 89..93, 302 ),
296 node_range: 86..156, 303 deprecated: false,
297 kind: FN_DEF, 304 },
298 detail: Some( 305 StructureNode {
299 "fn<A, B>(a: A, b: B) -> Vec< u32 >", 306 parent: None,
300 ), 307 label: "E",
301 deprecated: false, 308 navigation_range: 165..166,
302 }, 309 node_range: 160..180,
303 StructureNode { 310 kind: ENUM_DEF,
304 parent: None, 311 detail: None,
305 label: "E", 312 deprecated: false,
306 navigation_range: 165..166, 313 },
307 node_range: 160..180, 314 StructureNode {
308 kind: ENUM_DEF, 315 parent: Some(
309 detail: None, 316 6,
310 deprecated: false, 317 ),
311 }, 318 label: "X",
312 StructureNode { 319 navigation_range: 169..170,
313 parent: Some( 320 node_range: 169..170,
314 6, 321 kind: ENUM_VARIANT,
315 ), 322 detail: None,
316 label: "X", 323 deprecated: false,
317 navigation_range: 169..170, 324 },
318 node_range: 169..170, 325 StructureNode {
319 kind: ENUM_VARIANT, 326 parent: Some(
320 detail: None, 327 6,
321 deprecated: false, 328 ),
322 }, 329 label: "Y",
323 StructureNode { 330 navigation_range: 172..173,
324 parent: Some( 331 node_range: 172..178,
325 6, 332 kind: ENUM_VARIANT,
326 ), 333 detail: None,
327 label: "Y", 334 deprecated: false,
328 navigation_range: 172..173, 335 },
329 node_range: 172..178, 336 StructureNode {
330 kind: ENUM_VARIANT, 337 parent: None,
331 detail: None, 338 label: "T",
332 deprecated: false, 339 navigation_range: 186..187,
333 }, 340 node_range: 181..193,
334 StructureNode { 341 kind: TYPE_ALIAS_DEF,
335 parent: None, 342 detail: Some(
336 label: "T", 343 "()",
337 navigation_range: 186..187, 344 ),
338 node_range: 181..193, 345 deprecated: false,
339 kind: TYPE_ALIAS_DEF, 346 },
340 detail: Some( 347 StructureNode {
341 "()", 348 parent: None,
342 ), 349 label: "S",
343 deprecated: false, 350 navigation_range: 201..202,
344 }, 351 node_range: 194..213,
345 StructureNode { 352 kind: STATIC_DEF,
346 parent: None, 353 detail: Some(
347 label: "S", 354 "i32",
348 navigation_range: 201..202, 355 ),
349 node_range: 194..213, 356 deprecated: false,
350 kind: STATIC_DEF, 357 },
351 detail: Some( 358 StructureNode {
352 "i32", 359 parent: None,
353 ), 360 label: "C",
354 deprecated: false, 361 navigation_range: 220..221,
355 }, 362 node_range: 214..232,
356 StructureNode { 363 kind: CONST_DEF,
357 parent: None, 364 detail: Some(
358 label: "C", 365 "i32",
359 navigation_range: 220..221, 366 ),
360 node_range: 214..232, 367 deprecated: false,
361 kind: CONST_DEF, 368 },
362 detail: Some( 369 StructureNode {
363 "i32", 370 parent: None,
364 ), 371 label: "impl E",
365 deprecated: false, 372 navigation_range: 239..240,
366 }, 373 node_range: 234..243,
367 StructureNode { 374 kind: IMPL_DEF,
368 parent: None, 375 detail: None,
369 label: "impl E", 376 deprecated: false,
370 navigation_range: 239..240, 377 },
371 node_range: 234..243, 378 StructureNode {
372 kind: IMPL_DEF, 379 parent: None,
373 detail: None, 380 label: "impl fmt::Debug for E",
374 deprecated: false, 381 navigation_range: 265..266,
375 }, 382 node_range: 245..269,
376 StructureNode { 383 kind: IMPL_DEF,
377 parent: None, 384 detail: None,
378 label: "impl fmt::Debug for E", 385 deprecated: false,
379 navigation_range: 265..266, 386 },
380 node_range: 245..269, 387 StructureNode {
381 kind: IMPL_DEF, 388 parent: None,
382 detail: None, 389 label: "mc",
383 deprecated: false, 390 navigation_range: 284..286,
384 }, 391 node_range: 271..303,
385 StructureNode { 392 kind: MACRO_CALL,
386 parent: None, 393 detail: None,
387 label: "mc", 394 deprecated: false,
388 navigation_range: 284..286, 395 },
389 node_range: 271..303, 396 StructureNode {
390 kind: MACRO_CALL, 397 parent: None,
391 detail: None, 398 label: "mcexp",
392 deprecated: false, 399 navigation_range: 334..339,
393 }, 400 node_range: 305..356,
394 StructureNode { 401 kind: MACRO_CALL,
395 parent: None, 402 detail: None,
396 label: "mcexp", 403 deprecated: false,
397 navigation_range: 334..339, 404 },
398 node_range: 305..356, 405 StructureNode {
399 kind: MACRO_CALL, 406 parent: None,
400 detail: None, 407 label: "mcexp",
401 deprecated: false, 408 navigation_range: 387..392,
402 }, 409 node_range: 358..409,
403 StructureNode { 410 kind: MACRO_CALL,
404 parent: None, 411 detail: None,
405 label: "mcexp", 412 deprecated: false,
406 navigation_range: 387..392, 413 },
407 node_range: 358..409, 414 StructureNode {
408 kind: MACRO_CALL, 415 parent: None,
409 detail: None, 416 label: "obsolete",
410 deprecated: false, 417 navigation_range: 428..436,
411 }, 418 node_range: 411..441,
412 StructureNode { 419 kind: FN_DEF,
413 parent: None, 420 detail: Some(
414 label: "obsolete", 421 "fn()",
415 navigation_range: 428..436, 422 ),
416 node_range: 411..441, 423 deprecated: true,
417 kind: FN_DEF, 424 },
418 detail: Some( 425 StructureNode {
419 "fn()", 426 parent: None,
420 ), 427 label: "very_obsolete",
421 deprecated: true, 428 navigation_range: 481..494,
422 }, 429 node_range: 443..499,
423 StructureNode { 430 kind: FN_DEF,
424 parent: None, 431 detail: Some(
425 label: "very_obsolete", 432 "fn()",
426 navigation_range: 481..494, 433 ),
427 node_range: 443..499, 434 deprecated: true,
428 kind: FN_DEF, 435 },
429 detail: Some( 436 ]
430 "fn()", 437 "#]],
431 ), 438 );
432 deprecated: true,
433 },
434 ]
435 "###
436 );
437 } 439 }
438} 440}
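This test (like the `expand_macro` tests below) moves from `insta` snapshots to the in-repo `expect` crate: the expected output is embedded in the test via `expect![[...]]` and compared with `Expect::assert_eq` or `assert_debug_eq`, and stale expectations can be rewritten in place by re-running the tests in update mode. A minimal sketch of that style follows; it assumes `expect` is available as a dev-dependency, the `check` logic is a placeholder, and the exact update mechanism (conventionally an `UPDATE_EXPECT` environment variable) is an assumption rather than something stated in this diff.

// Minimal sketch of an expect-style test (assumptions noted above): the
// expectation lives inline and can be updated in place instead of being
// edited by hand.
use expect::{expect, Expect};

fn check(input: &str, expect: Expect) {
    // Placeholder for a real analysis step: just uppercase the input.
    let actual = input.to_uppercase();
    expect.assert_eq(&actual);
}

#[test]
fn smoke_test() {
    check("fn main() {}", expect![[r#"FN MAIN() {}"#]]);
}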
diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs
index 54a47aac0..043515f54 100644
--- a/crates/ra_ide/src/expand_macro.rs
+++ b/crates/ra_ide/src/expand_macro.rs
@@ -2,7 +2,9 @@ use hir::Semantics;
2use ra_ide_db::RootDatabase; 2use ra_ide_db::RootDatabase;
3use ra_syntax::{ 3use ra_syntax::{
4 algo::{find_node_at_offset, SyntaxRewriter}, 4 algo::{find_node_at_offset, SyntaxRewriter},
5 ast, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, WalkEvent, T, 5 ast, AstNode, NodeOrToken, SyntaxKind,
6 SyntaxKind::*,
7 SyntaxNode, WalkEvent, T,
6}; 8};
7 9
8use crate::FilePosition; 10use crate::FilePosition;
@@ -65,8 +67,6 @@ fn expand_macro_recur(
65// FIXME: It would also be cool to share logic here and in the mbe tests, 67// FIXME: It would also be cool to share logic here and in the mbe tests,
66// which are pretty unreadable at the moment. 68// which are pretty unreadable at the moment.
67fn insert_whitespaces(syn: SyntaxNode) -> String { 69fn insert_whitespaces(syn: SyntaxNode) -> String {
68 use SyntaxKind::*;
69
70 let mut res = String::new(); 70 let mut res = String::new();
71 let mut token_iter = syn 71 let mut token_iter = syn
72 .preorder_with_tokens() 72 .preorder_with_tokens()
@@ -120,175 +120,164 @@ fn insert_whitespaces(syn: SyntaxNode) -> String {
120 120
121#[cfg(test)] 121#[cfg(test)]
122mod tests { 122mod tests {
123 use insta::assert_snapshot; 123 use expect::{expect, Expect};
124 124
125 use crate::mock_analysis::analysis_and_position; 125 use crate::mock_analysis::analysis_and_position;
126 126
127 use super::*; 127 fn check(ra_fixture: &str, expect: Expect) {
128 128 let (analysis, pos) = analysis_and_position(ra_fixture);
129 fn check_expand_macro(fixture: &str) -> ExpandedMacro { 129 let expansion = analysis.expand_macro(pos).unwrap().unwrap();
130 let (analysis, pos) = analysis_and_position(fixture); 130 let actual = format!("{}\n{}", expansion.name, expansion.expansion);
131 analysis.expand_macro(pos).unwrap().unwrap() 131 expect.assert_eq(&actual);
132 } 132 }
133 133
134 #[test] 134 #[test]
135 fn macro_expand_recursive_expansion() { 135 fn macro_expand_recursive_expansion() {
136 let res = check_expand_macro( 136 check(
137 r#" 137 r#"
138 //- /lib.rs 138macro_rules! bar {
139 macro_rules! bar { 139 () => { fn b() {} }
140 () => { fn b() {} } 140}
141 } 141macro_rules! foo {
142 macro_rules! foo { 142 () => { bar!(); }
143 () => { bar!(); } 143}
144 } 144macro_rules! baz {
145 macro_rules! baz { 145 () => { foo!(); }
146 () => { foo!(); } 146}
147 } 147f<|>oo!();
148 f<|>oo!(); 148"#,
149 "#, 149 expect![[r#"
150 foo
151 fn b(){}
152 "#]],
150 ); 153 );
151
152 assert_eq!(res.name, "foo");
153 assert_snapshot!(res.expansion, @r###"
154fn b(){}
155"###);
156 } 154 }
157 155
158 #[test] 156 #[test]
159 fn macro_expand_multiple_lines() { 157 fn macro_expand_multiple_lines() {
160 let res = check_expand_macro( 158 check(
161 r#" 159 r#"
162 //- /lib.rs 160macro_rules! foo {
163 macro_rules! foo { 161 () => {
164 () => { 162 fn some_thing() -> u32 {
165 fn some_thing() -> u32 { 163 let a = 0;
166 let a = 0; 164 a + 10
167 a + 10
168 }
169 }
170 } 165 }
171 f<|>oo!(); 166 }
167}
168f<|>oo!();
172 "#, 169 "#,
170 expect![[r#"
171 foo
172 fn some_thing() -> u32 {
173 let a = 0;
174 a+10
175 }"#]],
173 ); 176 );
174
175 assert_eq!(res.name, "foo");
176 assert_snapshot!(res.expansion, @r###"
177fn some_thing() -> u32 {
178 let a = 0;
179 a+10
180}
181"###);
182 } 177 }
183 178
184 #[test] 179 #[test]
185 fn macro_expand_match_ast() { 180 fn macro_expand_match_ast() {
186 let res = check_expand_macro( 181 check(
187 r#" 182 r#"
188 //- /lib.rs 183macro_rules! match_ast {
189 macro_rules! match_ast { 184 (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
190 (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; 185 (match ($node:expr) {
186 $( ast::$ast:ident($it:ident) => $res:block, )*
187 _ => $catch_all:expr $(,)?
188 }) => {{
189 $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )*
190 { $catch_all }
191 }};
192}
191 193
192 (match ($node:expr) { 194fn main() {
193 $( ast::$ast:ident($it:ident) => $res:block, )* 195 mat<|>ch_ast! {
194 _ => $catch_all:expr $(,)? 196 match container {
195 }) => {{ 197 ast::TraitDef(it) => {},
196 $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )* 198 ast::ImplDef(it) => {},
197 { $catch_all } 199 _ => { continue },
198 }};
199 } 200 }
200
201 fn main() {
202 mat<|>ch_ast! {
203 match container {
204 ast::TraitDef(it) => {},
205 ast::ImplDef(it) => {},
206 _ => { continue },
207 }
208 }
209 }
210 "#,
211 );
212
213 assert_eq!(res.name, "match_ast");
214 assert_snapshot!(res.expansion, @r###"
215{
216 if let Some(it) = ast::TraitDef::cast(container.clone()){}
217 else if let Some(it) = ast::ImplDef::cast(container.clone()){}
218 else {
219 {
220 continue
221 } 201 }
222 }
223} 202}
224"###); 203"#,
204 expect![[r#"
205 match_ast
206 {
207 if let Some(it) = ast::TraitDef::cast(container.clone()){}
208 else if let Some(it) = ast::ImplDef::cast(container.clone()){}
209 else {
210 {
211 continue
212 }
213 }
214 }"#]],
215 );
225 } 216 }
226 217
227 #[test] 218 #[test]
228 fn macro_expand_match_ast_inside_let_statement() { 219 fn macro_expand_match_ast_inside_let_statement() {
229 let res = check_expand_macro( 220 check(
230 r#" 221 r#"
231 //- /lib.rs 222macro_rules! match_ast {
232 macro_rules! match_ast { 223 (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
233 (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; 224 (match ($node:expr) {}) => {{}};
234 (match ($node:expr) {}) => {{}}; 225}
235 }
236 226
237 fn main() { 227fn main() {
238 let p = f(|it| { 228 let p = f(|it| {
239 let res = mat<|>ch_ast! { match c {}}; 229 let res = mat<|>ch_ast! { match c {}};
240 Some(res) 230 Some(res)
241 })?; 231 })?;
242 } 232}
243 "#, 233"#,
234 expect![[r#"
235 match_ast
236 {}
237 "#]],
244 ); 238 );
245
246 assert_eq!(res.name, "match_ast");
247 assert_snapshot!(res.expansion, @r###"{}"###);
248 } 239 }
249 240
250 #[test] 241 #[test]
251 fn macro_expand_inner_macro_fail_to_expand() { 242 fn macro_expand_inner_macro_fail_to_expand() {
252 let res = check_expand_macro( 243 check(
253 r#" 244 r#"
254 //- /lib.rs 245macro_rules! bar {
255 macro_rules! bar { 246 (BAD) => {};
256 (BAD) => {}; 247}
257 } 248macro_rules! foo {
258 macro_rules! foo { 249 () => {bar!()};
259 () => {bar!()}; 250}
260 }
261 251
262 fn main() { 252fn main() {
263 let res = fo<|>o!(); 253 let res = fo<|>o!();
264 } 254}
265 "#, 255"#,
256 expect![[r#"
257 foo
258 "#]],
266 ); 259 );
267
268 assert_eq!(res.name, "foo");
269 assert_snapshot!(res.expansion, @r###""###);
270 } 260 }
271 261
272 #[test] 262 #[test]
273 fn macro_expand_with_dollar_crate() { 263 fn macro_expand_with_dollar_crate() {
274 let res = check_expand_macro( 264 check(
275 r#" 265 r#"
276 //- /lib.rs 266#[macro_export]
277 #[macro_export] 267macro_rules! bar {
278 macro_rules! bar { 268 () => {0};
279 () => {0}; 269}
280 } 270macro_rules! foo {
281 macro_rules! foo { 271 () => {$crate::bar!()};
282 () => {$crate::bar!()}; 272}
283 }
284 273
285 fn main() { 274fn main() {
286 let res = fo<|>o!(); 275 let res = fo<|>o!();
287 } 276}
288 "#, 277"#,
278 expect![[r#"
279 foo
280 0 "#]],
289 ); 281 );
290
291 assert_eq!(res.name, "foo");
292 assert_snapshot!(res.expansion, @r###"0"###);
293 } 282 }
294} 283}
diff --git a/crates/ra_ide/src/folding_ranges.rs b/crates/ra_ide/src/folding_ranges.rs
index 8657377de..5cec689f8 100644
--- a/crates/ra_ide/src/folding_ranges.rs
+++ b/crates/ra_ide/src/folding_ranges.rs
@@ -15,6 +15,7 @@ pub enum FoldKind {
15 Imports, 15 Imports,
16 Mods, 16 Mods,
17 Block, 17 Block,
18 ArgList,
18} 19}
19 20
20#[derive(Debug)] 21#[derive(Debug)]
@@ -83,6 +84,7 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
83 match kind { 84 match kind {
84 COMMENT => Some(FoldKind::Comment), 85 COMMENT => Some(FoldKind::Comment),
85 USE_ITEM => Some(FoldKind::Imports), 86 USE_ITEM => Some(FoldKind::Imports),
87 ARG_LIST => Some(FoldKind::ArgList),
86 RECORD_FIELD_DEF_LIST 88 RECORD_FIELD_DEF_LIST
87 | RECORD_FIELD_PAT_LIST 89 | RECORD_FIELD_PAT_LIST
88 | ITEM_LIST 90 | ITEM_LIST
@@ -196,89 +198,85 @@ fn contiguous_range_for_comment(
196 198
197#[cfg(test)] 199#[cfg(test)]
198mod tests { 200mod tests {
201 use test_utils::extract_tags;
202
199 use super::*; 203 use super::*;
200 use test_utils::extract_ranges;
201 204
202 fn do_check(text: &str, fold_kinds: &[FoldKind]) { 205 fn check(ra_fixture: &str) {
203 let (ranges, text) = extract_ranges(text, "fold"); 206 let (ranges, text) = extract_tags(ra_fixture, "fold");
207
204 let parse = SourceFile::parse(&text); 208 let parse = SourceFile::parse(&text);
205 let folds = folding_ranges(&parse.tree()); 209 let folds = folding_ranges(&parse.tree());
206
207 assert_eq!( 210 assert_eq!(
208 folds.len(), 211 folds.len(),
209 ranges.len(), 212 ranges.len(),
210 "The amount of folds is different than the expected amount" 213 "The amount of folds is different than the expected amount"
211 ); 214 );
212 assert_eq!( 215
213 folds.len(), 216 for (fold, (range, attr)) in folds.iter().zip(ranges.into_iter()) {
214 fold_kinds.len(),
215 "The amount of fold kinds is different than the expected amount"
216 );
217 for ((fold, range), fold_kind) in
218 folds.iter().zip(ranges.into_iter()).zip(fold_kinds.iter())
219 {
220 assert_eq!(fold.range.start(), range.start()); 217 assert_eq!(fold.range.start(), range.start());
221 assert_eq!(fold.range.end(), range.end()); 218 assert_eq!(fold.range.end(), range.end());
222 assert_eq!(&fold.kind, fold_kind); 219
220 let kind = match fold.kind {
221 FoldKind::Comment => "comment",
222 FoldKind::Imports => "imports",
223 FoldKind::Mods => "mods",
224 FoldKind::Block => "block",
225 FoldKind::ArgList => "arglist",
226 };
227 assert_eq!(kind, &attr.unwrap());
223 } 228 }
224 } 229 }
225 230
226 #[test] 231 #[test]
227 fn test_fold_comments() { 232 fn test_fold_comments() {
228 let text = r#" 233 check(
229<fold>// Hello 234 r#"
235<fold comment>// Hello
230// this is a multiline 236// this is a multiline
231// comment 237// comment
232//</fold> 238//</fold>
233 239
234// But this is not 240// But this is not
235 241
236fn main() <fold>{ 242fn main() <fold block>{
237 <fold>// We should 243 <fold comment>// We should
238 // also 244 // also
239 // fold 245 // fold
240 // this one.</fold> 246 // this one.</fold>
241 <fold>//! But this one is different 247 <fold comment>//! But this one is different
242 //! because it has another flavor</fold> 248 //! because it has another flavor</fold>
243 <fold>/* As does this 249 <fold comment>/* As does this
244 multiline comment */</fold> 250 multiline comment */</fold>
245}</fold>"#; 251}</fold>"#,
246 252 );
247 let fold_kinds = &[
248 FoldKind::Comment,
249 FoldKind::Block,
250 FoldKind::Comment,
251 FoldKind::Comment,
252 FoldKind::Comment,
253 ];
254 do_check(text, fold_kinds);
255 } 253 }
256 254
257 #[test] 255 #[test]
258 fn test_fold_imports() { 256 fn test_fold_imports() {
259 let text = r#" 257 check(
260<fold>use std::<fold>{ 258 r#"
259<fold imports>use std::<fold block>{
261 str, 260 str,
262 vec, 261 vec,
263 io as iop 262 io as iop
264}</fold>;</fold> 263}</fold>;</fold>
265 264
266fn main() <fold>{ 265fn main() <fold block>{
267}</fold>"#; 266}</fold>"#,
268 267 );
269 let folds = &[FoldKind::Imports, FoldKind::Block, FoldKind::Block];
270 do_check(text, folds);
271 } 268 }
272 269
273 #[test] 270 #[test]
274 fn test_fold_mods() { 271 fn test_fold_mods() {
275 let text = r#" 272 check(
273 r#"
276 274
277pub mod foo; 275pub mod foo;
278<fold>mod after_pub; 276<fold mods>mod after_pub;
279mod after_pub_next;</fold> 277mod after_pub_next;</fold>
280 278
281<fold>mod before_pub; 279<fold mods>mod before_pub;
282mod before_pub_next;</fold> 280mod before_pub_next;</fold>
283pub mod bar; 281pub mod bar;
284 282
@@ -286,90 +284,93 @@ mod not_folding_single;
286pub mod foobar; 284pub mod foobar;
287pub not_folding_single_next; 285pub not_folding_single_next;
288 286
289<fold>#[cfg(test)] 287<fold mods>#[cfg(test)]
290mod with_attribute; 288mod with_attribute;
291mod with_attribute_next;</fold> 289mod with_attribute_next;</fold>
292 290
293fn main() <fold>{ 291fn main() <fold block>{
294}</fold>"#; 292}</fold>"#,
295 293 );
296 let folds = &[FoldKind::Mods, FoldKind::Mods, FoldKind::Mods, FoldKind::Block];
297 do_check(text, folds);
298 } 294 }
299 295
300 #[test] 296 #[test]
301 fn test_fold_import_groups() { 297 fn test_fold_import_groups() {
302 let text = r#" 298 check(
303<fold>use std::str; 299 r#"
300<fold imports>use std::str;
304use std::vec; 301use std::vec;
305use std::io as iop;</fold> 302use std::io as iop;</fold>
306 303
307<fold>use std::mem; 304<fold imports>use std::mem;
308use std::f64;</fold> 305use std::f64;</fold>
309 306
310use std::collections::HashMap; 307use std::collections::HashMap;
311// Some random comment 308// Some random comment
312use std::collections::VecDeque; 309use std::collections::VecDeque;
313 310
314fn main() <fold>{ 311fn main() <fold block>{
315}</fold>"#; 312}</fold>"#,
316 313 );
317 let folds = &[FoldKind::Imports, FoldKind::Imports, FoldKind::Block];
318 do_check(text, folds);
319 } 314 }
320 315
321 #[test] 316 #[test]
322 fn test_fold_import_and_groups() { 317 fn test_fold_import_and_groups() {
323 let text = r#" 318 check(
324<fold>use std::str; 319 r#"
320<fold imports>use std::str;
325use std::vec; 321use std::vec;
326use std::io as iop;</fold> 322use std::io as iop;</fold>
327 323
328<fold>use std::mem; 324<fold imports>use std::mem;
329use std::f64;</fold> 325use std::f64;</fold>
330 326
331<fold>use std::collections::<fold>{ 327<fold imports>use std::collections::<fold block>{
332 HashMap, 328 HashMap,
333 VecDeque, 329 VecDeque,
334}</fold>;</fold> 330}</fold>;</fold>
335// Some random comment 331// Some random comment
336 332
337fn main() <fold>{ 333fn main() <fold block>{
338}</fold>"#; 334}</fold>"#,
339 335 );
340 let folds = &[
341 FoldKind::Imports,
342 FoldKind::Imports,
343 FoldKind::Imports,
344 FoldKind::Block,
345 FoldKind::Block,
346 ];
347 do_check(text, folds);
348 } 336 }
349 337
350 #[test] 338 #[test]
351 fn test_folds_macros() { 339 fn test_folds_macros() {
352 let text = r#" 340 check(
353macro_rules! foo <fold>{ 341 r#"
342macro_rules! foo <fold block>{
354 ($($tt:tt)*) => { $($tt)* } 343 ($($tt:tt)*) => { $($tt)* }
355}</fold> 344}</fold>
356"#; 345"#,
357 346 );
358 let folds = &[FoldKind::Block];
359 do_check(text, folds);
360 } 347 }
361 348
362 #[test] 349 #[test]
363 fn test_fold_match_arms() { 350 fn test_fold_match_arms() {
364 let text = r#" 351 check(
365fn main() <fold>{ 352 r#"
366 match 0 <fold>{ 353fn main() <fold block>{
354 match 0 <fold block>{
367 0 => 0, 355 0 => 0,
368 _ => 1, 356 _ => 1,
369 }</fold> 357 }</fold>
370}</fold>"#; 358}</fold>"#,
359 );
360 }
371 361
372 let folds = &[FoldKind::Block, FoldKind::Block]; 362 #[test]
373 do_check(text, folds); 363 fn fold_big_calls() {
364 check(
365 r#"
366fn main() <fold block>{
367 frobnicate<fold arglist>(
368 1,
369 2,
370 3,
371 )</fold>
372}</fold>
373 "#,
374 )
374 } 375 }
375} 376}
diff --git a/crates/ra_ide/src/goto_implementation.rs b/crates/ra_ide/src/goto_implementation.rs
index 99a7022a4..9acc960fc 100644
--- a/crates/ra_ide/src/goto_implementation.rs
+++ b/crates/ra_ide/src/goto_implementation.rs
@@ -219,6 +219,10 @@ impl T for &Foo {}
219 #[derive(Copy)] 219 #[derive(Copy)]
220//^^^^^^^^^^^^^^^ 220//^^^^^^^^^^^^^^^
221struct Foo<|>; 221struct Foo<|>;
222
223mod marker {
224 trait Copy {}
225}
222"#, 226"#,
223 ); 227 );
224 } 228 }
diff --git a/crates/ra_ide/src/goto_type_definition.rs b/crates/ra_ide/src/goto_type_definition.rs
index 91a3097fb..7eb40d637 100644
--- a/crates/ra_ide/src/goto_type_definition.rs
+++ b/crates/ra_ide/src/goto_type_definition.rs
@@ -55,8 +55,8 @@ fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> {
55mod tests { 55mod tests {
56 use crate::mock_analysis::analysis_and_position; 56 use crate::mock_analysis::analysis_and_position;
57 57
58 fn check_goto(fixture: &str, expected: &str) { 58 fn check_goto(ra_fixture: &str, expected: &str) {
59 let (analysis, pos) = analysis_and_position(fixture); 59 let (analysis, pos) = analysis_and_position(ra_fixture);
60 60
61 let mut navs = analysis.goto_type_definition(pos).unwrap().unwrap().info; 61 let mut navs = analysis.goto_type_definition(pos).unwrap().unwrap().info;
62 assert_eq!(navs.len(), 1); 62 assert_eq!(navs.len(), 1);
@@ -67,7 +67,7 @@ mod tests {
67 #[test] 67 #[test]
68 fn goto_type_definition_works_simple() { 68 fn goto_type_definition_works_simple() {
69 check_goto( 69 check_goto(
70 " 70 r"
71 //- /lib.rs 71 //- /lib.rs
72 struct Foo; 72 struct Foo;
73 fn foo() { 73 fn foo() {
@@ -82,7 +82,7 @@ mod tests {
82 #[test] 82 #[test]
83 fn goto_type_definition_works_simple_ref() { 83 fn goto_type_definition_works_simple_ref() {
84 check_goto( 84 check_goto(
85 " 85 r"
86 //- /lib.rs 86 //- /lib.rs
87 struct Foo; 87 struct Foo;
88 fn foo() { 88 fn foo() {
@@ -97,7 +97,7 @@ mod tests {
97 #[test] 97 #[test]
98 fn goto_type_definition_works_through_macro() { 98 fn goto_type_definition_works_through_macro() {
99 check_goto( 99 check_goto(
100 " 100 r"
101 //- /lib.rs 101 //- /lib.rs
102 macro_rules! id { 102 macro_rules! id {
103 ($($tt:tt)*) => { $($tt)* } 103 ($($tt:tt)*) => { $($tt)* }
@@ -116,7 +116,7 @@ mod tests {
116 #[test] 116 #[test]
117 fn goto_type_definition_for_param() { 117 fn goto_type_definition_for_param() {
118 check_goto( 118 check_goto(
119 " 119 r"
120 //- /lib.rs 120 //- /lib.rs
121 struct Foo; 121 struct Foo;
122 fn foo(<|>f: Foo) {} 122 fn foo(<|>f: Foo) {}
@@ -128,7 +128,7 @@ mod tests {
128 #[test] 128 #[test]
129 fn goto_type_definition_for_tuple_field() { 129 fn goto_type_definition_for_tuple_field() {
130 check_goto( 130 check_goto(
131 " 131 r"
132 //- /lib.rs 132 //- /lib.rs
133 struct Foo; 133 struct Foo;
134 struct Bar(Foo); 134 struct Bar(Foo);
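Aside on the `fixture` → `ra_fixture` rename here (and in the hover and proc-macro test hunks below): the parameter name is load-bearing. rust-analyzer injects Rust highlighting and analysis into string literals passed to a parameter called `ra_fixture`, which is what the `highlight_injection` snapshot touched later in this diff exercises; the switch to `r"..."` merely avoids escape processing. A hypothetical helper to show where the name matters:

```
// The difference is in the editor, not at test run time.
fn check_goto(ra_fixture: &str, expected: &str) {
    // Because the parameter is named `ra_fixture`, rust-analyzer treats the string
    // literal passed here as an embedded Rust fixture; a parameter named `fixture`
    // would get no such treatment.
    let _ = (ra_fixture, expected);
}
```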
diff --git a/crates/ra_ide/src/hover.rs b/crates/ra_ide/src/hover.rs
index c3e36a387..eaba2b61e 100644
--- a/crates/ra_ide/src/hover.rs
+++ b/crates/ra_ide/src/hover.rs
@@ -417,8 +417,8 @@ mod tests {
417 assert_eq!(offset, position.into()); 417 assert_eq!(offset, position.into());
418 } 418 }
419 419
420 fn check_hover_result(fixture: &str, expected: &[&str]) -> (String, Vec<HoverAction>) { 420 fn check_hover_result(ra_fixture: &str, expected: &[&str]) -> (String, Vec<HoverAction>) {
421 let (analysis, position) = analysis_and_position(fixture); 421 let (analysis, position) = analysis_and_position(ra_fixture);
422 let hover = analysis.hover(position).unwrap().unwrap(); 422 let hover = analysis.hover(position).unwrap().unwrap();
423 let mut results = Vec::from(hover.info.results()); 423 let mut results = Vec::from(hover.info.results());
424 results.sort(); 424 results.sort();
@@ -435,8 +435,8 @@ mod tests {
435 (content[hover.range].to_string(), hover.info.actions().to_vec()) 435 (content[hover.range].to_string(), hover.info.actions().to_vec())
436 } 436 }
437 437
438 fn check_hover_no_result(fixture: &str) { 438 fn check_hover_no_result(ra_fixture: &str) {
439 let (analysis, position) = analysis_and_position(fixture); 439 let (analysis, position) = analysis_and_position(ra_fixture);
440 assert!(analysis.hover(position).unwrap().is_none()); 440 assert!(analysis.hover(position).unwrap().is_none());
441 } 441 }
442 442
@@ -923,7 +923,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
923 #[test] 923 #[test]
924 fn test_hover_through_macro() { 924 fn test_hover_through_macro() {
925 let (hover_on, _) = check_hover_result( 925 let (hover_on, _) = check_hover_result(
926 " 926 r"
927 //- /lib.rs 927 //- /lib.rs
928 macro_rules! id { 928 macro_rules! id {
929 ($($tt:tt)*) => { $($tt)* } 929 ($($tt:tt)*) => { $($tt)* }
@@ -944,7 +944,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
944 #[test] 944 #[test]
945 fn test_hover_through_expr_in_macro() { 945 fn test_hover_through_expr_in_macro() {
946 let (hover_on, _) = check_hover_result( 946 let (hover_on, _) = check_hover_result(
947 " 947 r"
948 //- /lib.rs 948 //- /lib.rs
949 macro_rules! id { 949 macro_rules! id {
950 ($($tt:tt)*) => { $($tt)* } 950 ($($tt:tt)*) => { $($tt)* }
@@ -962,7 +962,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
962 #[test] 962 #[test]
963 fn test_hover_through_expr_in_macro_recursive() { 963 fn test_hover_through_expr_in_macro_recursive() {
964 let (hover_on, _) = check_hover_result( 964 let (hover_on, _) = check_hover_result(
965 " 965 r"
966 //- /lib.rs 966 //- /lib.rs
967 macro_rules! id_deep { 967 macro_rules! id_deep {
968 ($($tt:tt)*) => { $($tt)* } 968 ($($tt:tt)*) => { $($tt)* }
@@ -983,7 +983,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
983 #[test] 983 #[test]
984 fn test_hover_through_func_in_macro_recursive() { 984 fn test_hover_through_func_in_macro_recursive() {
985 let (hover_on, _) = check_hover_result( 985 let (hover_on, _) = check_hover_result(
986 " 986 r"
987 //- /lib.rs 987 //- /lib.rs
988 macro_rules! id_deep { 988 macro_rules! id_deep {
989 ($($tt:tt)*) => { $($tt)* } 989 ($($tt:tt)*) => { $($tt)* }
@@ -1026,7 +1026,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1026 #[test] 1026 #[test]
1027 fn test_hover_through_assert_macro() { 1027 fn test_hover_through_assert_macro() {
1028 let (hover_on, _) = check_hover_result( 1028 let (hover_on, _) = check_hover_result(
1029 r#" 1029 r"
1030 //- /lib.rs 1030 //- /lib.rs
1031 #[rustc_builtin_macro] 1031 #[rustc_builtin_macro]
1032 macro_rules! assert {} 1032 macro_rules! assert {}
@@ -1035,7 +1035,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1035 fn foo() { 1035 fn foo() {
1036 assert!(ba<|>r()); 1036 assert!(ba<|>r());
1037 } 1037 }
1038 "#, 1038 ",
1039 &["fn bar() -> bool"], 1039 &["fn bar() -> bool"],
1040 ); 1040 );
1041 1041
@@ -1077,14 +1077,14 @@ fn func(foo: i32) { if true { <|>foo; }; }
1077 #[test] 1077 #[test]
1078 fn test_hover_function_show_qualifiers() { 1078 fn test_hover_function_show_qualifiers() {
1079 check_hover_result( 1079 check_hover_result(
1080 " 1080 r"
1081 //- /lib.rs 1081 //- /lib.rs
1082 async fn foo<|>() {} 1082 async fn foo<|>() {}
1083 ", 1083 ",
1084 &["async fn foo()"], 1084 &["async fn foo()"],
1085 ); 1085 );
1086 check_hover_result( 1086 check_hover_result(
1087 " 1087 r"
1088 //- /lib.rs 1088 //- /lib.rs
1089 pub const unsafe fn foo<|>() {} 1089 pub const unsafe fn foo<|>() {}
1090 ", 1090 ",
@@ -1102,7 +1102,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1102 #[test] 1102 #[test]
1103 fn test_hover_trait_show_qualifiers() { 1103 fn test_hover_trait_show_qualifiers() {
1104 let (_, actions) = check_hover_result( 1104 let (_, actions) = check_hover_result(
1105 " 1105 r"
1106 //- /lib.rs 1106 //- /lib.rs
1107 unsafe trait foo<|>() {} 1107 unsafe trait foo<|>() {}
1108 ", 1108 ",
@@ -1114,7 +1114,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1114 #[test] 1114 #[test]
1115 fn test_hover_mod_with_same_name_as_function() { 1115 fn test_hover_mod_with_same_name_as_function() {
1116 check_hover_result( 1116 check_hover_result(
1117 " 1117 r"
1118 //- /lib.rs 1118 //- /lib.rs
1119 use self::m<|>y::Bar; 1119 use self::m<|>y::Bar;
1120 1120
@@ -1237,7 +1237,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1237 #[test] 1237 #[test]
1238 fn test_hover_trait_has_impl_action() { 1238 fn test_hover_trait_has_impl_action() {
1239 let (_, actions) = check_hover_result( 1239 let (_, actions) = check_hover_result(
1240 " 1240 r"
1241 //- /lib.rs 1241 //- /lib.rs
1242 trait foo<|>() {} 1242 trait foo<|>() {}
1243 ", 1243 ",
@@ -1249,7 +1249,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1249 #[test] 1249 #[test]
1250 fn test_hover_struct_has_impl_action() { 1250 fn test_hover_struct_has_impl_action() {
1251 let (_, actions) = check_hover_result( 1251 let (_, actions) = check_hover_result(
1252 " 1252 r"
1253 //- /lib.rs 1253 //- /lib.rs
1254 struct foo<|>() {} 1254 struct foo<|>() {}
1255 ", 1255 ",
@@ -1261,7 +1261,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1261 #[test] 1261 #[test]
1262 fn test_hover_union_has_impl_action() { 1262 fn test_hover_union_has_impl_action() {
1263 let (_, actions) = check_hover_result( 1263 let (_, actions) = check_hover_result(
1264 " 1264 r"
1265 //- /lib.rs 1265 //- /lib.rs
1266 union foo<|>() {} 1266 union foo<|>() {}
1267 ", 1267 ",
@@ -1273,7 +1273,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1273 #[test] 1273 #[test]
1274 fn test_hover_enum_has_impl_action() { 1274 fn test_hover_enum_has_impl_action() {
1275 let (_, actions) = check_hover_result( 1275 let (_, actions) = check_hover_result(
1276 " 1276 r"
1277 //- /lib.rs 1277 //- /lib.rs
1278 enum foo<|>() { 1278 enum foo<|>() {
1279 A, 1279 A,
@@ -1288,7 +1288,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1288 #[test] 1288 #[test]
1289 fn test_hover_test_has_action() { 1289 fn test_hover_test_has_action() {
1290 let (_, actions) = check_hover_result( 1290 let (_, actions) = check_hover_result(
1291 " 1291 r"
1292 //- /lib.rs 1292 //- /lib.rs
1293 #[test] 1293 #[test]
1294 fn foo_<|>test() {} 1294 fn foo_<|>test() {}
@@ -1332,7 +1332,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1332 #[test] 1332 #[test]
1333 fn test_hover_test_mod_has_action() { 1333 fn test_hover_test_mod_has_action() {
1334 let (_, actions) = check_hover_result( 1334 let (_, actions) = check_hover_result(
1335 " 1335 r"
1336 //- /lib.rs 1336 //- /lib.rs
1337 mod tests<|> { 1337 mod tests<|> {
1338 #[test] 1338 #[test]
@@ -1373,7 +1373,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1373 #[test] 1373 #[test]
1374 fn test_hover_struct_has_goto_type_action() { 1374 fn test_hover_struct_has_goto_type_action() {
1375 let (_, actions) = check_hover_result( 1375 let (_, actions) = check_hover_result(
1376 " 1376 r"
1377 //- /main.rs 1377 //- /main.rs
1378 struct S{ f1: u32 } 1378 struct S{ f1: u32 }
1379 1379
@@ -1416,7 +1416,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1416 #[test] 1416 #[test]
1417 fn test_hover_generic_struct_has_goto_type_actions() { 1417 fn test_hover_generic_struct_has_goto_type_actions() {
1418 let (_, actions) = check_hover_result( 1418 let (_, actions) = check_hover_result(
1419 " 1419 r"
1420 //- /main.rs 1420 //- /main.rs
1421 struct Arg(u32); 1421 struct Arg(u32);
1422 struct S<T>{ f1: T } 1422 struct S<T>{ f1: T }
@@ -1479,7 +1479,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1479 #[test] 1479 #[test]
1480 fn test_hover_generic_struct_has_flattened_goto_type_actions() { 1480 fn test_hover_generic_struct_has_flattened_goto_type_actions() {
1481 let (_, actions) = check_hover_result( 1481 let (_, actions) = check_hover_result(
1482 " 1482 r"
1483 //- /main.rs 1483 //- /main.rs
1484 struct Arg(u32); 1484 struct Arg(u32);
1485 struct S<T>{ f1: T } 1485 struct S<T>{ f1: T }
@@ -1542,7 +1542,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1542 #[test] 1542 #[test]
1543 fn test_hover_tuple_has_goto_type_actions() { 1543 fn test_hover_tuple_has_goto_type_actions() {
1544 let (_, actions) = check_hover_result( 1544 let (_, actions) = check_hover_result(
1545 " 1545 r"
1546 //- /main.rs 1546 //- /main.rs
1547 struct A(u32); 1547 struct A(u32);
1548 struct B(u32); 1548 struct B(u32);
@@ -1627,7 +1627,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1627 #[test] 1627 #[test]
1628 fn test_hover_return_impl_trait_has_goto_type_action() { 1628 fn test_hover_return_impl_trait_has_goto_type_action() {
1629 let (_, actions) = check_hover_result( 1629 let (_, actions) = check_hover_result(
1630 " 1630 r"
1631 //- /main.rs 1631 //- /main.rs
1632 trait Foo {} 1632 trait Foo {}
1633 1633
@@ -1672,7 +1672,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1672 #[test] 1672 #[test]
1673 fn test_hover_generic_return_impl_trait_has_goto_type_action() { 1673 fn test_hover_generic_return_impl_trait_has_goto_type_action() {
1674 let (_, actions) = check_hover_result( 1674 let (_, actions) = check_hover_result(
1675 " 1675 r"
1676 //- /main.rs 1676 //- /main.rs
1677 trait Foo<T> {} 1677 trait Foo<T> {}
1678 struct S; 1678 struct S;
@@ -1737,7 +1737,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1737 #[test] 1737 #[test]
1738 fn test_hover_return_impl_traits_has_goto_type_action() { 1738 fn test_hover_return_impl_traits_has_goto_type_action() {
1739 let (_, actions) = check_hover_result( 1739 let (_, actions) = check_hover_result(
1740 " 1740 r"
1741 //- /main.rs 1741 //- /main.rs
1742 trait Foo {} 1742 trait Foo {}
1743 trait Bar {} 1743 trait Bar {}
@@ -1802,7 +1802,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1802 #[test] 1802 #[test]
1803 fn test_hover_generic_return_impl_traits_has_goto_type_action() { 1803 fn test_hover_generic_return_impl_traits_has_goto_type_action() {
1804 let (_, actions) = check_hover_result( 1804 let (_, actions) = check_hover_result(
1805 " 1805 r"
1806 //- /main.rs 1806 //- /main.rs
1807 trait Foo<T> {} 1807 trait Foo<T> {}
1808 trait Bar<T> {} 1808 trait Bar<T> {}
@@ -1907,7 +1907,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1907 #[test] 1907 #[test]
1908 fn test_hover_arg_impl_trait_has_goto_type_action() { 1908 fn test_hover_arg_impl_trait_has_goto_type_action() {
1909 let (_, actions) = check_hover_result( 1909 let (_, actions) = check_hover_result(
1910 " 1910 r"
1911 //- /lib.rs 1911 //- /lib.rs
1912 trait Foo {} 1912 trait Foo {}
1913 fn foo(ar<|>g: &impl Foo) {} 1913 fn foo(ar<|>g: &impl Foo) {}
@@ -1947,7 +1947,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
1947 #[test] 1947 #[test]
1948 fn test_hover_arg_impl_traits_has_goto_type_action() { 1948 fn test_hover_arg_impl_traits_has_goto_type_action() {
1949 let (_, actions) = check_hover_result( 1949 let (_, actions) = check_hover_result(
1950 " 1950 r"
1951 //- /lib.rs 1951 //- /lib.rs
1952 trait Foo {} 1952 trait Foo {}
1953 trait Bar<T> {} 1953 trait Bar<T> {}
@@ -2028,7 +2028,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
2028 #[test] 2028 #[test]
2029 fn test_hover_arg_generic_impl_trait_has_goto_type_action() { 2029 fn test_hover_arg_generic_impl_trait_has_goto_type_action() {
2030 let (_, actions) = check_hover_result( 2030 let (_, actions) = check_hover_result(
2031 " 2031 r"
2032 //- /lib.rs 2032 //- /lib.rs
2033 trait Foo<T> {} 2033 trait Foo<T> {}
2034 struct S {} 2034 struct S {}
@@ -2088,7 +2088,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
2088 #[test] 2088 #[test]
2089 fn test_hover_dyn_return_has_goto_type_action() { 2089 fn test_hover_dyn_return_has_goto_type_action() {
2090 let (_, actions) = check_hover_result( 2090 let (_, actions) = check_hover_result(
2091 " 2091 r"
2092 //- /main.rs 2092 //- /main.rs
2093 trait Foo {} 2093 trait Foo {}
2094 struct S; 2094 struct S;
@@ -2156,7 +2156,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
2156 #[test] 2156 #[test]
2157 fn test_hover_dyn_arg_has_goto_type_action() { 2157 fn test_hover_dyn_arg_has_goto_type_action() {
2158 let (_, actions) = check_hover_result( 2158 let (_, actions) = check_hover_result(
2159 " 2159 r"
2160 //- /lib.rs 2160 //- /lib.rs
2161 trait Foo {} 2161 trait Foo {}
2162 fn foo(ar<|>g: &dyn Foo) {} 2162 fn foo(ar<|>g: &dyn Foo) {}
@@ -2196,7 +2196,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
2196 #[test] 2196 #[test]
2197 fn test_hover_generic_dyn_arg_has_goto_type_action() { 2197 fn test_hover_generic_dyn_arg_has_goto_type_action() {
2198 let (_, actions) = check_hover_result( 2198 let (_, actions) = check_hover_result(
2199 " 2199 r"
2200 //- /lib.rs 2200 //- /lib.rs
2201 trait Foo<T> {} 2201 trait Foo<T> {}
2202 struct S {} 2202 struct S {}
@@ -2256,7 +2256,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
2256 #[test] 2256 #[test]
2257 fn test_hover_goto_type_action_links_order() { 2257 fn test_hover_goto_type_action_links_order() {
2258 let (_, actions) = check_hover_result( 2258 let (_, actions) = check_hover_result(
2259 " 2259 r"
2260 //- /lib.rs 2260 //- /lib.rs
2261 trait ImplTrait<T> {} 2261 trait ImplTrait<T> {}
2262 trait DynTrait<T> {} 2262 trait DynTrait<T> {}
@@ -2357,7 +2357,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
2357 #[test] 2357 #[test]
2358 fn test_hover_associated_type_has_goto_type_action() { 2358 fn test_hover_associated_type_has_goto_type_action() {
2359 let (_, actions) = check_hover_result( 2359 let (_, actions) = check_hover_result(
2360 " 2360 r"
2361 //- /main.rs 2361 //- /main.rs
2362 trait Foo { 2362 trait Foo {
2363 type Item; 2363 type Item;
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs
index ecac5134e..8660278f1 100644
--- a/crates/ra_ide/src/lib.rs
+++ b/crates/ra_ide/src/lib.rs
@@ -75,7 +75,7 @@ pub use crate::{
75 }, 75 },
76}; 76};
77 77
78pub use hir::Documentation; 78pub use hir::{Documentation, Semantics};
79pub use ra_assists::{Assist, AssistConfig, AssistId, ResolvedAssist}; 79pub use ra_assists::{Assist, AssistConfig, AssistId, ResolvedAssist};
80pub use ra_db::{ 80pub use ra_db::{
81 Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRoot, 81 Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRoot,
@@ -385,7 +385,9 @@ impl Analysis {
385 position: FilePosition, 385 position: FilePosition,
386 search_scope: Option<SearchScope>, 386 search_scope: Option<SearchScope>,
387 ) -> Cancelable<Option<ReferenceSearchResult>> { 387 ) -> Cancelable<Option<ReferenceSearchResult>> {
388 self.with_db(|db| references::find_all_refs(db, position, search_scope).map(|it| it.info)) 388 self.with_db(|db| {
389 references::find_all_refs(&Semantics::new(db), position, search_scope).map(|it| it.info)
390 })
389 } 391 }
390 392
391 /// Returns a short text describing element at position. 393 /// Returns a short text describing element at position.
diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs
index 3433fdae3..c2b0d5efe 100644
--- a/crates/ra_ide/src/references.rs
+++ b/crates/ra_ide/src/references.rs
@@ -86,12 +86,11 @@ impl IntoIterator for ReferenceSearchResult {
86} 86}
87 87
88pub(crate) fn find_all_refs( 88pub(crate) fn find_all_refs(
89 db: &RootDatabase, 89 sema: &Semantics<RootDatabase>,
90 position: FilePosition, 90 position: FilePosition,
91 search_scope: Option<SearchScope>, 91 search_scope: Option<SearchScope>,
92) -> Option<RangeInfo<ReferenceSearchResult>> { 92) -> Option<RangeInfo<ReferenceSearchResult>> {
93 let _p = profile("find_all_refs"); 93 let _p = profile("find_all_refs");
94 let sema = Semantics::new(db);
95 let syntax = sema.parse(position.file_id).syntax().clone(); 94 let syntax = sema.parse(position.file_id).syntax().clone();
96 95
97 let (opt_name, search_kind) = if let Some(name) = 96 let (opt_name, search_kind) = if let Some(name) =
@@ -108,15 +107,15 @@ pub(crate) fn find_all_refs(
108 let RangeInfo { range, info: def } = find_name(&sema, &syntax, position, opt_name)?; 107 let RangeInfo { range, info: def } = find_name(&sema, &syntax, position, opt_name)?;
109 108
110 let references = def 109 let references = def
111 .find_usages(db, search_scope) 110 .find_usages(sema, search_scope)
112 .into_iter() 111 .into_iter()
113 .filter(|r| search_kind == ReferenceKind::Other || search_kind == r.kind) 112 .filter(|r| search_kind == ReferenceKind::Other || search_kind == r.kind)
114 .collect(); 113 .collect();
115 114
116 let decl_range = def.try_to_nav(db)?.range(); 115 let decl_range = def.try_to_nav(sema.db)?.range();
117 116
118 let declaration = Declaration { 117 let declaration = Declaration {
119 nav: def.try_to_nav(db)?, 118 nav: def.try_to_nav(sema.db)?,
120 kind: ReferenceKind::Other, 119 kind: ReferenceKind::Other,
121 access: decl_access(&def, &syntax, decl_range), 120 access: decl_access(&def, &syntax, decl_range),
122 }; 121 };
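`find_all_refs` no longer constructs its own `Semantics`; the caller owns it (see the ra_ide/src/lib.rs hunk above) and the helpers below it reach the database through `sema.db`. A minimal sketch of the resulting calling pattern, with an illustrative wrapper name:

```
fn example_entry_point(
    db: &RootDatabase,
    position: FilePosition,
) -> Option<ReferenceSearchResult> {
    // One Semantics per request: its parse and macro-expansion caches are shared by
    // every helper that takes `&sema`, and `sema.db` still exposes the raw database.
    let sema = Semantics::new(db);
    find_all_refs(&sema, position, None).map(|it| it.info)
}
```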
diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs
index 7ebc0adcf..b6a2266b4 100644
--- a/crates/ra_ide/src/references/rename.rs
+++ b/crates/ra_ide/src/references/rename.rs
@@ -24,23 +24,24 @@ pub(crate) fn rename(
24 position: FilePosition, 24 position: FilePosition,
25 new_name: &str, 25 new_name: &str,
26) -> Option<RangeInfo<SourceChange>> { 26) -> Option<RangeInfo<SourceChange>> {
27 let sema = Semantics::new(db);
28
27 match lex_single_valid_syntax_kind(new_name)? { 29 match lex_single_valid_syntax_kind(new_name)? {
28 SyntaxKind::IDENT | SyntaxKind::UNDERSCORE => (), 30 SyntaxKind::IDENT | SyntaxKind::UNDERSCORE => (),
29 SyntaxKind::SELF_KW => return rename_to_self(db, position), 31 SyntaxKind::SELF_KW => return rename_to_self(&sema, position),
30 _ => return None, 32 _ => return None,
31 } 33 }
32 34
33 let sema = Semantics::new(db);
34 let source_file = sema.parse(position.file_id); 35 let source_file = sema.parse(position.file_id);
35 let syntax = source_file.syntax(); 36 let syntax = source_file.syntax();
36 if let Some(module) = find_module_at_offset(&sema, position, syntax) { 37 if let Some(module) = find_module_at_offset(&sema, position, syntax) {
37 rename_mod(db, position, module, new_name) 38 rename_mod(&sema, position, module, new_name)
38 } else if let Some(self_token) = 39 } else if let Some(self_token) =
39 syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::SELF_KW) 40 syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::SELF_KW)
40 { 41 {
41 rename_self_to_param(db, position, self_token, new_name) 42 rename_self_to_param(&sema, position, self_token, new_name)
42 } else { 43 } else {
43 rename_reference(sema.db, position, new_name) 44 rename_reference(&sema, position, new_name)
44 } 45 }
45} 46}
46 47
@@ -97,7 +98,7 @@ fn source_edit_from_reference(reference: Reference, new_name: &str) -> SourceFil
97} 98}
98 99
99fn rename_mod( 100fn rename_mod(
100 db: &RootDatabase, 101 sema: &Semantics<RootDatabase>,
101 position: FilePosition, 102 position: FilePosition,
102 module: Module, 103 module: Module,
103 new_name: &str, 104 new_name: &str,
@@ -105,12 +106,12 @@ fn rename_mod(
105 let mut source_file_edits = Vec::new(); 106 let mut source_file_edits = Vec::new();
106 let mut file_system_edits = Vec::new(); 107 let mut file_system_edits = Vec::new();
107 108
108 let src = module.definition_source(db); 109 let src = module.definition_source(sema.db);
109 let file_id = src.file_id.original_file(db); 110 let file_id = src.file_id.original_file(sema.db);
110 match src.value { 111 match src.value {
111 ModuleSource::SourceFile(..) => { 112 ModuleSource::SourceFile(..) => {
112 // mod is defined in path/to/dir/mod.rs 113 // mod is defined in path/to/dir/mod.rs
113 let dst = if module.is_mod_rs(db) { 114 let dst = if module.is_mod_rs(sema.db) {
114 format!("../{}/mod.rs", new_name) 115 format!("../{}/mod.rs", new_name)
115 } else { 116 } else {
116 format!("{}.rs", new_name) 117 format!("{}.rs", new_name)
@@ -122,17 +123,17 @@ fn rename_mod(
122 ModuleSource::Module(..) => {} 123 ModuleSource::Module(..) => {}
123 } 124 }
124 125
125 if let Some(src) = module.declaration_source(db) { 126 if let Some(src) = module.declaration_source(sema.db) {
126 let file_id = src.file_id.original_file(db); 127 let file_id = src.file_id.original_file(sema.db);
127 let name = src.value.name()?; 128 let name = src.value.name()?;
128 let edit = SourceFileEdit { 129 let edit = SourceFileEdit {
129 file_id: file_id, 130 file_id,
130 edit: TextEdit::replace(name.syntax().text_range(), new_name.into()), 131 edit: TextEdit::replace(name.syntax().text_range(), new_name.into()),
131 }; 132 };
132 source_file_edits.push(edit); 133 source_file_edits.push(edit);
133 } 134 }
134 135
135 let RangeInfo { range, info: refs } = find_all_refs(db, position, None)?; 136 let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?;
136 let ref_edits = refs 137 let ref_edits = refs
137 .references 138 .references
138 .into_iter() 139 .into_iter()
@@ -142,8 +143,10 @@ fn rename_mod(
142 Some(RangeInfo::new(range, SourceChange::from_edits(source_file_edits, file_system_edits))) 143 Some(RangeInfo::new(range, SourceChange::from_edits(source_file_edits, file_system_edits)))
143} 144}
144 145
145fn rename_to_self(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<SourceChange>> { 146fn rename_to_self(
146 let sema = Semantics::new(db); 147 sema: &Semantics<RootDatabase>,
148 position: FilePosition,
149) -> Option<RangeInfo<SourceChange>> {
147 let source_file = sema.parse(position.file_id); 150 let source_file = sema.parse(position.file_id);
148 let syn = source_file.syntax(); 151 let syn = source_file.syntax();
149 152
@@ -158,7 +161,7 @@ fn rename_to_self(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo
158 _ => return None, // not renaming other types 161 _ => return None, // not renaming other types
159 }; 162 };
160 163
161 let RangeInfo { range, info: refs } = find_all_refs(db, position, None)?; 164 let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?;
162 165
163 let param_range = first_param.syntax().text_range(); 166 let param_range = first_param.syntax().text_range();
164 let (param_ref, usages): (Vec<Reference>, Vec<Reference>) = refs 167 let (param_ref, usages): (Vec<Reference>, Vec<Reference>) = refs
@@ -210,16 +213,15 @@ fn text_edit_from_self_param(
210} 213}
211 214
212fn rename_self_to_param( 215fn rename_self_to_param(
213 db: &RootDatabase, 216 sema: &Semantics<RootDatabase>,
214 position: FilePosition, 217 position: FilePosition,
215 self_token: SyntaxToken, 218 self_token: SyntaxToken,
216 new_name: &str, 219 new_name: &str,
217) -> Option<RangeInfo<SourceChange>> { 220) -> Option<RangeInfo<SourceChange>> {
218 let sema = Semantics::new(db);
219 let source_file = sema.parse(position.file_id); 221 let source_file = sema.parse(position.file_id);
220 let syn = source_file.syntax(); 222 let syn = source_file.syntax();
221 223
222 let text = db.file_text(position.file_id); 224 let text = sema.db.file_text(position.file_id);
223 let fn_def = find_node_at_offset::<ast::FnDef>(syn, position.offset)?; 225 let fn_def = find_node_at_offset::<ast::FnDef>(syn, position.offset)?;
224 let search_range = fn_def.syntax().text_range(); 226 let search_range = fn_def.syntax().text_range();
225 227
@@ -249,11 +251,11 @@ fn rename_self_to_param(
249} 251}
250 252
251fn rename_reference( 253fn rename_reference(
252 db: &RootDatabase, 254 sema: &Semantics<RootDatabase>,
253 position: FilePosition, 255 position: FilePosition,
254 new_name: &str, 256 new_name: &str,
255) -> Option<RangeInfo<SourceChange>> { 257) -> Option<RangeInfo<SourceChange>> {
256 let RangeInfo { range, info: refs } = find_all_refs(db, position, None)?; 258 let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?;
257 259
258 let edit = refs 260 let edit = refs
259 .into_iter() 261 .into_iter()
diff --git a/crates/ra_ide/src/syntax_highlighting/tests.rs b/crates/ra_ide/src/syntax_highlighting/tests.rs
index b7fad9719..aa7c887d6 100644
--- a/crates/ra_ide/src/syntax_highlighting/tests.rs
+++ b/crates/ra_ide/src/syntax_highlighting/tests.rs
@@ -1,6 +1,7 @@
1use std::fs; 1use std::fs;
2 2
3use test_utils::{assert_eq_text, project_dir, read_text}; 3use expect::{expect_file, ExpectFile};
4use test_utils::project_dir;
4 5
5use crate::{mock_analysis::single_file, FileRange, TextRange}; 6use crate::{mock_analysis::single_file, FileRange, TextRange};
6 7
@@ -91,7 +92,7 @@ impl<T> Option<T> {
91} 92}
92"# 93"#
93 .trim(), 94 .trim(),
94 "crates/ra_ide/src/snapshots/highlighting.html", 95 expect_file!["crates/ra_ide/test_data/highlighting.html"],
95 false, 96 false,
96 ); 97 );
97} 98}
@@ -114,7 +115,7 @@ fn bar() {
114} 115}
115"# 116"#
116 .trim(), 117 .trim(),
117 "crates/ra_ide/src/snapshots/rainbow_highlighting.html", 118 expect_file!["crates/ra_ide/test_data/rainbow_highlighting.html"],
118 true, 119 true,
119 ); 120 );
120} 121}
@@ -167,7 +168,7 @@ fn main() {
167 ); 168 );
168}"## 169}"##
169 .trim(), 170 .trim(),
170 "crates/ra_ide/src/snapshots/highlight_injection.html", 171 expect_file!["crates/ra_ide/test_data/highlight_injection.html"],
171 false, 172 false,
172 ); 173 );
173} 174}
@@ -250,7 +251,7 @@ fn main() {
250 println!("{ничоси}", ничоси = 92); 251 println!("{ничоси}", ничоси = 92);
251}"# 252}"#
252 .trim(), 253 .trim(),
253 "crates/ra_ide/src/snapshots/highlight_strings.html", 254 expect_file!["crates/ra_ide/test_data/highlight_strings.html"],
254 false, 255 false,
255 ); 256 );
256} 257}
@@ -278,7 +279,7 @@ fn main() {
278} 279}
279"# 280"#
280 .trim(), 281 .trim(),
281 "crates/ra_ide/src/snapshots/highlight_unsafe.html", 282 expect_file!["crates/ra_ide/test_data/highlight_unsafe.html"],
282 false, 283 false,
283 ); 284 );
284} 285}
@@ -354,7 +355,7 @@ macro_rules! noop {
354} 355}
355"# 356"#
356 .trim(), 357 .trim(),
357 "crates/ra_ide/src/snapshots/highlight_doctest.html", 358 expect_file!["crates/ra_ide/test_data/highlight_doctest.html"],
358 false, 359 false,
359 ); 360 );
360} 361}
@@ -362,11 +363,8 @@ macro_rules! noop {
362/// Highlights the code given by the `ra_fixture` argument, renders the 363/// Highlights the code given by the `ra_fixture` argument, renders the
363/// result as HTML, and compares it with the HTML file given as `snapshot`. 364/// result as HTML, and compares it with the HTML file given as `snapshot`.
364/// Note that the `snapshot` file is overwritten by the rendered HTML. 365/// Note that the `snapshot` file is overwritten by the rendered HTML.
365fn check_highlighting(ra_fixture: &str, snapshot: &str, rainbow: bool) { 366fn check_highlighting(ra_fixture: &str, expect: ExpectFile, rainbow: bool) {
366 let (analysis, file_id) = single_file(ra_fixture); 367 let (analysis, file_id) = single_file(ra_fixture);
367 let dst_file = project_dir().join(snapshot);
368 let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap(); 368 let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap();
369 let expected_html = &read_text(&dst_file); 369 expect.assert_eq(actual_html)
370 fs::write(dst_file, &actual_html).unwrap();
371 assert_eq_text!(expected_html, actual_html);
372} 370}
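`check_highlighting` now defers snapshot comparison to the `expect` crate introduced by this change, instead of reading the HTML file and unconditionally overwriting it. A usage sketch, assuming `ExpectFile` follows the same UPDATE_EXPECT convention used elsewhere in this diff (the path below is illustrative):

```
#[test]
fn example_snapshot() {
    let actual_html = "<pre>fn main() {}</pre>".to_string();
    // Fails with a diff when the stored file differs; re-running with UPDATE_EXPECT=1
    // is assumed to rewrite the snapshot (the old code overwrote it on every run).
    expect_file!["crates/ra_ide/test_data/example.html"].assert_eq(&actual_html);
}
```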
diff --git a/crates/ra_ide/src/snapshots/highlight_doctest.html b/crates/ra_ide/test_data/highlight_doctest.html
index e8155def7..e8155def7 100644
--- a/crates/ra_ide/src/snapshots/highlight_doctest.html
+++ b/crates/ra_ide/test_data/highlight_doctest.html
diff --git a/crates/ra_ide/src/snapshots/highlight_injection.html b/crates/ra_ide/test_data/highlight_injection.html
index 1b0349bae..1b0349bae 100644
--- a/crates/ra_ide/src/snapshots/highlight_injection.html
+++ b/crates/ra_ide/test_data/highlight_injection.html
diff --git a/crates/ra_ide/src/snapshots/highlight_strings.html b/crates/ra_ide/test_data/highlight_strings.html
index d184b5691..d184b5691 100644
--- a/crates/ra_ide/src/snapshots/highlight_strings.html
+++ b/crates/ra_ide/test_data/highlight_strings.html
diff --git a/crates/ra_ide/src/snapshots/highlight_unsafe.html b/crates/ra_ide/test_data/highlight_unsafe.html
index 6936e949f..6936e949f 100644
--- a/crates/ra_ide/src/snapshots/highlight_unsafe.html
+++ b/crates/ra_ide/test_data/highlight_unsafe.html
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/test_data/highlighting.html
index 8d0b38f95..8d0b38f95 100644
--- a/crates/ra_ide/src/snapshots/highlighting.html
+++ b/crates/ra_ide/test_data/highlighting.html
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/test_data/rainbow_highlighting.html
index 9516c7441..9516c7441 100644
--- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html
+++ b/crates/ra_ide/test_data/rainbow_highlighting.html
diff --git a/crates/ra_ide_db/src/change.rs b/crates/ra_ide_db/src/change.rs
index b507000f2..dbe6eacc5 100644
--- a/crates/ra_ide_db/src/change.rs
+++ b/crates/ra_ide_db/src/change.rs
@@ -243,8 +243,9 @@ impl RootDatabase {
243 hir::db::GenericPredicatesForParamQuery 243 hir::db::GenericPredicatesForParamQuery
244 hir::db::GenericPredicatesQuery 244 hir::db::GenericPredicatesQuery
245 hir::db::GenericDefaultsQuery 245 hir::db::GenericDefaultsQuery
246 hir::db::ImplsInCrateQuery 246 hir::db::InherentImplsInCrateQuery
247 hir::db::ImplsFromDepsQuery 247 hir::db::TraitImplsInCrateQuery
248 hir::db::TraitImplsInDepsQuery
248 hir::db::InternTypeCtorQuery 249 hir::db::InternTypeCtorQuery
249 hir::db::InternTypeParamIdQuery 250 hir::db::InternTypeParamIdQuery
250 hir::db::InternChalkImplQuery 251 hir::db::InternChalkImplQuery
diff --git a/crates/ra_ide_db/src/imports_locator.rs b/crates/ra_ide_db/src/imports_locator.rs
index fff112e66..1fba71ff8 100644
--- a/crates/ra_ide_db/src/imports_locator.rs
+++ b/crates/ra_ide_db/src/imports_locator.rs
@@ -13,57 +13,53 @@ use crate::{
13use either::Either; 13use either::Either;
14use rustc_hash::FxHashSet; 14use rustc_hash::FxHashSet;
15 15
16pub struct ImportsLocator<'a> { 16pub fn find_imports<'a>(
17 sema: Semantics<'a, RootDatabase>, 17 sema: &Semantics<'a, RootDatabase>,
18 krate: Crate, 18 krate: Crate,
19} 19 name_to_import: &str,
20 20) -> Vec<Either<ModuleDef, MacroDef>> {
21impl<'a> ImportsLocator<'a> { 21 let _p = profile("search_for_imports");
22 pub fn new(db: &'a RootDatabase, krate: Crate) -> Self { 22 let db = sema.db;
23 Self { sema: Semantics::new(db), krate }
24 }
25 23
26 pub fn find_imports(&mut self, name_to_import: &str) -> Vec<Either<ModuleDef, MacroDef>> { 24 // Query dependencies first.
27 let _p = profile("search_for_imports"); 25 let mut candidates: FxHashSet<_> =
28 let db = self.sema.db; 26 krate.query_external_importables(db, name_to_import).collect();
29 27
30 // Query dependencies first. 28 // Query the local crate using the symbol index.
31 let mut candidates: FxHashSet<_> = 29 let local_results = {
32 self.krate.query_external_importables(db, name_to_import).collect(); 30 let mut query = Query::new(name_to_import.to_string());
31 query.exact();
32 query.limit(40);
33 symbol_index::crate_symbols(db, krate.into(), query)
34 };
33 35
34 // Query the local crate using the symbol index. 36 candidates.extend(
35 let local_results = { 37 local_results
36 let mut query = Query::new(name_to_import.to_string()); 38 .into_iter()
37 query.exact(); 39 .filter_map(|import_candidate| get_name_definition(sema, &import_candidate))
38 query.limit(40); 40 .filter_map(|name_definition_to_import| match name_definition_to_import {
39 symbol_index::crate_symbols(db, self.krate.into(), query) 41 Definition::ModuleDef(module_def) => Some(Either::Left(module_def)),
40 }; 42 Definition::Macro(macro_def) => Some(Either::Right(macro_def)),
43 _ => None,
44 }),
45 );
41 46
42 candidates.extend( 47 candidates.into_iter().collect()
43 local_results 48}
44 .into_iter()
45 .filter_map(|import_candidate| self.get_name_definition(&import_candidate))
46 .filter_map(|name_definition_to_import| match name_definition_to_import {
47 Definition::ModuleDef(module_def) => Some(Either::Left(module_def)),
48 Definition::Macro(macro_def) => Some(Either::Right(macro_def)),
49 _ => None,
50 }),
51 );
52
53 candidates.into_iter().collect()
54 }
55 49
56 fn get_name_definition(&mut self, import_candidate: &FileSymbol) -> Option<Definition> { 50fn get_name_definition<'a>(
57 let _p = profile("get_name_definition"); 51 sema: &Semantics<'a, RootDatabase>,
58 let file_id = import_candidate.file_id; 52 import_candidate: &FileSymbol,
53) -> Option<Definition> {
54 let _p = profile("get_name_definition");
55 let file_id = import_candidate.file_id;
59 56
60 let candidate_node = import_candidate.ptr.to_node(self.sema.parse(file_id).syntax()); 57 let candidate_node = import_candidate.ptr.to_node(sema.parse(file_id).syntax());
61 let candidate_name_node = if candidate_node.kind() != NAME { 58 let candidate_name_node = if candidate_node.kind() != NAME {
62 candidate_node.children().find(|it| it.kind() == NAME)? 59 candidate_node.children().find(|it| it.kind() == NAME)?
63 } else { 60 } else {
64 candidate_node 61 candidate_node
65 }; 62 };
66 let name = ast::Name::cast(candidate_name_node)?; 63 let name = ast::Name::cast(candidate_name_node)?;
67 classify_name(&self.sema, &name)?.into_definition() 64 classify_name(sema, &name)?.into_definition()
68 }
69} 65}
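`ImportsLocator` is gone; the lookup is now a free function that borrows an existing `Semantics`. An illustrative call site (the surrounding function is made up):

```
fn example(sema: &Semantics<RootDatabase>, krate: Crate) {
    for candidate in imports_locator::find_imports(sema, krate, "HashMap") {
        match candidate {
            Either::Left(_module_def) => { /* a module-level item that could be imported */ }
            Either::Right(_macro_def) => { /* a macro that could be imported */ }
        }
    }
}
```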
diff --git a/crates/ra_ide_db/src/lib.rs b/crates/ra_ide_db/src/lib.rs
index a808de4f1..c78071ad6 100644
--- a/crates/ra_ide_db/src/lib.rs
+++ b/crates/ra_ide_db/src/lib.rs
@@ -13,7 +13,7 @@ mod wasm_shims;
13 13
14use std::sync::Arc; 14use std::sync::Arc;
15 15
16use hir::db::{AstDatabase, DefDatabase}; 16use hir::db::{AstDatabase, DefDatabase, HirDatabase};
17use ra_db::{ 17use ra_db::{
18 salsa::{self, Database, Durability}, 18 salsa::{self, Database, Durability},
19 Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, 19 Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase,
@@ -52,6 +52,12 @@ impl Upcast<dyn DefDatabase> for RootDatabase {
52 } 52 }
53} 53}
54 54
55impl Upcast<dyn HirDatabase> for RootDatabase {
56 fn upcast(&self) -> &(dyn HirDatabase + 'static) {
57 &*self
58 }
59}
60
55impl FileLoader for RootDatabase { 61impl FileLoader for RootDatabase {
56 fn file_text(&self, file_id: FileId) -> Arc<String> { 62 fn file_text(&self, file_id: FileId) -> Arc<String> {
57 FileLoaderDelegate(self).file_text(file_id) 63 FileLoaderDelegate(self).file_text(file_id)
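The added `Upcast<dyn HirDatabase>` impl lets code holding a `RootDatabase` hand out a `&dyn HirDatabase`, just as it already could for `AstDatabase` and `DefDatabase`. A minimal illustration (the function is hypothetical; it assumes the same `Upcast` trait used by the impls above is in scope):

```
fn example(db: &RootDatabase) {
    // The annotated target type selects the Upcast impl.
    let hir_db: &dyn hir::db::HirDatabase = db.upcast();
    let _ = hir_db;
}
```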
diff --git a/crates/ra_ide_db/src/search.rs b/crates/ra_ide_db/src/search.rs
index 44d5c35e6..81553150b 100644
--- a/crates/ra_ide_db/src/search.rs
+++ b/crates/ra_ide_db/src/search.rs
@@ -180,20 +180,20 @@ impl Definition {
180 180
181 pub fn find_usages( 181 pub fn find_usages(
182 &self, 182 &self,
183 db: &RootDatabase, 183 sema: &Semantics<RootDatabase>,
184 search_scope: Option<SearchScope>, 184 search_scope: Option<SearchScope>,
185 ) -> Vec<Reference> { 185 ) -> Vec<Reference> {
186 let _p = profile("Definition::find_usages"); 186 let _p = profile("Definition::find_usages");
187 187
188 let search_scope = { 188 let search_scope = {
189 let base = self.search_scope(db); 189 let base = self.search_scope(sema.db);
190 match search_scope { 190 match search_scope {
191 None => base, 191 None => base,
192 Some(scope) => base.intersection(&scope), 192 Some(scope) => base.intersection(&scope),
193 } 193 }
194 }; 194 };
195 195
196 let name = match self.name(db) { 196 let name = match self.name(sema.db) {
197 None => return Vec::new(), 197 None => return Vec::new(),
198 Some(it) => it.to_string(), 198 Some(it) => it.to_string(),
199 }; 199 };
@@ -202,11 +202,10 @@ impl Definition {
202 let mut refs = vec![]; 202 let mut refs = vec![];
203 203
204 for (file_id, search_range) in search_scope { 204 for (file_id, search_range) in search_scope {
205 let text = db.file_text(file_id); 205 let text = sema.db.file_text(file_id);
206 let search_range = 206 let search_range =
207 search_range.unwrap_or(TextRange::up_to(TextSize::of(text.as_str()))); 207 search_range.unwrap_or(TextRange::up_to(TextSize::of(text.as_str())));
208 208
209 let sema = Semantics::new(db);
210 let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); 209 let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
211 210
212 for (idx, _) in text.match_indices(pat) { 211 for (idx, _) in text.match_indices(pat) {
@@ -222,9 +221,6 @@ impl Definition {
222 continue; 221 continue;
223 }; 222 };
224 223
225 // FIXME: reuse sb
226 // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098
227
228 match classify_name_ref(&sema, &name_ref) { 224 match classify_name_ref(&sema, &name_ref) {
229 Some(NameRefClass::Definition(def)) if &def == self => { 225 Some(NameRefClass::Definition(def)) if &def == self => {
230 let kind = if is_record_lit_name_ref(&name_ref) 226 let kind = if is_record_lit_name_ref(&name_ref)
diff --git a/crates/ra_proc_macro_srv/src/tests/mod.rs b/crates/ra_proc_macro_srv/src/tests/mod.rs
index 82cefbb29..8e6f28abd 100644
--- a/crates/ra_proc_macro_srv/src/tests/mod.rs
+++ b/crates/ra_proc_macro_srv/src/tests/mod.rs
@@ -11,7 +11,7 @@ fn test_derive_serialize_proc_macro() {
11 "serde_derive", 11 "serde_derive",
12 "Serialize", 12 "Serialize",
13 "1.0", 13 "1.0",
14 r##"struct Foo {}"##, 14 r"struct Foo {}",
15 include_str!("fixtures/test_serialize_proc_macro.txt"), 15 include_str!("fixtures/test_serialize_proc_macro.txt"),
16 ); 16 );
17} 17}
@@ -22,9 +22,7 @@ fn test_derive_serialize_proc_macro_failed() {
22 "serde_derive", 22 "serde_derive",
23 "Serialize", 23 "Serialize",
24 "1.0", 24 "1.0",
25 r##" 25 r"struct {}",
26 struct {}
27"##,
28 r##" 26 r##"
29SUBTREE $ 27SUBTREE $
30 IDENT compile_error 4294967295 28 IDENT compile_error 4294967295
diff --git a/crates/ra_proc_macro_srv/src/tests/utils.rs b/crates/ra_proc_macro_srv/src/tests/utils.rs
index 8d85f2d8a..dcb00671f 100644
--- a/crates/ra_proc_macro_srv/src/tests/utils.rs
+++ b/crates/ra_proc_macro_srv/src/tests/utils.rs
@@ -44,12 +44,12 @@ pub fn assert_expand(
44 crate_name: &str, 44 crate_name: &str,
45 macro_name: &str, 45 macro_name: &str,
46 version: &str, 46 version: &str,
47 fixture: &str, 47 ra_fixture: &str,
48 expect: &str, 48 expect: &str,
49) { 49) {
50 let path = fixtures::dylib_path(crate_name, version); 50 let path = fixtures::dylib_path(crate_name, version);
51 let expander = dylib::Expander::new(&path).unwrap(); 51 let expander = dylib::Expander::new(&path).unwrap();
52 let fixture = parse_string(fixture).unwrap(); 52 let fixture = parse_string(ra_fixture).unwrap();
53 53
54 let res = expander.expand(macro_name, &fixture.subtree, None).unwrap(); 54 let res = expander.expand(macro_name, &fixture.subtree, None).unwrap();
55 assert_eq_text!(&format!("{:?}", res), &expect.trim()); 55 assert_eq_text!(&format!("{:?}", res), &expect.trim());
diff --git a/crates/ra_project_model/src/project_json.rs b/crates/ra_project_model/src/project_json.rs
index 9fe1e2dcb..b0fe09333 100644
--- a/crates/ra_project_model/src/project_json.rs
+++ b/crates/ra_project_model/src/project_json.rs
@@ -10,7 +10,7 @@ use serde::{de, Deserialize};
10use stdx::split_delim; 10use stdx::split_delim;
11 11
12/// Roots and crates that compose this Rust project. 12/// Roots and crates that compose this Rust project.
13#[derive(Clone, Debug)] 13#[derive(Clone, Debug, Eq, PartialEq)]
14pub struct ProjectJson { 14pub struct ProjectJson {
15 pub(crate) roots: Vec<Root>, 15 pub(crate) roots: Vec<Root>,
16 pub(crate) crates: Vec<Crate>, 16 pub(crate) crates: Vec<Crate>,
@@ -18,14 +18,14 @@ pub struct ProjectJson {
18 18
19/// A root points to the directory which contains Rust crates. rust-analyzer watches all files in 19/// A root points to the directory which contains Rust crates. rust-analyzer watches all files in
20/// all roots. Roots might be nested. 20/// all roots. Roots might be nested.
21#[derive(Clone, Debug)] 21#[derive(Clone, Debug, Eq, PartialEq)]
22pub struct Root { 22pub struct Root {
23 pub(crate) path: AbsPathBuf, 23 pub(crate) path: AbsPathBuf,
24} 24}
25 25
26/// A crate points to the root module of a crate and lists the dependencies of the crate. This is 26/// A crate points to the root module of a crate and lists the dependencies of the crate. This is
27/// useful in creating the crate graph. 27/// useful in creating the crate graph.
28#[derive(Clone, Debug)] 28#[derive(Clone, Debug, Eq, PartialEq)]
29pub struct Crate { 29pub struct Crate {
30 pub(crate) root_module: AbsPathBuf, 30 pub(crate) root_module: AbsPathBuf,
31 pub(crate) edition: Edition, 31 pub(crate) edition: Edition,
diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs
index 959967b79..7b4232497 100644
--- a/crates/ra_syntax/src/tests.rs
+++ b/crates/ra_syntax/src/tests.rs
@@ -1,9 +1,11 @@
1use std::{ 1use std::{
2 env,
2 fmt::Write, 3 fmt::Write,
4 fs,
3 path::{Component, Path, PathBuf}, 5 path::{Component, Path, PathBuf},
4}; 6};
5 7
6use test_utils::{collect_rust_files, dir_tests, project_dir, read_text}; 8use test_utils::{assert_eq_text, project_dir};
7 9
8use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextSize, Token}; 10use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextSize, Token};
9 11
@@ -200,3 +202,99 @@ where
200 } 202 }
201 }); 203 });
202} 204}
205
206/// Calls callback `f` with input code and file paths for each `.rs` file in `test_data_dir`
207/// subdirectories defined by `paths`.
208///
209/// If the content of the matching output file differs from the output of `f()`
210/// the test will fail.
211///
212/// If there is no matching output file it will be created and filled with the
213/// output of `f()`, but the test will fail.
214fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], outfile_extension: &str, f: F)
215where
216 F: Fn(&str, &Path) -> String,
217{
218 for (path, input_code) in collect_rust_files(test_data_dir, paths) {
219 let actual = f(&input_code, &path);
220 let path = path.with_extension(outfile_extension);
221 if !path.exists() {
222 println!("\nfile: {}", path.display());
223 println!("No .txt file with expected result, creating...\n");
224 println!("{}\n{}", input_code, actual);
225 fs::write(&path, &actual).unwrap();
226 panic!("No expected result");
227 }
228 let expected = read_text(&path);
229 assert_equal_text(&expected, &actual, &path);
230 }
231}
232
233/// Collects all `.rs` files from `dir` subdirectories defined by `paths`.
234fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {
235 paths
236 .iter()
237 .flat_map(|path| {
238 let path = root_dir.to_owned().join(path);
239 rust_files_in_dir(&path).into_iter()
240 })
241 .map(|path| {
242 let text = read_text(&path);
243 (path, text)
244 })
245 .collect()
246}
247
248/// Collects paths to all `.rs` files from `dir` in a sorted `Vec<PathBuf>`.
249fn rust_files_in_dir(dir: &Path) -> Vec<PathBuf> {
250 let mut acc = Vec::new();
251 for file in fs::read_dir(&dir).unwrap() {
252 let file = file.unwrap();
253 let path = file.path();
254 if path.extension().unwrap_or_default() == "rs" {
255 acc.push(path);
256 }
257 }
258 acc.sort();
259 acc
260}
261
262/// Asserts that `expected` and `actual` strings are equal. If they differ only
263/// in trailing or leading whitespace the test won't fail and
264/// the contents of `actual` will be written to the file located at `path`.
265fn assert_equal_text(expected: &str, actual: &str, path: &Path) {
266 if expected == actual {
267 return;
268 }
269 let dir = project_dir();
270 let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
271 if expected.trim() == actual.trim() {
272 println!("whitespace difference, rewriting");
273 println!("file: {}\n", pretty_path.display());
274 fs::write(path, actual).unwrap();
275 return;
276 }
277 if env::var("UPDATE_EXPECT").is_ok() {
278 println!("rewriting {}", pretty_path.display());
279 fs::write(path, actual).unwrap();
280 return;
281 }
282 assert_eq_text!(expected, actual, "file: {}", pretty_path.display());
283}
284
285/// Read file and normalize newlines.
286///
287/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
288///
289/// ```
290/// let s = "
291/// ";
292/// assert_eq!(s.as_bytes(), &[10]);
293/// ```
294///
295/// so this should always be correct.
296fn read_text(path: &Path) -> String {
297 fs::read_to_string(path)
298 .unwrap_or_else(|_| panic!("File at {:?} should be valid", path))
299 .replace("\r\n", "\n")
300}
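The helpers above were previously shared through test_utils and now live next to their only user. For orientation, an illustrative `dir_tests` call in the style of the existing parser tests (the `test_data_dir()` helper and the `parser/ok` directory name are assumptions):

```
#[test]
fn example_dir_test() {
    dir_tests(&test_data_dir(), &["parser/ok"], "txt", |text, path| {
        let parse = SourceFile::parse(text);
        assert!(parse.errors().is_empty(), "unexpected errors in {:?}", path);
        // The debug dump of the syntax tree is compared against the sibling `.txt`
        // file: created on the first run (which still fails), rewritten when
        // UPDATE_EXPECT is set, and diffed otherwise.
        format!("{:#?}", parse.tree().syntax())
    });
}
```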
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 6b17ce18b..6c311648a 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -44,7 +44,7 @@ pub struct Config {
44 pub root_path: AbsPathBuf, 44 pub root_path: AbsPathBuf,
45} 45}
46 46
47#[derive(Debug, Clone)] 47#[derive(Debug, Clone, Eq, PartialEq)]
48pub enum LinkedProject { 48pub enum LinkedProject {
49 ProjectManifest(ProjectManifest), 49 ProjectManifest(ProjectManifest),
50 InlineJsonProject(ProjectJson), 50 InlineJsonProject(ProjectJson),
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index b8aa1e5b5..b7b4edf66 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -27,7 +27,7 @@ use crate::{
27 Result, 27 Result,
28}; 28};
29 29
30#[derive(Eq, PartialEq)] 30#[derive(Eq, PartialEq, Copy, Clone)]
31pub(crate) enum Status { 31pub(crate) enum Status {
32 Loading, 32 Loading,
33 Ready, 33 Ready,
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index 25bcd80af..3cb532b62 100644
--- a/crates/rust-analyzer/src/handlers.rs
+++ b/crates/rust-analyzer/src/handlers.rs
@@ -415,7 +415,7 @@ pub(crate) fn handle_runnables(
415 let source_file = snap.analysis.parse(file_id)?; 415 let source_file = snap.analysis.parse(file_id)?;
416 algo::find_node_at_offset::<ast::MacroCall>(source_file.syntax(), offset) 416 algo::find_node_at_offset::<ast::MacroCall>(source_file.syntax(), offset)
417 .and_then(|it| it.path()?.segment()?.name_ref()) 417 .and_then(|it| it.path()?.segment()?.name_ref())
418 .map_or(false, |it| it.text() == "expect") 418 .map_or(false, |it| it.text() == "expect" || it.text() == "expect_file")
419 } 419 }
420 None => false, 420 None => false,
421 }; 421 };
@@ -1045,7 +1045,7 @@ pub(crate) fn handle_call_hierarchy_incoming(
1045 let item = params.item; 1045 let item = params.item;
1046 1046
1047 let doc = TextDocumentIdentifier::new(item.uri); 1047 let doc = TextDocumentIdentifier::new(item.uri);
1048 let frange = from_proto::file_range(&snap, doc, item.range)?; 1048 let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
1049 let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() }; 1049 let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
1050 1050
1051 let call_items = match snap.analysis.incoming_calls(fpos)? { 1051 let call_items = match snap.analysis.incoming_calls(fpos)? {
@@ -1080,7 +1080,7 @@ pub(crate) fn handle_call_hierarchy_outgoing(
1080 let item = params.item; 1080 let item = params.item;
1081 1081
1082 let doc = TextDocumentIdentifier::new(item.uri); 1082 let doc = TextDocumentIdentifier::new(item.uri);
1083 let frange = from_proto::file_range(&snap, doc, item.range)?; 1083 let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
1084 let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() }; 1084 let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
1085 1085
1086 let call_items = match snap.analysis.outgoing_calls(fpos)? { 1086 let call_items = match snap.analysis.outgoing_calls(fpos)? {
diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs
index 1befe678c..82207bbb8 100644
--- a/crates/rust-analyzer/src/lsp_ext.rs
+++ b/crates/rust-analyzer/src/lsp_ext.rs
@@ -14,12 +14,12 @@ impl Request for AnalyzerStatus {
14 const METHOD: &'static str = "rust-analyzer/analyzerStatus"; 14 const METHOD: &'static str = "rust-analyzer/analyzerStatus";
15} 15}
16 16
17pub enum CollectGarbage {} 17pub enum ReloadWorkspace {}
18 18
19impl Request for CollectGarbage { 19impl Request for ReloadWorkspace {
20 type Params = (); 20 type Params = ();
21 type Result = (); 21 type Result = ();
22 const METHOD: &'static str = "rust-analyzer/collectGarbage"; 22 const METHOD: &'static str = "rust-analyzer/reloadWorkspace";
23} 23}
24 24
25pub enum SyntaxTree {} 25pub enum SyntaxTree {}
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 9fd16ef3b..e03038b25 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -136,7 +136,7 @@ impl GlobalState {
136 log::info!("queued count = {}", queue_count); 136 log::info!("queued count = {}", queue_count);
137 } 137 }
138 138
139 let mut became_ready = false; 139 let prev_status = self.status;
140 match event { 140 match event {
141 Event::Lsp(msg) => match msg { 141 Event::Lsp(msg) => match msg {
142 lsp_server::Message::Request(req) => self.on_request(loop_start, req)?, 142 lsp_server::Message::Request(req) => self.on_request(loop_start, req)?,
@@ -168,22 +168,26 @@ impl GlobalState {
168 } 168 }
169 } 169 }
170 vfs::loader::Message::Progress { n_total, n_done } => { 170 vfs::loader::Message::Progress { n_total, n_done } => {
171 let state = if n_done == 0 { 171 if n_total == 0 {
172 Progress::Begin
173 } else if n_done < n_total {
174 Progress::Report
175 } else {
176 assert_eq!(n_done, n_total);
177 self.status = Status::Ready; 172 self.status = Status::Ready;
178 became_ready = true; 173 } else {
179 Progress::End 174 let state = if n_done == 0 {
180 }; 175 self.status = Status::Loading;
181 self.report_progress( 176 Progress::Begin
182 "roots scanned", 177 } else if n_done < n_total {
183 state, 178 Progress::Report
184 Some(format!("{}/{}", n_done, n_total)), 179 } else {
185 Some(Progress::percentage(n_done, n_total)), 180 assert_eq!(n_done, n_total);
186 ) 181 self.status = Status::Ready;
182 Progress::End
183 };
184 self.report_progress(
185 "roots scanned",
186 state,
187 Some(format!("{}/{}", n_done, n_total)),
188 Some(Progress::percentage(n_done, n_total)),
189 )
190 }
187 } 191 }
188 }, 192 },
189 Event::Flycheck(task) => match task { 193 Event::Flycheck(task) => match task {
@@ -231,13 +235,13 @@ impl GlobalState {
231 } 235 }
232 236
233 let state_changed = self.process_changes(); 237 let state_changed = self.process_changes();
234 if became_ready { 238 if prev_status == Status::Loading && self.status == Status::Ready {
235 if let Some(flycheck) = &self.flycheck { 239 if let Some(flycheck) = &self.flycheck {
236 flycheck.handle.update(); 240 flycheck.handle.update();
237 } 241 }
238 } 242 }
239 243
240 if self.status == Status::Ready && (state_changed || became_ready) { 244 if self.status == Status::Ready && (state_changed || prev_status == Status::Loading) {
241 let subscriptions = self 245 let subscriptions = self
242 .mem_docs 246 .mem_docs
243 .iter() 247 .iter()
@@ -274,7 +278,7 @@ impl GlobalState {
274 self.register_request(&req, request_received); 278 self.register_request(&req, request_received);
275 279
276 RequestDispatcher { req: Some(req), global_state: self } 280 RequestDispatcher { req: Some(req), global_state: self }
277 .on_sync::<lsp_ext::CollectGarbage>(|s, ()| Ok(s.analysis_host.collect_garbage()))? 281 .on_sync::<lsp_ext::ReloadWorkspace>(|s, ()| Ok(s.reload()))?
278 .on_sync::<lsp_ext::JoinLines>(|s, p| handlers::handle_join_lines(s.snapshot(), p))? 282 .on_sync::<lsp_ext::JoinLines>(|s, p| handlers::handle_join_lines(s.snapshot(), p))?
279 .on_sync::<lsp_ext::OnEnter>(|s, p| handlers::handle_on_enter(s.snapshot(), p))? 283 .on_sync::<lsp_ext::OnEnter>(|s, p| handlers::handle_on_enter(s.snapshot(), p))?
280 .on_sync::<lsp_types::request::Shutdown>(|_, ()| Ok(()))? 284 .on_sync::<lsp_types::request::Shutdown>(|_, ()| Ok(()))?
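The progress handling above replaces the `became_ready` flag with a comparison against the previous status and short-circuits when there is nothing to scan. The mapping it implements, pulled out as a stand-alone sketch (this helper does not exist in the codebase):

```
fn vfs_progress(n_done: usize, n_total: usize) -> (Option<Status>, Option<Progress>) {
    if n_total == 0 {
        (Some(Status::Ready), None)          // nothing to load: no progress UI at all
    } else if n_done == 0 {
        (Some(Status::Loading), Some(Progress::Begin))
    } else if n_done < n_total {
        (None, Some(Progress::Report))       // status unchanged while loading
    } else {
        (Some(Status::Ready), Some(Progress::End))
    }
}
```

Flycheck then runs exactly when the status transitions from Loading to Ready, rather than relying on a flag set inside the match.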
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index ec71f8b29..0c1fd1b8b 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -19,11 +19,14 @@ impl GlobalState {
19 if self.config.lru_capacity != old_config.lru_capacity { 19 if self.config.lru_capacity != old_config.lru_capacity {
20 self.analysis_host.update_lru_capacity(old_config.lru_capacity); 20 self.analysis_host.update_lru_capacity(old_config.lru_capacity);
21 } 21 }
22 if self.config.flycheck != old_config.flycheck { 22 if self.config.linked_projects != old_config.linked_projects {
23 self.reload()
24 } else if self.config.flycheck != old_config.flycheck {
23 self.reload_flycheck(); 25 self.reload_flycheck();
24 } 26 }
25 } 27 }
26 pub(crate) fn reload(&mut self) { 28 pub(crate) fn reload(&mut self) {
29 log::info!("reloading projects: {:?}", self.config.linked_projects);
27 let workspaces = { 30 let workspaces = {
28 if self.config.linked_projects.is_empty() 31 if self.config.linked_projects.is_empty()
29 && self.config.notifications.cargo_toml_not_found 32 && self.config.notifications.cargo_toml_not_found
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index a0a58f689..95dd8e408 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -352,7 +352,7 @@ pub(crate) fn folding_range(
352 let kind = match fold.kind { 352 let kind = match fold.kind {
353 FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment), 353 FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
354 FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports), 354 FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
355 FoldKind::Mods | FoldKind::Block => None, 355 FoldKind::Mods | FoldKind::Block | FoldKind::ArgList => None,
356 }; 356 };
357 357
358 let range = range(line_index, fold.range); 358 let range = range(line_index, fold.range);
@@ -685,32 +685,27 @@ pub(crate) fn runnable(
685 685
686#[cfg(test)] 686#[cfg(test)]
687mod tests { 687mod tests {
688 use test_utils::extract_ranges; 688 use ra_ide::Analysis;
689 689
690 use super::*; 690 use super::*;
691 691
692 #[test] 692 #[test]
693 fn conv_fold_line_folding_only_fixup() { 693 fn conv_fold_line_folding_only_fixup() {
694 let text = r#"<fold>mod a; 694 let text = r#"mod a;
695mod b; 695mod b;
696mod c;</fold> 696mod c;
697 697
698fn main() <fold>{ 698fn main() {
699 if cond <fold>{ 699 if cond {
700 a::do_a(); 700 a::do_a();
701 }</fold> else <fold>{ 701 } else {
702 b::do_b(); 702 b::do_b();
703 }</fold> 703 }
704}</fold>"#; 704}"#;
705 705
706 let (ranges, text) = extract_ranges(text, "fold"); 706 let (analysis, file_id) = Analysis::from_single_file(text.to_string());
707 assert_eq!(ranges.len(), 4); 707 let folds = analysis.folding_ranges(file_id).unwrap();
708 let folds = vec![ 708 assert_eq!(folds.len(), 4);
709 Fold { range: ranges[0], kind: FoldKind::Mods },
710 Fold { range: ranges[1], kind: FoldKind::Block },
711 Fold { range: ranges[2], kind: FoldKind::Block },
712 Fold { range: ranges[3], kind: FoldKind::Block },
713 ];
714 709
715 let line_index = LineIndex::new(&text); 710 let line_index = LineIndex::new(&text);
716 let converted: Vec<lsp_types::FoldingRange> = 711 let converted: Vec<lsp_types::FoldingRange> =
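
The rewritten test drops the `<fold>` marker fixture in favour of computing folds directly from an in-memory `Analysis`. The same pattern works for any single-file folding check; a minimal sketch, assuming the `ra_ide` API used by this commit (`Analysis::from_single_file` and `folding_ranges`):

    // Build a throwaway analysis over one file and count its folds.
    use ra_ide::Analysis;

    fn fold_count(text: &str) -> usize {
        let (analysis, file_id) = Analysis::from_single_file(text.to_string());
        // folding_ranges returns a Cancelable<Vec<Fold>>; unwrapping is fine in tests.
        analysis.folding_ranges(file_id).unwrap().len()
    }
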
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs
index e32a0a0c3..e4aa894ac 100644
--- a/crates/test_utils/src/lib.rs
+++ b/crates/test_utils/src/lib.rs
@@ -13,7 +13,7 @@ mod fixture;
13use std::{ 13use std::{
14 convert::{TryFrom, TryInto}, 14 convert::{TryFrom, TryInto},
15 env, fs, 15 env, fs,
16 path::{Path, PathBuf}, 16 path::PathBuf,
17}; 17};
18 18
19use serde_json::Value; 19use serde_json::Value;
@@ -118,8 +118,8 @@ pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) {
118} 118}
119 119
120/// Extracts ranges, marked with `<tag> </tag>` pairs from the `text` 120/// Extracts ranges, marked with `<tag> </tag>` pairs from the `text`
121pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) { 121pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option<String>)>, String) {
122 let open = format!("<{}>", tag); 122 let open = format!("<{}", tag);
123 let close = format!("</{}>", tag); 123 let close = format!("</{}>", tag);
124 let mut ranges = Vec::new(); 124 let mut ranges = Vec::new();
125 let mut res = String::new(); 125 let mut res = String::new();
@@ -134,22 +134,35 @@ pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) {
134 res.push_str(&text[..i]); 134 res.push_str(&text[..i]);
135 text = &text[i..]; 135 text = &text[i..];
136 if text.starts_with(&open) { 136 if text.starts_with(&open) {
137 text = &text[open.len()..]; 137 let close_open = text.find('>').unwrap();
138 let attr = text[open.len()..close_open].trim();
139 let attr = if attr.is_empty() { None } else { Some(attr.to_string()) };
140 text = &text[close_open + '>'.len_utf8()..];
138 let from = TextSize::of(&res); 141 let from = TextSize::of(&res);
139 stack.push(from); 142 stack.push((from, attr));
140 } else if text.starts_with(&close) { 143 } else if text.starts_with(&close) {
141 text = &text[close.len()..]; 144 text = &text[close.len()..];
142 let from = stack.pop().unwrap_or_else(|| panic!("unmatched </{}>", tag)); 145 let (from, attr) =
146 stack.pop().unwrap_or_else(|| panic!("unmatched </{}>", tag));
143 let to = TextSize::of(&res); 147 let to = TextSize::of(&res);
144 ranges.push(TextRange::new(from, to)); 148 ranges.push((TextRange::new(from, to), attr));
149 } else {
150 res.push('<');
151 text = &text['<'.len_utf8()..];
145 } 152 }
146 } 153 }
147 } 154 }
148 } 155 }
149 assert!(stack.is_empty(), "unmatched <{}>", tag); 156 assert!(stack.is_empty(), "unmatched <{}>", tag);
150 ranges.sort_by_key(|r| (r.start(), r.end())); 157 ranges.sort_by_key(|r| (r.0.start(), r.0.end()));
151 (ranges, res) 158 (ranges, res)
152} 159}
160#[test]
161fn test_extract_tags() {
162 let (tags, text) = extract_tags(r#"<tag fn>fn <tag>main</tag>() {}</tag>"#, "tag");
163 let actual = tags.into_iter().map(|(range, attr)| (&text[range], attr)).collect::<Vec<_>>();
164 assert_eq!(actual, vec![("fn main() {}", Some("fn".into())), ("main", None),]);
165}
153 166
154/// Inserts `<|>` marker into the `text` at `offset`. 167/// Inserts `<|>` marker into the `text` at `offset`.
155pub fn add_cursor(text: &str, offset: TextSize) -> String { 168pub fn add_cursor(text: &str, offset: TextSize) -> String {
@@ -299,85 +312,6 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a
299 } 312 }
300} 313}
301 314
302/// Calls callback `f` with input code and file paths for each `.rs` file in `test_data_dir`
303/// subdirectories defined by `paths`.
304///
305/// If the content of the matching output file differs from the output of `f()`
306/// the test will fail.
307///
308/// If there is no matching output file it will be created and filled with the
309/// output of `f()`, but the test will fail.
310pub fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], outfile_extension: &str, f: F)
311where
312 F: Fn(&str, &Path) -> String,
313{
314 for (path, input_code) in collect_rust_files(test_data_dir, paths) {
315 let actual = f(&input_code, &path);
316 let path = path.with_extension(outfile_extension);
317 if !path.exists() {
318 println!("\nfile: {}", path.display());
319 println!("No .txt file with expected result, creating...\n");
320 println!("{}\n{}", input_code, actual);
321 fs::write(&path, &actual).unwrap();
322 panic!("No expected result");
323 }
324 let expected = read_text(&path);
325 assert_equal_text(&expected, &actual, &path);
326 }
327}
328
329/// Collects all `.rs` files from `dir` subdirectories defined by `paths`.
330pub fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {
331 paths
332 .iter()
333 .flat_map(|path| {
334 let path = root_dir.to_owned().join(path);
335 rust_files_in_dir(&path).into_iter()
336 })
337 .map(|path| {
338 let text = read_text(&path);
339 (path, text)
340 })
341 .collect()
342}
343
344/// Collects paths to all `.rs` files from `dir` in a sorted `Vec<PathBuf>`.
345fn rust_files_in_dir(dir: &Path) -> Vec<PathBuf> {
346 let mut acc = Vec::new();
347 for file in fs::read_dir(&dir).unwrap() {
348 let file = file.unwrap();
349 let path = file.path();
350 if path.extension().unwrap_or_default() == "rs" {
351 acc.push(path);
352 }
353 }
354 acc.sort();
355 acc
356}
357
358/// Returns the path to the root directory of `rust-analyzer` project.
359pub fn project_dir() -> PathBuf {
360 let dir = env!("CARGO_MANIFEST_DIR");
361 PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
362}
363
364/// Read file and normalize newlines.
365///
366/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
367///
368/// ```
369/// let s = "
370/// ";
371/// assert_eq!(s.as_bytes(), &[10]);
372/// ```
373///
374/// so this should always be correct.
375pub fn read_text(path: &Path) -> String {
376 fs::read_to_string(path)
377 .unwrap_or_else(|_| panic!("File at {:?} should be valid", path))
378 .replace("\r\n", "\n")
379}
380
381/// Returns `false` if slow tests should not run, otherwise returns `true` and 315/// Returns `false` if slow tests should not run, otherwise returns `true` and
382/// also creates a file at `./target/.slow_tests_cookie` which serves as a flag 316/// also creates a file at `./target/.slow_tests_cookie` which serves as a flag
383/// that slow tests did run. 317/// that slow tests did run.
@@ -392,25 +326,8 @@ pub fn skip_slow_tests() -> bool {
392 should_skip 326 should_skip
393} 327}
394 328
395/// Asserts that `expected` and `actual` strings are equal. If they differ only 329/// Returns the path to the root directory of `rust-analyzer` project.
396/// in trailing or leading whitespace the test won't fail and 330pub fn project_dir() -> PathBuf {
397/// the contents of `actual` will be written to the file located at `path`. 331 let dir = env!("CARGO_MANIFEST_DIR");
398fn assert_equal_text(expected: &str, actual: &str, path: &Path) { 332 PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
399 if expected == actual {
400 return;
401 }
402 let dir = project_dir();
403 let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
404 if expected.trim() == actual.trim() {
405 println!("whitespace difference, rewriting");
406 println!("file: {}\n", pretty_path.display());
407 fs::write(path, actual).unwrap();
408 return;
409 }
410 if env::var("UPDATE_EXPECTATIONS").is_ok() {
411 println!("rewriting {}", pretty_path.display());
412 fs::write(path, actual).unwrap();
413 return;
414 }
415 assert_eq_text!(expected, actual, "file: {}", pretty_path.display());
416} 333}
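
For reference, the behavioural difference between the old extract_ranges and the new extract_tags: the open tag may now carry an attribute after its name, and a stray `<` that does not begin a tag is copied through instead of tripping up the parser. A small usage sketch (the function comes from the new test_utils code above; the input string is made up):

    use test_utils::extract_tags;

    fn demo() {
        // "<outer attr>" opens a tag with attribute "attr"; the bare '<' in
        // "a < b" is kept as ordinary text; the inner "<outer>" has no attribute.
        let (tags, text) = extract_tags("<outer attr>a < b <outer>c</outer></outer>", "outer");
        assert_eq!(text, "a < b c");
        assert_eq!(tags.len(), 2);
        assert_eq!(tags[0].1.as_deref(), Some("attr")); // whole string, attribute "attr"
        assert_eq!(tags[1].1, None);                    // just "c", no attribute
    }
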