Diffstat (limited to 'crates')
51 files changed, 1405 insertions, 492 deletions
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 43f0d664b..2ab65ab99 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -101,7 +101,6 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> { | |||
101 | Some(assist) | 101 | Some(assist) |
102 | } | 102 | } |
103 | 103 | ||
104 | #[allow(dead_code)] // will be used for auto import assist with multiple actions | ||
105 | pub(crate) fn add_assist_group( | 104 | pub(crate) fn add_assist_group( |
106 | self, | 105 | self, |
107 | id: AssistId, | 106 | id: AssistId, |
@@ -168,7 +167,6 @@ pub(crate) struct ActionBuilder { | |||
168 | } | 167 | } |
169 | 168 | ||
170 | impl ActionBuilder { | 169 | impl ActionBuilder { |
171 | #[allow(dead_code)] | ||
172 | /// Adds a custom label to the action, if it needs to be different from the assist label | 170 | /// Adds a custom label to the action, if it needs to be different from the assist label |
173 | pub(crate) fn label(&mut self, label: impl Into<String>) { | 171 | pub(crate) fn label(&mut self, label: impl Into<String>) { |
174 | self.label = Some(label.into()) | 172 | self.label = Some(label.into()) |
diff --git a/crates/ra_assists/src/assists/auto_import.rs b/crates/ra_assists/src/assists/auto_import.rs
new file mode 100644
index 000000000..69126a1c9
--- /dev/null
+++ b/crates/ra_assists/src/assists/auto_import.rs
@@ -0,0 +1,210 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::{ | ||
3 | ast::{self, AstNode}, | ||
4 | SmolStr, | ||
5 | SyntaxKind::USE_ITEM, | ||
6 | SyntaxNode, | ||
7 | }; | ||
8 | |||
9 | use crate::{ | ||
10 | assist_ctx::{ActionBuilder, Assist, AssistCtx}, | ||
11 | auto_import_text_edit, AssistId, ImportsLocator, | ||
12 | }; | ||
13 | |||
14 | // Assist: auto_import | ||
15 | // | ||
16 | // If the name is unresolved, provides all possible imports for it. | ||
17 | // | ||
18 | // ``` | ||
19 | // fn main() { | ||
20 | // let map = HashMap<|>::new(); | ||
21 | // } | ||
22 | // ``` | ||
23 | // -> | ||
24 | // ``` | ||
25 | // use std::collections::HashMap; | ||
26 | // | ||
27 | // fn main() { | ||
28 | // let map = HashMap<|>::new(); | ||
29 | // } | ||
30 | // ``` | ||
31 | pub(crate) fn auto_import<F: ImportsLocator>( | ||
32 | ctx: AssistCtx<impl HirDatabase>, | ||
33 | imports_locator: &mut F, | ||
34 | ) -> Option<Assist> { | ||
35 | let path_to_import: ast::Path = ctx.find_node_at_offset()?; | ||
36 | let path_to_import_syntax = path_to_import.syntax(); | ||
37 | if path_to_import_syntax.ancestors().find(|ancestor| ancestor.kind() == USE_ITEM).is_some() { | ||
38 | return None; | ||
39 | } | ||
40 | |||
41 | let module = path_to_import_syntax.ancestors().find_map(ast::Module::cast); | ||
42 | let position = match module.and_then(|it| it.item_list()) { | ||
43 | Some(item_list) => item_list.syntax().clone(), | ||
44 | None => { | ||
45 | let current_file = path_to_import_syntax.ancestors().find_map(ast::SourceFile::cast)?; | ||
46 | current_file.syntax().clone() | ||
47 | } | ||
48 | }; | ||
49 | let source_analyzer = ctx.source_analyzer(&position, None); | ||
50 | let module_with_name_to_import = source_analyzer.module()?; | ||
51 | if source_analyzer.resolve_path(ctx.db, &path_to_import).is_some() { | ||
52 | return None; | ||
53 | } | ||
54 | |||
55 | let proposed_imports = imports_locator | ||
56 | .find_imports(&path_to_import_syntax.to_string()) | ||
57 | .into_iter() | ||
58 | .filter_map(|module_def| module_with_name_to_import.find_use_path(ctx.db, module_def)) | ||
59 | .filter(|use_path| !use_path.segments.is_empty()) | ||
60 | .take(20) | ||
61 | .map(|import| import.to_string()) | ||
62 | .collect::<std::collections::BTreeSet<_>>(); | ||
63 | if proposed_imports.is_empty() { | ||
64 | return None; | ||
65 | } | ||
66 | |||
67 | ctx.add_assist_group(AssistId("auto_import"), "auto import", || { | ||
68 | proposed_imports | ||
69 | .into_iter() | ||
70 | .map(|import| import_to_action(import, &position, &path_to_import_syntax)) | ||
71 | .collect() | ||
72 | }) | ||
73 | } | ||
74 | |||
75 | fn import_to_action(import: String, position: &SyntaxNode, anchor: &SyntaxNode) -> ActionBuilder { | ||
76 | let mut action_builder = ActionBuilder::default(); | ||
77 | action_builder.label(format!("Import `{}`", &import)); | ||
78 | auto_import_text_edit( | ||
79 | position, | ||
80 | anchor, | ||
81 | &[SmolStr::new(import)], | ||
82 | action_builder.text_edit_builder(), | ||
83 | ); | ||
84 | action_builder | ||
85 | } | ||
86 | |||
87 | #[cfg(test)] | ||
88 | mod tests { | ||
89 | use super::*; | ||
90 | use crate::helpers::{ | ||
91 | check_assist_with_imports_locator, check_assist_with_imports_locator_not_applicable, | ||
92 | TestImportsLocator, | ||
93 | }; | ||
94 | |||
95 | #[test] | ||
96 | fn applicable_when_found_an_import() { | ||
97 | check_assist_with_imports_locator( | ||
98 | auto_import, | ||
99 | TestImportsLocator::new, | ||
100 | r" | ||
101 | <|>PubStruct | ||
102 | |||
103 | pub mod PubMod { | ||
104 | pub struct PubStruct; | ||
105 | } | ||
106 | ", | ||
107 | r" | ||
108 | <|>use PubMod::PubStruct; | ||
109 | |||
110 | PubStruct | ||
111 | |||
112 | pub mod PubMod { | ||
113 | pub struct PubStruct; | ||
114 | } | ||
115 | ", | ||
116 | ); | ||
117 | } | ||
118 | |||
119 | #[test] | ||
120 | fn applicable_when_found_multiple_imports() { | ||
121 | check_assist_with_imports_locator( | ||
122 | auto_import, | ||
123 | TestImportsLocator::new, | ||
124 | r" | ||
125 | PubSt<|>ruct | ||
126 | |||
127 | pub mod PubMod1 { | ||
128 | pub struct PubStruct; | ||
129 | } | ||
130 | pub mod PubMod2 { | ||
131 | pub struct PubStruct; | ||
132 | } | ||
133 | pub mod PubMod3 { | ||
134 | pub struct PubStruct; | ||
135 | } | ||
136 | ", | ||
137 | r" | ||
138 | use PubMod1::PubStruct; | ||
139 | |||
140 | PubSt<|>ruct | ||
141 | |||
142 | pub mod PubMod1 { | ||
143 | pub struct PubStruct; | ||
144 | } | ||
145 | pub mod PubMod2 { | ||
146 | pub struct PubStruct; | ||
147 | } | ||
148 | pub mod PubMod3 { | ||
149 | pub struct PubStruct; | ||
150 | } | ||
151 | ", | ||
152 | ); | ||
153 | } | ||
154 | |||
155 | #[test] | ||
156 | fn not_applicable_for_already_imported_types() { | ||
157 | check_assist_with_imports_locator_not_applicable( | ||
158 | auto_import, | ||
159 | TestImportsLocator::new, | ||
160 | r" | ||
161 | use PubMod::PubStruct; | ||
162 | |||
163 | PubStruct<|> | ||
164 | |||
165 | pub mod PubMod { | ||
166 | pub struct PubStruct; | ||
167 | } | ||
168 | ", | ||
169 | ); | ||
170 | } | ||
171 | |||
172 | #[test] | ||
173 | fn not_applicable_for_types_with_private_paths() { | ||
174 | check_assist_with_imports_locator_not_applicable( | ||
175 | auto_import, | ||
176 | TestImportsLocator::new, | ||
177 | r" | ||
178 | PrivateStruct<|> | ||
179 | |||
180 | pub mod PubMod { | ||
181 | struct PrivateStruct; | ||
182 | } | ||
183 | ", | ||
184 | ); | ||
185 | } | ||
186 | |||
187 | #[test] | ||
188 | fn not_applicable_when_no_imports_found() { | ||
189 | check_assist_with_imports_locator_not_applicable( | ||
190 | auto_import, | ||
191 | TestImportsLocator::new, | ||
192 | " | ||
193 | PubStruct<|>", | ||
194 | ); | ||
195 | } | ||
196 | |||
197 | #[test] | ||
198 | fn not_applicable_in_import_statements() { | ||
199 | check_assist_with_imports_locator_not_applicable( | ||
200 | auto_import, | ||
201 | TestImportsLocator::new, | ||
202 | r" | ||
203 | use PubStruct<|>; | ||
204 | |||
205 | pub mod PubMod { | ||
206 | pub struct PubStruct; | ||
207 | }", | ||
208 | ); | ||
209 | } | ||
210 | } | ||
diff --git a/crates/ra_assists/src/assists/inline_local_variable.rs b/crates/ra_assists/src/assists/inline_local_variable.rs
index d0c5c3b8c..83527d904 100644
--- a/crates/ra_assists/src/assists/inline_local_variable.rs
+++ b/crates/ra_assists/src/assists/inline_local_variable.rs
@@ -47,6 +47,9 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx<impl HirDatabase>) -> Option< | |||
47 | }; | 47 | }; |
48 | let analyzer = ctx.source_analyzer(bind_pat.syntax(), None); | 48 | let analyzer = ctx.source_analyzer(bind_pat.syntax(), None); |
49 | let refs = analyzer.find_all_refs(&bind_pat); | 49 | let refs = analyzer.find_all_refs(&bind_pat); |
50 | if refs.is_empty() { | ||
51 | return None; | ||
52 | }; | ||
50 | 53 | ||
51 | let mut wrap_in_parens = vec![true; refs.len()]; | 54 | let mut wrap_in_parens = vec![true; refs.len()]; |
52 | 55 | ||
@@ -645,4 +648,16 @@ fn foo() { | |||
645 | }", | 648 | }", |
646 | ); | 649 | ); |
647 | } | 650 | } |
651 | |||
652 | #[test] | ||
653 | fn test_not_applicable_if_variable_unused() { | ||
654 | check_assist_not_applicable( | ||
655 | inline_local_variable, | ||
656 | " | ||
657 | fn foo() { | ||
658 | let <|>a = 0; | ||
659 | } | ||
660 | ", | ||
661 | ) | ||
662 | } | ||
648 | } | 663 | } |
diff --git a/crates/ra_assists/src/doc_tests.rs b/crates/ra_assists/src/doc_tests.rs
index 5dc1ee233..65d51428b 100644
--- a/crates/ra_assists/src/doc_tests.rs
+++ b/crates/ra_assists/src/doc_tests.rs
@@ -11,6 +11,10 @@ use test_utils::{assert_eq_text, extract_range_or_offset}; | |||
11 | use crate::test_db::TestDB; | 11 | use crate::test_db::TestDB; |
12 | 12 | ||
13 | fn check(assist_id: &str, before: &str, after: &str) { | 13 | fn check(assist_id: &str, before: &str, after: &str) { |
14 | // FIXME: we cannot get the imports search functionality here yet, but we still need to generate a test and docs for the assist | ||
15 | if assist_id == "auto_import" { | ||
16 | return; | ||
17 | } | ||
14 | let (selection, before) = extract_range_or_offset(before); | 18 | let (selection, before) = extract_range_or_offset(before); |
15 | let (db, file_id) = TestDB::with_single_file(&before); | 19 | let (db, file_id) = TestDB::with_single_file(&before); |
16 | let frange = FileRange { file_id, range: selection.into() }; | 20 | let frange = FileRange { file_id, range: selection.into() }; |
diff --git a/crates/ra_assists/src/doc_tests/generated.rs b/crates/ra_assists/src/doc_tests/generated.rs
index 7d84dc8fb..ec4587ce7 100644
--- a/crates/ra_assists/src/doc_tests/generated.rs
+++ b/crates/ra_assists/src/doc_tests/generated.rs
@@ -215,6 +215,25 @@ fn main() { | |||
215 | } | 215 | } |
216 | 216 | ||
217 | #[test] | 217 | #[test] |
218 | fn doctest_auto_import() { | ||
219 | check( | ||
220 | "auto_import", | ||
221 | r#####" | ||
222 | fn main() { | ||
223 | let map = HashMap<|>::new(); | ||
224 | } | ||
225 | "#####, | ||
226 | r#####" | ||
227 | use std::collections::HashMap; | ||
228 | |||
229 | fn main() { | ||
230 | let map = HashMap<|>::new(); | ||
231 | } | ||
232 | "#####, | ||
233 | ) | ||
234 | } | ||
235 | |||
236 | #[test] | ||
218 | fn doctest_change_visibility() { | 237 | fn doctest_change_visibility() { |
219 | check( | 238 | check( |
220 | "change_visibility", | 239 | "change_visibility", |
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs
index 3337805a5..625ebc4a2 100644
--- a/crates/ra_assists/src/lib.rs
+++ b/crates/ra_assists/src/lib.rs
@@ -14,7 +14,7 @@ mod test_db; | |||
14 | pub mod ast_transform; | 14 | pub mod ast_transform; |
15 | 15 | ||
16 | use either::Either; | 16 | use either::Either; |
17 | use hir::db::HirDatabase; | 17 | use hir::{db::HirDatabase, ModuleDef}; |
18 | use ra_db::FileRange; | 18 | use ra_db::FileRange; |
19 | use ra_syntax::{TextRange, TextUnit}; | 19 | use ra_syntax::{TextRange, TextUnit}; |
20 | use ra_text_edit::TextEdit; | 20 | use ra_text_edit::TextEdit; |
@@ -77,6 +77,51 @@ where | |||
77 | }) | 77 | }) |
78 | } | 78 | } |
79 | 79 | ||
80 | /// Functionality for locating imports for a given name. | ||
81 | /// | ||
82 | /// Currently this has to be a trait, with the real implementation provided by the ra_ide_api crate, | ||
83 | /// because the search functionality lives there. | ||
84 | /// Later, this trait should be removed completely and the search functionality moved to a separate crate | ||
85 | /// that is accessible from the ra_assists crate. | ||
86 | pub trait ImportsLocator { | ||
87 | /// Finds all definitions with the given name that are candidates for being imported. | ||
88 | fn find_imports(&mut self, name_to_import: &str) -> Vec<ModuleDef>; | ||
89 | } | ||
90 | |||
91 | /// Return all the assists applicable at the given position | ||
92 | /// and additional assists that need the imports locator functionality to work. | ||
93 | /// | ||
94 | /// Assists are returned in the "resolved" state, that is with edit fully | ||
95 | /// computed. | ||
96 | pub fn assists_with_imports_locator<H, F>( | ||
97 | db: &H, | ||
98 | range: FileRange, | ||
99 | mut imports_locator: F, | ||
100 | ) -> Vec<ResolvedAssist> | ||
101 | where | ||
102 | H: HirDatabase + 'static, | ||
103 | F: ImportsLocator, | ||
104 | { | ||
105 | AssistCtx::with_ctx(db, range, true, |ctx| { | ||
106 | let mut assists = assists::all() | ||
107 | .iter() | ||
108 | .map(|f| f(ctx.clone())) | ||
109 | .chain( | ||
110 | assists::all_with_imports_locator() | ||
111 | .iter() | ||
112 | .map(|f| f(ctx.clone(), &mut imports_locator)), | ||
113 | ) | ||
114 | .filter_map(std::convert::identity) | ||
115 | .map(|a| match a { | ||
116 | Assist::Resolved { assist } => assist, | ||
117 | Assist::Unresolved { .. } => unreachable!(), | ||
118 | }) | ||
119 | .collect(); | ||
120 | sort_assists(&mut assists); | ||
121 | assists | ||
122 | }) | ||
123 | } | ||
124 | |||
80 | /// Return all the assists applicable at the given position. | 125 | /// Return all the assists applicable at the given position. |
81 | /// | 126 | /// |
82 | /// Assists are returned in the "resolved" state, that is with edit fully | 127 | /// Assists are returned in the "resolved" state, that is with edit fully |
@@ -85,8 +130,6 @@ pub fn assists<H>(db: &H, range: FileRange) -> Vec<ResolvedAssist> | |||
85 | where | 130 | where |
86 | H: HirDatabase + 'static, | 131 | H: HirDatabase + 'static, |
87 | { | 132 | { |
88 | use std::cmp::Ordering; | ||
89 | |||
90 | AssistCtx::with_ctx(db, range, true, |ctx| { | 133 | AssistCtx::with_ctx(db, range, true, |ctx| { |
91 | let mut a = assists::all() | 134 | let mut a = assists::all() |
92 | .iter() | 135 | .iter() |
@@ -95,19 +138,24 @@ where | |||
95 | Assist::Resolved { assist } => assist, | 138 | Assist::Resolved { assist } => assist, |
96 | Assist::Unresolved { .. } => unreachable!(), | 139 | Assist::Unresolved { .. } => unreachable!(), |
97 | }) | 140 | }) |
98 | .collect::<Vec<_>>(); | 141 | .collect(); |
99 | a.sort_by(|a, b| match (a.get_first_action().target, b.get_first_action().target) { | 142 | sort_assists(&mut a); |
100 | (Some(a), Some(b)) => a.len().cmp(&b.len()), | ||
101 | (Some(_), None) => Ordering::Less, | ||
102 | (None, Some(_)) => Ordering::Greater, | ||
103 | (None, None) => Ordering::Equal, | ||
104 | }); | ||
105 | a | 143 | a |
106 | }) | 144 | }) |
107 | } | 145 | } |
108 | 146 | ||
147 | fn sort_assists(assists: &mut Vec<ResolvedAssist>) { | ||
148 | use std::cmp::Ordering; | ||
149 | assists.sort_by(|a, b| match (a.get_first_action().target, b.get_first_action().target) { | ||
150 | (Some(a), Some(b)) => a.len().cmp(&b.len()), | ||
151 | (Some(_), None) => Ordering::Less, | ||
152 | (None, Some(_)) => Ordering::Greater, | ||
153 | (None, None) => Ordering::Equal, | ||
154 | }); | ||
155 | } | ||
156 | |||
109 | mod assists { | 157 | mod assists { |
110 | use crate::{Assist, AssistCtx}; | 158 | use crate::{Assist, AssistCtx, ImportsLocator}; |
111 | use hir::db::HirDatabase; | 159 | use hir::db::HirDatabase; |
112 | 160 | ||
113 | mod add_derive; | 161 | mod add_derive; |
@@ -116,6 +164,7 @@ mod assists { | |||
116 | mod add_custom_impl; | 164 | mod add_custom_impl; |
117 | mod add_new; | 165 | mod add_new; |
118 | mod apply_demorgan; | 166 | mod apply_demorgan; |
167 | mod auto_import; | ||
119 | mod invert_if; | 168 | mod invert_if; |
120 | mod flip_comma; | 169 | mod flip_comma; |
121 | mod flip_binexpr; | 170 | mod flip_binexpr; |
@@ -168,15 +217,69 @@ mod assists { | |||
168 | early_return::convert_to_guarded_return, | 217 | early_return::convert_to_guarded_return, |
169 | ] | 218 | ] |
170 | } | 219 | } |
220 | |||
221 | pub(crate) fn all_with_imports_locator<'a, DB: HirDatabase, F: ImportsLocator>( | ||
222 | ) -> &'a [fn(AssistCtx<DB>, &mut F) -> Option<Assist>] { | ||
223 | &[auto_import::auto_import] | ||
224 | } | ||
171 | } | 225 | } |
172 | 226 | ||
173 | #[cfg(test)] | 227 | #[cfg(test)] |
174 | mod helpers { | 228 | mod helpers { |
175 | use ra_db::{fixture::WithFixture, FileRange}; | 229 | use hir::db::DefDatabase; |
230 | use ra_db::{fixture::WithFixture, FileId, FileRange}; | ||
176 | use ra_syntax::TextRange; | 231 | use ra_syntax::TextRange; |
177 | use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; | 232 | use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; |
178 | 233 | ||
179 | use crate::{test_db::TestDB, Assist, AssistCtx}; | 234 | use crate::{test_db::TestDB, Assist, AssistCtx, ImportsLocator}; |
235 | use std::sync::Arc; | ||
236 | |||
237 | // FIXME remove the `ModuleDefId` reexport from `ra_hir` when this gets removed. | ||
238 | pub(crate) struct TestImportsLocator { | ||
239 | db: Arc<TestDB>, | ||
240 | test_file_id: FileId, | ||
241 | } | ||
242 | |||
243 | impl TestImportsLocator { | ||
244 | pub(crate) fn new(db: Arc<TestDB>, test_file_id: FileId) -> Self { | ||
245 | TestImportsLocator { db, test_file_id } | ||
246 | } | ||
247 | } | ||
248 | |||
249 | impl ImportsLocator for TestImportsLocator { | ||
250 | fn find_imports(&mut self, name_to_import: &str) -> Vec<hir::ModuleDef> { | ||
251 | let crate_def_map = self.db.crate_def_map(self.db.test_crate()); | ||
252 | let mut findings = Vec::new(); | ||
253 | |||
254 | let mut module_ids_to_process = | ||
255 | crate_def_map.modules_for_file(self.test_file_id).collect::<Vec<_>>(); | ||
256 | |||
257 | while !module_ids_to_process.is_empty() { | ||
258 | let mut more_ids_to_process = Vec::new(); | ||
259 | for local_module_id in module_ids_to_process.drain(..) { | ||
260 | for (name, namespace_data) in | ||
261 | crate_def_map[local_module_id].scope.entries_without_primitives() | ||
262 | { | ||
263 | let found_a_match = &name.to_string() == name_to_import; | ||
264 | vec![namespace_data.types, namespace_data.values] | ||
265 | .into_iter() | ||
266 | .filter_map(std::convert::identity) | ||
267 | .for_each(|(module_def_id, _)| { | ||
268 | if found_a_match { | ||
269 | findings.push(module_def_id.into()); | ||
270 | } | ||
271 | if let hir::ModuleDefId::ModuleId(module_id) = module_def_id { | ||
272 | more_ids_to_process.push(module_id.local_id); | ||
273 | } | ||
274 | }); | ||
275 | } | ||
276 | } | ||
277 | module_ids_to_process = more_ids_to_process; | ||
278 | } | ||
279 | |||
280 | findings | ||
281 | } | ||
282 | } | ||
180 | 283 | ||
181 | pub(crate) fn check_assist( | 284 | pub(crate) fn check_assist( |
182 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, | 285 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, |
@@ -206,6 +309,38 @@ mod helpers { | |||
206 | assert_eq_text!(after, &actual); | 309 | assert_eq_text!(after, &actual); |
207 | } | 310 | } |
208 | 311 | ||
312 | pub(crate) fn check_assist_with_imports_locator<F: ImportsLocator>( | ||
313 | assist: fn(AssistCtx<TestDB>, &mut F) -> Option<Assist>, | ||
314 | imports_locator_provider: fn(db: Arc<TestDB>, file_id: FileId) -> F, | ||
315 | before: &str, | ||
316 | after: &str, | ||
317 | ) { | ||
318 | let (before_cursor_pos, before) = extract_offset(before); | ||
319 | let (db, file_id) = TestDB::with_single_file(&before); | ||
320 | let db = Arc::new(db); | ||
321 | let mut imports_locator = imports_locator_provider(Arc::clone(&db), file_id); | ||
322 | let frange = | ||
323 | FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; | ||
324 | let assist = | ||
325 | AssistCtx::with_ctx(db.as_ref(), frange, true, |ctx| assist(ctx, &mut imports_locator)) | ||
326 | .expect("code action is not applicable"); | ||
327 | let action = match assist { | ||
328 | Assist::Unresolved { .. } => unreachable!(), | ||
329 | Assist::Resolved { assist } => assist.get_first_action(), | ||
330 | }; | ||
331 | |||
332 | let actual = action.edit.apply(&before); | ||
333 | let actual_cursor_pos = match action.cursor_position { | ||
334 | None => action | ||
335 | .edit | ||
336 | .apply_to_offset(before_cursor_pos) | ||
337 | .expect("cursor position is affected by the edit"), | ||
338 | Some(off) => off, | ||
339 | }; | ||
340 | let actual = add_cursor(&actual, actual_cursor_pos); | ||
341 | assert_eq_text!(after, &actual); | ||
342 | } | ||
343 | |||
209 | pub(crate) fn check_assist_range( | 344 | pub(crate) fn check_assist_range( |
210 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, | 345 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, |
211 | before: &str, | 346 | before: &str, |
@@ -279,6 +414,22 @@ mod helpers { | |||
279 | assert!(assist.is_none()); | 414 | assert!(assist.is_none()); |
280 | } | 415 | } |
281 | 416 | ||
417 | pub(crate) fn check_assist_with_imports_locator_not_applicable<F: ImportsLocator>( | ||
418 | assist: fn(AssistCtx<TestDB>, &mut F) -> Option<Assist>, | ||
419 | imports_locator_provider: fn(db: Arc<TestDB>, file_id: FileId) -> F, | ||
420 | before: &str, | ||
421 | ) { | ||
422 | let (before_cursor_pos, before) = extract_offset(before); | ||
423 | let (db, file_id) = TestDB::with_single_file(&before); | ||
424 | let db = Arc::new(db); | ||
425 | let mut imports_locator = imports_locator_provider(Arc::clone(&db), file_id); | ||
426 | let frange = | ||
427 | FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; | ||
428 | let assist = | ||
429 | AssistCtx::with_ctx(db.as_ref(), frange, true, |ctx| assist(ctx, &mut imports_locator)); | ||
430 | assert!(assist.is_none()); | ||
431 | } | ||
432 | |||
282 | pub(crate) fn check_assist_range_not_applicable( | 433 | pub(crate) fn check_assist_range_not_applicable( |
283 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, | 434 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, |
284 | before: &str, | 435 | before: &str, |
diff --git a/crates/ra_cargo_watch/Cargo.toml b/crates/ra_cargo_watch/Cargo.toml
index 9ead48abf..dd814fc9d 100644
--- a/crates/ra_cargo_watch/Cargo.toml
+++ b/crates/ra_cargo_watch/Cargo.toml
@@ -6,12 +6,13 @@ authors = ["rust-analyzer developers"] | |||
6 | 6 | ||
7 | [dependencies] | 7 | [dependencies] |
8 | crossbeam-channel = "0.4" | 8 | crossbeam-channel = "0.4" |
9 | lsp-types = { version = "0.69.0", features = ["proposed"] } | 9 | lsp-types = { version = "0.70.0", features = ["proposed"] } |
10 | log = "0.4.3" | 10 | log = "0.4.3" |
11 | cargo_metadata = "0.9.1" | 11 | cargo_metadata = "0.9.1" |
12 | jod-thread = "0.1.0" | 12 | jod-thread = "0.1.0" |
13 | parking_lot = "0.10.0" | 13 | parking_lot = "0.10.0" |
14 | serde_json = "1.0.45" | ||
14 | 15 | ||
15 | [dev-dependencies] | 16 | [dev-dependencies] |
16 | insta = "0.12.0" | 17 | insta = "0.13.0" |
17 | serde_json = "1.0" \ No newline at end of file | 18 | serde_json = "1.0" \ No newline at end of file |
diff --git a/crates/ra_cargo_watch/src/conv.rs b/crates/ra_cargo_watch/src/conv.rs
index ac0f1d28a..8fba400ae 100644
--- a/crates/ra_cargo_watch/src/conv.rs
+++ b/crates/ra_cargo_watch/src/conv.rs
@@ -117,7 +117,7 @@ fn is_deprecated(rd: &RustDiagnostic) -> bool { | |||
117 | } | 117 | } |
118 | } | 118 | } |
119 | 119 | ||
120 | #[derive(Debug)] | 120 | #[derive(Clone, Debug)] |
121 | pub struct SuggestedFix { | 121 | pub struct SuggestedFix { |
122 | pub title: String, | 122 | pub title: String, |
123 | pub location: Location, | 123 | pub location: Location, |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_clippy_pass_by_ref.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap
index cb0920914..cb0920914 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_clippy_pass_by_ref.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_handles_macro_location.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_handles_macro_location.snap
index 19510ecc1..19510ecc1 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_handles_macro_location.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_handles_macro_location.snap
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_macro_compiler_error.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_macro_compiler_error.snap
index 92f7eec05..92f7eec05 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_macro_compiler_error.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_macro_compiler_error.snap
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_incompatible_type_for_trait.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_incompatible_type_for_trait.snap
index cf683e4b6..cf683e4b6 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_incompatible_type_for_trait.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_incompatible_type_for_trait.snap
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_mismatched_type.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_mismatched_type.snap
index 8c1483c74..8c1483c74 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_mismatched_type.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_mismatched_type.snap
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_unused_variable.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_unused_variable.snap
index eb5a2247b..eb5a2247b 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_unused_variable.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_unused_variable.snap
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_wrong_number_of_parameters.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_wrong_number_of_parameters.snap
index 2f4518931..2f4518931 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_wrong_number_of_parameters.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_wrong_number_of_parameters.snap
diff --git a/crates/ra_cargo_watch/src/lib.rs b/crates/ra_cargo_watch/src/lib.rs
index 7f4c9280c..ea7ddc86b 100644
--- a/crates/ra_cargo_watch/src/lib.rs
+++ b/crates/ra_cargo_watch/src/lib.rs
@@ -7,9 +7,9 @@ use lsp_types::{ | |||
7 | Diagnostic, Url, WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressEnd, | 7 | Diagnostic, Url, WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressEnd, |
8 | WorkDoneProgressReport, | 8 | WorkDoneProgressReport, |
9 | }; | 9 | }; |
10 | use parking_lot::RwLock; | ||
11 | use std::{ | 10 | use std::{ |
12 | collections::HashMap, | 11 | collections::HashMap, |
12 | io::{BufRead, BufReader}, | ||
13 | path::PathBuf, | 13 | path::PathBuf, |
14 | process::{Command, Stdio}, | 14 | process::{Command, Stdio}, |
15 | sync::Arc, | 15 | sync::Arc, |
@@ -38,7 +38,7 @@ pub struct CheckOptions { | |||
38 | #[derive(Debug)] | 38 | #[derive(Debug)] |
39 | pub struct CheckWatcher { | 39 | pub struct CheckWatcher { |
40 | pub task_recv: Receiver<CheckTask>, | 40 | pub task_recv: Receiver<CheckTask>, |
41 | pub state: Arc<RwLock<CheckState>>, | 41 | pub state: Arc<CheckState>, |
42 | cmd_send: Option<Sender<CheckCommand>>, | 42 | cmd_send: Option<Sender<CheckCommand>>, |
43 | handle: Option<JoinHandle<()>>, | 43 | handle: Option<JoinHandle<()>>, |
44 | } | 44 | } |
@@ -46,7 +46,7 @@ pub struct CheckWatcher { | |||
46 | impl CheckWatcher { | 46 | impl CheckWatcher { |
47 | pub fn new(options: &CheckOptions, workspace_root: PathBuf) -> CheckWatcher { | 47 | pub fn new(options: &CheckOptions, workspace_root: PathBuf) -> CheckWatcher { |
48 | let options = options.clone(); | 48 | let options = options.clone(); |
49 | let state = Arc::new(RwLock::new(CheckState::new())); | 49 | let state = Arc::new(CheckState::new()); |
50 | 50 | ||
51 | let (task_send, task_recv) = unbounded::<CheckTask>(); | 51 | let (task_send, task_recv) = unbounded::<CheckTask>(); |
52 | let (cmd_send, cmd_recv) = unbounded::<CheckCommand>(); | 52 | let (cmd_send, cmd_recv) = unbounded::<CheckCommand>(); |
@@ -59,7 +59,7 @@ impl CheckWatcher { | |||
59 | 59 | ||
60 | /// Returns a CheckWatcher that doesn't actually do anything | 60 | /// Returns a CheckWatcher that doesn't actually do anything |
61 | pub fn dummy() -> CheckWatcher { | 61 | pub fn dummy() -> CheckWatcher { |
62 | let state = Arc::new(RwLock::new(CheckState::new())); | 62 | let state = Arc::new(CheckState::new()); |
63 | CheckWatcher { task_recv: never(), cmd_send: None, handle: None, state } | 63 | CheckWatcher { task_recv: never(), cmd_send: None, handle: None, state } |
64 | } | 64 | } |
65 | 65 | ||
@@ -87,7 +87,7 @@ impl std::ops::Drop for CheckWatcher { | |||
87 | } | 87 | } |
88 | } | 88 | } |
89 | 89 | ||
90 | #[derive(Debug)] | 90 | #[derive(Clone, Debug)] |
91 | pub struct CheckState { | 91 | pub struct CheckState { |
92 | diagnostic_collection: HashMap<Url, Vec<Diagnostic>>, | 92 | diagnostic_collection: HashMap<Url, Vec<Diagnostic>>, |
93 | suggested_fix_collection: HashMap<Url, Vec<SuggestedFix>>, | 93 | suggested_fix_collection: HashMap<Url, Vec<SuggestedFix>>, |
@@ -216,8 +216,10 @@ impl CheckWatcherThread { | |||
216 | self.last_update_req.take(); | 216 | self.last_update_req.take(); |
217 | task_send.send(CheckTask::ClearDiagnostics).unwrap(); | 217 | task_send.send(CheckTask::ClearDiagnostics).unwrap(); |
218 | 218 | ||
219 | // By replacing the watcher, we drop the previous one which | 219 | // Replace with a dummy watcher first so we drop the original and wait for completion |
220 | // causes it to shut down automatically. | 220 | std::mem::replace(&mut self.watcher, WatchThread::dummy()); |
221 | |||
222 | // Then create the actual new watcher | ||
221 | self.watcher = WatchThread::new(&self.options, &self.workspace_root); | 223 | self.watcher = WatchThread::new(&self.options, &self.workspace_root); |
222 | } | 224 | } |
223 | } | 225 | } |
@@ -348,15 +350,45 @@ impl WatchThread { | |||
348 | // which will break out of the loop, and continue the shutdown | 350 | // which will break out of the loop, and continue the shutdown |
349 | let _ = message_send.send(CheckEvent::Begin); | 351 | let _ = message_send.send(CheckEvent::Begin); |
350 | 352 | ||
351 | for message in cargo_metadata::parse_messages(command.stdout.take().unwrap()) { | 353 | // We manually read a line at a time, instead of using serde's |
354 | // stream deserializers, because the deserializer cannot recover | ||
355 | // from an error, resulting in it getting stuck, whereas we try to | ||
356 | // be resilient against failures. | ||
357 | // | ||
358 | // Because cargo only outputs one JSON object per line, we can | ||
359 | // simply skip a line if it doesn't parse, which just ignores any | ||
360 | // erroneous output. | ||
361 | let stdout = BufReader::new(command.stdout.take().unwrap()); | ||
362 | for line in stdout.lines() { | ||
363 | let line = match line { | ||
364 | Ok(line) => line, | ||
365 | Err(err) => { | ||
366 | log::error!("Couldn't read line from cargo: {}", err); | ||
367 | continue; | ||
368 | } | ||
369 | }; | ||
370 | |||
371 | let message = serde_json::from_str::<cargo_metadata::Message>(&line); | ||
352 | let message = match message { | 372 | let message = match message { |
353 | Ok(message) => message, | 373 | Ok(message) => message, |
354 | Err(err) => { | 374 | Err(err) => { |
355 | log::error!("Invalid json from cargo check, ignoring: {}", err); | 375 | log::error!( |
376 | "Invalid json from cargo check, ignoring ({}): {:?} ", | ||
377 | err, | ||
378 | line | ||
379 | ); | ||
356 | continue; | 380 | continue; |
357 | } | 381 | } |
358 | }; | 382 | }; |
359 | 383 | ||
384 | // Skip certain kinds of messages to only spend time on what's useful | ||
385 | match &message { | ||
386 | Message::CompilerArtifact(artifact) if artifact.fresh => continue, | ||
387 | Message::BuildScriptExecuted(_) => continue, | ||
388 | Message::Unknown => continue, | ||
389 | _ => {} | ||
390 | } | ||
391 | |||
360 | match message_send.send(CheckEvent::Msg(message)) { | 392 | match message_send.send(CheckEvent::Msg(message)) { |
361 | Ok(()) => {} | 393 | Ok(()) => {} |
362 | Err(_err) => { | 394 | Err(_err) => { |
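Note: a standalone, hypothetical sketch of the parsing strategy adopted above, reading one line at a time and parsing each line on its own instead of using a streaming deserializer. `collect_json_lines` is not part of this change and uses `serde_json::Value` rather than `cargo_metadata::Message` to stay self-contained.

use std::io::{BufRead, BufReader, Read};

// Read one JSON object per line and skip anything that fails to parse, so a
// single piece of malformed output cannot wedge the rest of the stream.
fn collect_json_lines<R: Read>(reader: R) -> Vec<serde_json::Value> {
    let mut messages = Vec::new();
    for line in BufReader::new(reader).lines() {
        let line = match line {
            Ok(line) => line,
            Err(err) => {
                eprintln!("couldn't read line: {}", err);
                continue;
            }
        };
        match serde_json::from_str::<serde_json::Value>(&line) {
            Ok(value) => messages.push(value),
            // Cargo prints one JSON object per line, so a bad line can be
            // dropped without losing track of the following ones.
            Err(err) => eprintln!("skipping invalid json ({}): {:?}", err, line),
        }
    }
    messages
}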
diff --git a/crates/ra_cli/Cargo.toml b/crates/ra_cli/Cargo.toml
index 12af075f7..bcd408421 100644
--- a/crates/ra_cli/Cargo.toml
+++ b/crates/ra_cli/Cargo.toml
@@ -7,7 +7,7 @@ publish = false | |||
7 | 7 | ||
8 | [dependencies] | 8 | [dependencies] |
9 | pico-args = "0.3.0" | 9 | pico-args = "0.3.0" |
10 | env_logger = { version = "0.7.1", default-features = false, features = ["humantime"] } | 10 | env_logger = { version = "0.7.1", default-features = false } |
11 | 11 | ||
12 | ra_syntax = { path = "../ra_syntax" } | 12 | ra_syntax = { path = "../ra_syntax" } |
13 | ra_ide = { path = "../ra_ide" } | 13 | ra_ide = { path = "../ra_ide" } |
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index e6079b88d..a77bf6de6 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -1,20 +1,24 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | pub use hir_def::db::{ | 3 | pub use hir_def::db::{ |
4 | BodyQuery, BodyWithSourceMapQuery, ComputeCrateDefMapQuery, ConstDataQuery, | 4 | AttrsQuery, BodyQuery, BodyWithSourceMapQuery, ComputeCrateDefMapQuery, ConstDataQuery, |
5 | CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, | 5 | CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, |
6 | ExprScopesQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternDatabase, | 6 | ExprScopesQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternConstQuery, |
7 | InternDatabaseStorage, LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, StaticDataQuery, | 7 | InternDatabase, InternDatabaseStorage, InternEnumQuery, InternFunctionQuery, InternImplQuery, |
8 | StructDataQuery, TraitDataQuery, TypeAliasDataQuery, | 8 | InternStaticQuery, InternStructQuery, InternTraitQuery, InternTypeAliasQuery, InternUnionQuery, |
9 | LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, StaticDataQuery, StructDataQuery, | ||
10 | TraitDataQuery, TypeAliasDataQuery, UnionDataQuery, | ||
9 | }; | 11 | }; |
10 | pub use hir_expand::db::{ | 12 | pub use hir_expand::db::{ |
11 | AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery, | 13 | AstDatabase, AstDatabaseStorage, AstIdMapQuery, InternMacroQuery, MacroArgQuery, MacroDefQuery, |
12 | ParseMacroQuery, | 14 | MacroExpandQuery, ParseMacroQuery, |
13 | }; | 15 | }; |
14 | pub use hir_ty::db::{ | 16 | pub use hir_ty::db::{ |
15 | AssociatedTyDataQuery, CallableItemSignatureQuery, DoInferQuery, FieldTypesQuery, | 17 | AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, DoInferQuery, |
16 | GenericDefaultsQuery, GenericPredicatesQuery, HirDatabase, HirDatabaseStorage, ImplDatumQuery, | 18 | FieldTypesQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery, |
17 | ImplsForTraitQuery, ImplsInCrateQuery, StructDatumQuery, TraitDatumQuery, TraitSolveQuery, | 19 | HirDatabase, HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, |
20 | ImplsForTraitQuery, ImplsInCrateQuery, InternAssocTyValueQuery, InternChalkImplQuery, | ||
21 | InternTypeCtorQuery, StructDatumQuery, TraitDatumQuery, TraitSolveQuery, TraitSolverQuery, | ||
18 | TyQuery, ValueTyQuery, | 22 | TyQuery, ValueTyQuery, |
19 | }; | 23 | }; |
20 | 24 | ||
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index e1c7b7a20..9e2673d13 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -56,6 +56,7 @@ pub use hir_def::{ | |||
56 | nameres::ModuleSource, | 56 | nameres::ModuleSource, |
57 | path::{ModPath, Path, PathKind}, | 57 | path::{ModPath, Path, PathKind}, |
58 | type_ref::Mutability, | 58 | type_ref::Mutability, |
59 | ModuleDefId, // FIXME: this is exposed only for implementing `TestImportsLocator` in `ra_assists`; it should be removed later along with the trait and its implementation. | ||
59 | }; | 60 | }; |
60 | pub use hir_expand::{ | 61 | pub use hir_expand::{ |
61 | name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, MacroDefId, MacroFile, Origin, | 62 | name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, MacroDefId, MacroFile, Origin, |
diff --git a/crates/ra_hir_def/Cargo.toml b/crates/ra_hir_def/Cargo.toml
index 2c368f690..1efa00fe0 100644
--- a/crates/ra_hir_def/Cargo.toml
+++ b/crates/ra_hir_def/Cargo.toml
@@ -26,4 +26,4 @@ ra_cfg = { path = "../ra_cfg" } | |||
26 | tt = { path = "../ra_tt", package = "ra_tt" } | 26 | tt = { path = "../ra_tt", package = "ra_tt" } |
27 | 27 | ||
28 | [dev-dependencies] | 28 | [dev-dependencies] |
29 | insta = "0.12.0" | 29 | insta = "0.13.0" |
diff --git a/crates/ra_hir_def/src/find_path.rs b/crates/ra_hir_def/src/find_path.rs
index 8cc2fb160..43b9b124a 100644
--- a/crates/ra_hir_def/src/find_path.rs
+++ b/crates/ra_hir_def/src/find_path.rs
@@ -7,10 +7,39 @@ use crate::{ | |||
7 | visibility::Visibility, | 7 | visibility::Visibility, |
8 | CrateId, ModuleDefId, ModuleId, | 8 | CrateId, ModuleDefId, ModuleId, |
9 | }; | 9 | }; |
10 | use hir_expand::name::Name; | 10 | use hir_expand::name::{known, Name}; |
11 | use test_utils::tested_by; | ||
11 | 12 | ||
12 | const MAX_PATH_LEN: usize = 15; | 13 | const MAX_PATH_LEN: usize = 15; |
13 | 14 | ||
15 | impl ModPath { | ||
16 | fn starts_with_std(&self) -> bool { | ||
17 | self.segments.first().filter(|&first_segment| first_segment == &known::std).is_some() | ||
18 | } | ||
19 | |||
20 | // When the std library is present, paths starting with `std::` | ||
21 | // should be preferred over paths starting with `core::` and `alloc::`. | ||
22 | fn should_start_with_std(&self) -> bool { | ||
23 | self.segments | ||
24 | .first() | ||
25 | .filter(|&first_segment| { | ||
26 | first_segment == &known::alloc || first_segment == &known::core | ||
27 | }) | ||
28 | .is_some() | ||
29 | } | ||
30 | |||
31 | fn len(&self) -> usize { | ||
32 | self.segments.len() | ||
33 | + match self.kind { | ||
34 | PathKind::Plain => 0, | ||
35 | PathKind::Super(i) => i as usize, | ||
36 | PathKind::Crate => 1, | ||
37 | PathKind::Abs => 0, | ||
38 | PathKind::DollarCrate(_) => 1, | ||
39 | } | ||
40 | } | ||
41 | } | ||
42 | |||
14 | // FIXME: handle local items | 43 | // FIXME: handle local items |
15 | 44 | ||
16 | /// Find a path that can be used to refer to a certain item. This can depend on | 45 | /// Find a path that can be used to refer to a certain item. This can depend on |
@@ -112,23 +141,27 @@ fn find_path_inner( | |||
112 | Some(path) => path, | 141 | Some(path) => path, |
113 | }; | 142 | }; |
114 | path.segments.push(name); | 143 | path.segments.push(name); |
115 | if path_len(&path) < best_path_len { | 144 | |
116 | best_path_len = path_len(&path); | 145 | let new_path = |
117 | best_path = Some(path); | 146 | if let Some(best_path) = best_path { select_best_path(best_path, path) } else { path }; |
118 | } | 147 | best_path_len = new_path.len(); |
148 | best_path = Some(new_path); | ||
119 | } | 149 | } |
120 | best_path | 150 | best_path |
121 | } | 151 | } |
122 | 152 | ||
123 | fn path_len(path: &ModPath) -> usize { | 153 | fn select_best_path(old_path: ModPath, new_path: ModPath) -> ModPath { |
124 | path.segments.len() | 154 | if old_path.starts_with_std() && new_path.should_start_with_std() { |
125 | + match path.kind { | 155 | tested_by!(prefer_std_paths); |
126 | PathKind::Plain => 0, | 156 | old_path |
127 | PathKind::Super(i) => i as usize, | 157 | } else if new_path.starts_with_std() && old_path.should_start_with_std() { |
128 | PathKind::Crate => 1, | 158 | tested_by!(prefer_std_paths); |
129 | PathKind::Abs => 0, | 159 | new_path |
130 | PathKind::DollarCrate(_) => 1, | 160 | } else if new_path.len() < old_path.len() { |
131 | } | 161 | new_path |
162 | } else { | ||
163 | old_path | ||
164 | } | ||
132 | } | 165 | } |
133 | 166 | ||
134 | fn find_importable_locations( | 167 | fn find_importable_locations( |
@@ -201,6 +234,7 @@ mod tests { | |||
201 | use hir_expand::hygiene::Hygiene; | 234 | use hir_expand::hygiene::Hygiene; |
202 | use ra_db::fixture::WithFixture; | 235 | use ra_db::fixture::WithFixture; |
203 | use ra_syntax::ast::AstNode; | 236 | use ra_syntax::ast::AstNode; |
237 | use test_utils::covers; | ||
204 | 238 | ||
205 | /// `code` needs to contain a cursor marker; checks that `find_path` for the | 239 | /// `code` needs to contain a cursor marker; checks that `find_path` for the |
206 | /// item the `path` refers to returns that same path when called from the | 240 | /// item the `path` refers to returns that same path when called from the |
@@ -452,4 +486,41 @@ mod tests { | |||
452 | "#; | 486 | "#; |
453 | check_found_path(code, "crate::foo::S"); | 487 | check_found_path(code, "crate::foo::S"); |
454 | } | 488 | } |
489 | |||
490 | #[test] | ||
491 | fn prefer_std_paths_over_alloc() { | ||
492 | covers!(prefer_std_paths); | ||
493 | let code = r#" | ||
494 | //- /main.rs crate:main deps:alloc,std | ||
495 | <|> | ||
496 | |||
497 | //- /std.rs crate:std deps:alloc | ||
498 | pub mod sync { | ||
499 | pub use alloc::sync::Arc; | ||
500 | } | ||
501 | |||
502 | //- /zzz.rs crate:alloc | ||
503 | pub mod sync { | ||
504 | pub struct Arc; | ||
505 | } | ||
506 | "#; | ||
507 | check_found_path(code, "std::sync::Arc"); | ||
508 | } | ||
509 | |||
510 | #[test] | ||
511 | fn prefer_shorter_paths_if_not_alloc() { | ||
512 | let code = r#" | ||
513 | //- /main.rs crate:main deps:megaalloc,std | ||
514 | <|> | ||
515 | |||
516 | //- /std.rs crate:std deps:megaalloc | ||
517 | pub mod sync { | ||
518 | pub use megaalloc::sync::Arc; | ||
519 | } | ||
520 | |||
521 | //- /zzz.rs crate:megaalloc | ||
522 | pub struct Arc; | ||
523 | "#; | ||
524 | check_found_path(code, "megaalloc::Arc"); | ||
525 | } | ||
455 | } | 526 | } |
diff --git a/crates/ra_hir_def/src/marks.rs b/crates/ra_hir_def/src/marks.rs
index 457ba4abe..daa49d5f1 100644
--- a/crates/ra_hir_def/src/marks.rs
+++ b/crates/ra_hir_def/src/marks.rs
@@ -13,4 +13,5 @@ test_utils::marks!( | |||
13 | macro_dollar_crate_self | 13 | macro_dollar_crate_self |
14 | macro_dollar_crate_other | 14 | macro_dollar_crate_other |
15 | infer_resolve_while_let | 15 | infer_resolve_while_let |
16 | prefer_std_paths | ||
16 | ); | 17 | ); |
diff --git a/crates/ra_hir_def/src/nameres.rs b/crates/ra_hir_def/src/nameres.rs
index 27c12e46c..852304dd0 100644
--- a/crates/ra_hir_def/src/nameres.rs
+++ b/crates/ra_hir_def/src/nameres.rs
@@ -229,6 +229,46 @@ impl CrateDefMap { | |||
229 | self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path, shadow); | 229 | self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path, shadow); |
230 | (res.resolved_def, res.segment_index) | 230 | (res.resolved_def, res.segment_index) |
231 | } | 231 | } |
232 | |||
233 | // FIXME: this can use some more human-readable format (ideally, an IR | ||
234 | // even), as this should be a great debugging aid. | ||
235 | pub fn dump(&self) -> String { | ||
236 | let mut buf = String::new(); | ||
237 | go(&mut buf, self, "\ncrate", self.root); | ||
238 | return buf.trim().to_string(); | ||
239 | |||
240 | fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: LocalModuleId) { | ||
241 | *buf += path; | ||
242 | *buf += "\n"; | ||
243 | |||
244 | let mut entries: Vec<_> = map.modules[module].scope.resolutions().collect(); | ||
245 | entries.sort_by_key(|(name, _)| name.clone()); | ||
246 | |||
247 | for (name, def) in entries { | ||
248 | *buf += &format!("{}:", name); | ||
249 | |||
250 | if def.types.is_some() { | ||
251 | *buf += " t"; | ||
252 | } | ||
253 | if def.values.is_some() { | ||
254 | *buf += " v"; | ||
255 | } | ||
256 | if def.macros.is_some() { | ||
257 | *buf += " m"; | ||
258 | } | ||
259 | if def.is_none() { | ||
260 | *buf += " _"; | ||
261 | } | ||
262 | |||
263 | *buf += "\n"; | ||
264 | } | ||
265 | |||
266 | for (name, child) in map.modules[module].children.iter() { | ||
267 | let path = path.to_string() + &format!("::{}", name); | ||
268 | go(buf, map, &path, *child); | ||
269 | } | ||
270 | } | ||
271 | } | ||
232 | } | 272 | } |
233 | 273 | ||
234 | impl ModuleData { | 274 | impl ModuleData { |
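Note: for reference, the output of the new `CrateDefMap::dump` helper looks roughly like the following for a hypothetical crate containing `struct Foo;` and `mod bar { pub struct Baz; }` (derived from the rendering code above, not from an actual run). `t`, `v` and `m` mark the type, value and macro namespaces a name resolves in, and `_` marks a name that resolves in none of them.

crate
Foo: t v
bar: t

crate::bar
Baz: t v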
diff --git a/crates/ra_hir_def/src/nameres/tests.rs b/crates/ra_hir_def/src/nameres/tests.rs
index 78bcdc850..82f0f835c 100644
--- a/crates/ra_hir_def/src/nameres/tests.rs
+++ b/crates/ra_hir_def/src/nameres/tests.rs
@@ -10,11 +10,10 @@ use insta::assert_snapshot; | |||
10 | use ra_db::{fixture::WithFixture, SourceDatabase}; | 10 | use ra_db::{fixture::WithFixture, SourceDatabase}; |
11 | use test_utils::covers; | 11 | use test_utils::covers; |
12 | 12 | ||
13 | use crate::{db::DefDatabase, nameres::*, test_db::TestDB, LocalModuleId}; | 13 | use crate::{db::DefDatabase, nameres::*, test_db::TestDB}; |
14 | 14 | ||
15 | fn def_map(fixture: &str) -> String { | 15 | fn def_map(fixture: &str) -> String { |
16 | let dm = compute_crate_def_map(fixture); | 16 | compute_crate_def_map(fixture).dump() |
17 | render_crate_def_map(&dm) | ||
18 | } | 17 | } |
19 | 18 | ||
20 | fn compute_crate_def_map(fixture: &str) -> Arc<CrateDefMap> { | 19 | fn compute_crate_def_map(fixture: &str) -> Arc<CrateDefMap> { |
@@ -23,44 +22,6 @@ fn compute_crate_def_map(fixture: &str) -> Arc<CrateDefMap> { | |||
23 | db.crate_def_map(krate) | 22 | db.crate_def_map(krate) |
24 | } | 23 | } |
25 | 24 | ||
26 | fn render_crate_def_map(map: &CrateDefMap) -> String { | ||
27 | let mut buf = String::new(); | ||
28 | go(&mut buf, map, "\ncrate", map.root); | ||
29 | return buf.trim().to_string(); | ||
30 | |||
31 | fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: LocalModuleId) { | ||
32 | *buf += path; | ||
33 | *buf += "\n"; | ||
34 | |||
35 | let mut entries: Vec<_> = map.modules[module].scope.resolutions().collect(); | ||
36 | entries.sort_by_key(|(name, _)| name.clone()); | ||
37 | |||
38 | for (name, def) in entries { | ||
39 | *buf += &format!("{}:", name); | ||
40 | |||
41 | if def.types.is_some() { | ||
42 | *buf += " t"; | ||
43 | } | ||
44 | if def.values.is_some() { | ||
45 | *buf += " v"; | ||
46 | } | ||
47 | if def.macros.is_some() { | ||
48 | *buf += " m"; | ||
49 | } | ||
50 | if def.is_none() { | ||
51 | *buf += " _"; | ||
52 | } | ||
53 | |||
54 | *buf += "\n"; | ||
55 | } | ||
56 | |||
57 | for (name, child) in map.modules[module].children.iter() { | ||
58 | let path = path.to_string() + &format!("::{}", name); | ||
59 | go(buf, map, &path, *child); | ||
60 | } | ||
61 | } | ||
62 | } | ||
63 | |||
64 | #[test] | 25 | #[test] |
65 | fn crate_def_map_smoke_test() { | 26 | fn crate_def_map_smoke_test() { |
66 | let map = def_map( | 27 | let map = def_map( |
diff --git a/crates/ra_hir_expand/src/name.rs b/crates/ra_hir_expand/src/name.rs
index b3fa1efba..b2e10f445 100644
--- a/crates/ra_hir_expand/src/name.rs
+++ b/crates/ra_hir_expand/src/name.rs
@@ -141,6 +141,8 @@ pub mod known { | |||
141 | macro_rules, | 141 | macro_rules, |
142 | // Components of known path (value or mod name) | 142 | // Components of known path (value or mod name) |
143 | std, | 143 | std, |
144 | core, | ||
145 | alloc, | ||
144 | iter, | 146 | iter, |
145 | ops, | 147 | ops, |
146 | future, | 148 | future, |
diff --git a/crates/ra_hir_ty/Cargo.toml b/crates/ra_hir_ty/Cargo.toml
index 60793db44..f5484bf70 100644
--- a/crates/ra_hir_ty/Cargo.toml
+++ b/crates/ra_hir_ty/Cargo.toml
@@ -21,11 +21,11 @@ ra_prof = { path = "../ra_prof" } | |||
21 | ra_syntax = { path = "../ra_syntax" } | 21 | ra_syntax = { path = "../ra_syntax" } |
22 | test_utils = { path = "../test_utils" } | 22 | test_utils = { path = "../test_utils" } |
23 | 23 | ||
24 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" } | 24 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" } |
25 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" } | 25 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" } |
26 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" } | 26 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" } |
27 | 27 | ||
28 | lalrpop-intern = "0.15.1" | 28 | lalrpop-intern = "0.15.1" |
29 | 29 | ||
30 | [dev-dependencies] | 30 | [dev-dependencies] |
31 | insta = "0.12.0" | 31 | insta = "0.13.0" |
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs
index 908e4862d..08d501ccd 100644
--- a/crates/ra_hir_ty/src/lib.rs
+++ b/crates/ra_hir_ty/src/lib.rs
@@ -847,7 +847,7 @@ impl HirDisplay for ApplicationTy { | |||
847 | } | 847 | } |
848 | TypeCtor::Array => { | 848 | TypeCtor::Array => { |
849 | let t = self.parameters.as_single(); | 849 | let t = self.parameters.as_single(); |
850 | write!(f, "[{};_]", t.display(f.db))?; | 850 | write!(f, "[{}; _]", t.display(f.db))?; |
851 | } | 851 | } |
852 | TypeCtor::RawPtr(m) => { | 852 | TypeCtor::RawPtr(m) => { |
853 | let t = self.parameters.as_single(); | 853 | let t = self.parameters.as_single(); |
diff --git a/crates/ra_hir_ty/src/tests/coercion.rs b/crates/ra_hir_ty/src/tests/coercion.rs
index 7e99a42ed..76a1b46c0 100644
--- a/crates/ra_hir_ty/src/tests/coercion.rs
+++ b/crates/ra_hir_ty/src/tests/coercion.rs
@@ -71,42 +71,42 @@ fn test2() { | |||
71 | [82; 93) '{ loop {} }': T | 71 | [82; 93) '{ loop {} }': T |
72 | [84; 91) 'loop {}': ! | 72 | [84; 91) 'loop {}': ! |
73 | [89; 91) '{}': () | 73 | [89; 91) '{}': () |
74 | [122; 133) '{ loop {} }': *mut [T;_] | 74 | [122; 133) '{ loop {} }': *mut [T; _] |
75 | [124; 131) 'loop {}': ! | 75 | [124; 131) 'loop {}': ! |
76 | [129; 131) '{}': () | 76 | [129; 131) '{}': () |
77 | [160; 173) '{ gen() }': *mut [U] | 77 | [160; 173) '{ gen() }': *mut [U] |
78 | [166; 169) 'gen': fn gen<U>() -> *mut [T;_] | 78 | [166; 169) 'gen': fn gen<U>() -> *mut [T; _] |
79 | [166; 171) 'gen()': *mut [U;_] | 79 | [166; 171) 'gen()': *mut [U; _] |
80 | [186; 420) '{ ...rr); }': () | 80 | [186; 420) '{ ...rr); }': () |
81 | [196; 199) 'arr': &[u8;_] | 81 | [196; 199) 'arr': &[u8; _] |
82 | [212; 216) '&[1]': &[u8;_] | 82 | [212; 216) '&[1]': &[u8; _] |
83 | [213; 216) '[1]': [u8;_] | 83 | [213; 216) '[1]': [u8; _] |
84 | [214; 215) '1': u8 | 84 | [214; 215) '1': u8 |
85 | [227; 228) 'a': &[u8] | 85 | [227; 228) 'a': &[u8] |
86 | [237; 240) 'arr': &[u8;_] | 86 | [237; 240) 'arr': &[u8; _] |
87 | [250; 251) 'b': u8 | 87 | [250; 251) 'b': u8 |
88 | [254; 255) 'f': fn f<u8>(&[T]) -> T | 88 | [254; 255) 'f': fn f<u8>(&[T]) -> T |
89 | [254; 260) 'f(arr)': u8 | 89 | [254; 260) 'f(arr)': u8 |
90 | [256; 259) 'arr': &[u8;_] | 90 | [256; 259) 'arr': &[u8; _] |
91 | [270; 271) 'c': &[u8] | 91 | [270; 271) 'c': &[u8] |
92 | [280; 287) '{ arr }': &[u8] | 92 | [280; 287) '{ arr }': &[u8] |
93 | [282; 285) 'arr': &[u8;_] | 93 | [282; 285) 'arr': &[u8; _] |
94 | [297; 298) 'd': u8 | 94 | [297; 298) 'd': u8 |
95 | [301; 302) 'g': fn g<u8>(S<&[T]>) -> T | 95 | [301; 302) 'g': fn g<u8>(S<&[T]>) -> T |
96 | [301; 316) 'g(S { a: arr })': u8 | 96 | [301; 316) 'g(S { a: arr })': u8 |
97 | [303; 315) 'S { a: arr }': S<&[u8]> | 97 | [303; 315) 'S { a: arr }': S<&[u8]> |
98 | [310; 313) 'arr': &[u8;_] | 98 | [310; 313) 'arr': &[u8; _] |
99 | [326; 327) 'e': [&[u8];_] | 99 | [326; 327) 'e': [&[u8]; _] |
100 | [341; 346) '[arr]': [&[u8];_] | 100 | [341; 346) '[arr]': [&[u8]; _] |
101 | [342; 345) 'arr': &[u8;_] | 101 | [342; 345) 'arr': &[u8; _] |
102 | [356; 357) 'f': [&[u8];_] | 102 | [356; 357) 'f': [&[u8]; _] |
103 | [371; 379) '[arr; 2]': [&[u8];_] | 103 | [371; 379) '[arr; 2]': [&[u8]; _] |
104 | [372; 375) 'arr': &[u8;_] | 104 | [372; 375) 'arr': &[u8; _] |
105 | [377; 378) '2': usize | 105 | [377; 378) '2': usize |
106 | [389; 390) 'g': (&[u8], &[u8]) | 106 | [389; 390) 'g': (&[u8], &[u8]) |
107 | [407; 417) '(arr, arr)': (&[u8], &[u8]) | 107 | [407; 417) '(arr, arr)': (&[u8], &[u8]) |
108 | [408; 411) 'arr': &[u8;_] | 108 | [408; 411) 'arr': &[u8; _] |
109 | [413; 416) 'arr': &[u8;_] | 109 | [413; 416) 'arr': &[u8; _] |
110 | "### | 110 | "### |
111 | ); | 111 | ); |
112 | } | 112 | } |
@@ -122,8 +122,8 @@ fn test() { | |||
122 | @r###" | 122 | @r###" |
123 | [11; 40) '{ ...[1]; }': () | 123 | [11; 40) '{ ...[1]; }': () |
124 | [21; 22) 'x': &[i32] | 124 | [21; 22) 'x': &[i32] |
125 | [33; 37) '&[1]': &[i32;_] | 125 | [33; 37) '&[1]': &[i32; _] |
126 | [34; 37) '[1]': [i32;_] | 126 | [34; 37) '[1]': [i32; _] |
127 | [35; 36) '1': i32 | 127 | [35; 36) '1': i32 |
128 | "###); | 128 | "###); |
129 | } | 129 | } |
@@ -159,22 +159,22 @@ fn test(a: A<[u8; 2]>, b: B<[u8; 2]>, c: C<[u8; 2]>) { | |||
159 | [334; 335) 'x': C<[T]> | 159 | [334; 335) 'x': C<[T]> |
160 | [355; 360) '{ x }': C<[T]> | 160 | [355; 360) '{ x }': C<[T]> |
161 | [357; 358) 'x': C<[T]> | 161 | [357; 358) 'x': C<[T]> |
162 | [370; 371) 'a': A<[u8;_]> | 162 | [370; 371) 'a': A<[u8; _]> |
163 | [385; 386) 'b': B<[u8;_]> | 163 | [385; 386) 'b': B<[u8; _]> |
164 | [400; 401) 'c': C<[u8;_]> | 164 | [400; 401) 'c': C<[u8; _]> |
165 | [415; 481) '{ ...(c); }': () | 165 | [415; 481) '{ ...(c); }': () |
166 | [425; 426) 'd': A<[{unknown}]> | 166 | [425; 426) 'd': A<[{unknown}]> |
167 | [429; 433) 'foo1': fn foo1<{unknown}>(A<[T]>) -> A<[T]> | 167 | [429; 433) 'foo1': fn foo1<{unknown}>(A<[T]>) -> A<[T]> |
168 | [429; 436) 'foo1(a)': A<[{unknown}]> | 168 | [429; 436) 'foo1(a)': A<[{unknown}]> |
169 | [434; 435) 'a': A<[u8;_]> | 169 | [434; 435) 'a': A<[u8; _]> |
170 | [446; 447) 'e': B<[u8]> | 170 | [446; 447) 'e': B<[u8]> |
171 | [450; 454) 'foo2': fn foo2<u8>(B<[T]>) -> B<[T]> | 171 | [450; 454) 'foo2': fn foo2<u8>(B<[T]>) -> B<[T]> |
172 | [450; 457) 'foo2(b)': B<[u8]> | 172 | [450; 457) 'foo2(b)': B<[u8]> |
173 | [455; 456) 'b': B<[u8;_]> | 173 | [455; 456) 'b': B<[u8; _]> |
174 | [467; 468) 'f': C<[u8]> | 174 | [467; 468) 'f': C<[u8]> |
175 | [471; 475) 'foo3': fn foo3<u8>(C<[T]>) -> C<[T]> | 175 | [471; 475) 'foo3': fn foo3<u8>(C<[T]>) -> C<[T]> |
176 | [471; 478) 'foo3(c)': C<[u8]> | 176 | [471; 478) 'foo3(c)': C<[u8]> |
177 | [476; 477) 'c': C<[u8;_]> | 177 | [476; 477) 'c': C<[u8; _]> |
178 | "### | 178 | "### |
179 | ); | 179 | ); |
180 | } | 180 | } |
@@ -204,12 +204,12 @@ fn test() { | |||
204 | [72; 97) '{ ... }': &[i32] | 204 | [72; 97) '{ ... }': &[i32] |
205 | [82; 85) 'foo': fn foo<i32>(&[T]) -> &[T] | 205 | [82; 85) 'foo': fn foo<i32>(&[T]) -> &[T] |
206 | [82; 91) 'foo(&[1])': &[i32] | 206 | [82; 91) 'foo(&[1])': &[i32] |
207 | [86; 90) '&[1]': &[i32;_] | 207 | [86; 90) '&[1]': &[i32; _] |
208 | [87; 90) '[1]': [i32;_] | 208 | [87; 90) '[1]': [i32; _] |
209 | [88; 89) '1': i32 | 209 | [88; 89) '1': i32 |
210 | [103; 123) '{ ... }': &[i32;_] | 210 | [103; 123) '{ ... }': &[i32; _] |
211 | [113; 117) '&[1]': &[i32;_] | 211 | [113; 117) '&[1]': &[i32; _] |
212 | [114; 117) '[1]': [i32;_] | 212 | [114; 117) '[1]': [i32; _] |
213 | [115; 116) '1': i32 | 213 | [115; 116) '1': i32 |
214 | "### | 214 | "### |
215 | ); | 215 | ); |
@@ -237,15 +237,15 @@ fn test() { | |||
237 | [60; 61) 'x': &[i32] | 237 | [60; 61) 'x': &[i32] |
238 | [64; 123) 'if tru... }': &[i32] | 238 | [64; 123) 'if tru... }': &[i32] |
239 | [67; 71) 'true': bool | 239 | [67; 71) 'true': bool |
240 | [72; 92) '{ ... }': &[i32;_] | 240 | [72; 92) '{ ... }': &[i32; _] |
241 | [82; 86) '&[1]': &[i32;_] | 241 | [82; 86) '&[1]': &[i32; _] |
242 | [83; 86) '[1]': [i32;_] | 242 | [83; 86) '[1]': [i32; _] |
243 | [84; 85) '1': i32 | 243 | [84; 85) '1': i32 |
244 | [98; 123) '{ ... }': &[i32] | 244 | [98; 123) '{ ... }': &[i32] |
245 | [108; 111) 'foo': fn foo<i32>(&[T]) -> &[T] | 245 | [108; 111) 'foo': fn foo<i32>(&[T]) -> &[T] |
246 | [108; 117) 'foo(&[1])': &[i32] | 246 | [108; 117) 'foo(&[1])': &[i32] |
247 | [112; 116) '&[1]': &[i32;_] | 247 | [112; 116) '&[1]': &[i32; _] |
248 | [113; 116) '[1]': [i32;_] | 248 | [113; 116) '[1]': [i32; _] |
249 | [114; 115) '1': i32 | 249 | [114; 115) '1': i32 |
250 | "### | 250 | "### |
251 | ); | 251 | ); |
@@ -277,16 +277,16 @@ fn test(i: i32) { | |||
277 | [88; 89) '2': i32 | 277 | [88; 89) '2': i32 |
278 | [93; 96) 'foo': fn foo<i32>(&[T]) -> &[T] | 278 | [93; 96) 'foo': fn foo<i32>(&[T]) -> &[T] |
279 | [93; 102) 'foo(&[2])': &[i32] | 279 | [93; 102) 'foo(&[2])': &[i32] |
280 | [97; 101) '&[2]': &[i32;_] | 280 | [97; 101) '&[2]': &[i32; _] |
281 | [98; 101) '[2]': [i32;_] | 281 | [98; 101) '[2]': [i32; _] |
282 | [99; 100) '2': i32 | 282 | [99; 100) '2': i32 |
283 | [112; 113) '1': i32 | 283 | [112; 113) '1': i32 |
284 | [117; 121) '&[1]': &[i32;_] | 284 | [117; 121) '&[1]': &[i32; _] |
285 | [118; 121) '[1]': [i32;_] | 285 | [118; 121) '[1]': [i32; _] |
286 | [119; 120) '1': i32 | 286 | [119; 120) '1': i32 |
287 | [131; 132) '_': i32 | 287 | [131; 132) '_': i32 |
288 | [136; 140) '&[3]': &[i32;_] | 288 | [136; 140) '&[3]': &[i32; _] |
289 | [137; 140) '[3]': [i32;_] | 289 | [137; 140) '[3]': [i32; _] |
290 | [138; 139) '3': i32 | 290 | [138; 139) '3': i32 |
291 | "### | 291 | "### |
292 | ); | 292 | ); |
@@ -316,18 +316,18 @@ fn test(i: i32) { | |||
316 | [70; 147) 'match ... }': &[i32] | 316 | [70; 147) 'match ... }': &[i32] |
317 | [76; 77) 'i': i32 | 317 | [76; 77) 'i': i32 |
318 | [88; 89) '1': i32 | 318 | [88; 89) '1': i32 |
319 | [93; 97) '&[1]': &[i32;_] | 319 | [93; 97) '&[1]': &[i32; _] |
320 | [94; 97) '[1]': [i32;_] | 320 | [94; 97) '[1]': [i32; _] |
321 | [95; 96) '1': i32 | 321 | [95; 96) '1': i32 |
322 | [107; 108) '2': i32 | 322 | [107; 108) '2': i32 |
323 | [112; 115) 'foo': fn foo<i32>(&[T]) -> &[T] | 323 | [112; 115) 'foo': fn foo<i32>(&[T]) -> &[T] |
324 | [112; 121) 'foo(&[2])': &[i32] | 324 | [112; 121) 'foo(&[2])': &[i32] |
325 | [116; 120) '&[2]': &[i32;_] | 325 | [116; 120) '&[2]': &[i32; _] |
326 | [117; 120) '[2]': [i32;_] | 326 | [117; 120) '[2]': [i32; _] |
327 | [118; 119) '2': i32 | 327 | [118; 119) '2': i32 |
328 | [131; 132) '_': i32 | 328 | [131; 132) '_': i32 |
329 | [136; 140) '&[3]': &[i32;_] | 329 | [136; 140) '&[3]': &[i32; _] |
330 | [137; 140) '[3]': [i32;_] | 330 | [137; 140) '[3]': [i32; _] |
331 | [138; 139) '3': i32 | 331 | [138; 139) '3': i32 |
332 | "### | 332 | "### |
333 | ); | 333 | ); |
diff --git a/crates/ra_hir_ty/src/tests/regression.rs b/crates/ra_hir_ty/src/tests/regression.rs index aa948dcbf..02bab6dbe 100644 --- a/crates/ra_hir_ty/src/tests/regression.rs +++ b/crates/ra_hir_ty/src/tests/regression.rs | |||
@@ -102,7 +102,7 @@ fn test() { | |||
102 | [11; 48) '{ ...&y]; }': () | 102 | [11; 48) '{ ...&y]; }': () |
103 | [21; 22) 'y': &{unknown} | 103 | [21; 22) 'y': &{unknown} |
104 | [25; 32) 'unknown': &{unknown} | 104 | [25; 32) 'unknown': &{unknown} |
105 | [38; 45) '[y, &y]': [&&{unknown};_] | 105 | [38; 45) '[y, &y]': [&&{unknown}; _] |
106 | [39; 40) 'y': &{unknown} | 106 | [39; 40) 'y': &{unknown} |
107 | [42; 44) '&y': &&{unknown} | 107 | [42; 44) '&y': &&{unknown} |
108 | [43; 44) 'y': &{unknown} | 108 | [43; 44) 'y': &{unknown} |
@@ -128,7 +128,7 @@ fn test() { | |||
128 | [25; 32) 'unknown': &&{unknown} | 128 | [25; 32) 'unknown': &&{unknown} |
129 | [42; 43) 'y': &&{unknown} | 129 | [42; 43) 'y': &&{unknown} |
130 | [46; 53) 'unknown': &&{unknown} | 130 | [46; 53) 'unknown': &&{unknown} |
131 | [59; 77) '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown});_] | 131 | [59; 77) '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown}); _] |
132 | [60; 66) '(x, y)': (&&&{unknown}, &&&{unknown}) | 132 | [60; 66) '(x, y)': (&&&{unknown}, &&&{unknown}) |
133 | [61; 62) 'x': &&{unknown} | 133 | [61; 62) 'x': &&{unknown} |
134 | [64; 65) 'y': &&{unknown} | 134 | [64; 65) 'y': &&{unknown} |
@@ -180,8 +180,8 @@ fn test_line_buffer() { | |||
180 | "#), | 180 | "#), |
181 | @r###" | 181 | @r###" |
182 | [23; 53) '{ ...n']; }': () | 182 | [23; 53) '{ ...n']; }': () |
183 | [29; 50) '&[0, b...b'\n']': &[u8;_] | 183 | [29; 50) '&[0, b...b'\n']': &[u8; _] |
184 | [30; 50) '[0, b'...b'\n']': [u8;_] | 184 | [30; 50) '[0, b'...b'\n']': [u8; _] |
185 | [31; 32) '0': u8 | 185 | [31; 32) '0': u8 |
186 | [34; 39) 'b'\n'': u8 | 186 | [34; 39) 'b'\n'': u8 |
187 | [41; 42) '1': u8 | 187 | [41; 42) '1': u8 |
diff --git a/crates/ra_hir_ty/src/tests/simple.rs b/crates/ra_hir_ty/src/tests/simple.rs index b7204ec00..fdab9c187 100644 --- a/crates/ra_hir_ty/src/tests/simple.rs +++ b/crates/ra_hir_ty/src/tests/simple.rs | |||
@@ -28,7 +28,7 @@ mod boxed { | |||
28 | 28 | ||
29 | "#, | 29 | "#, |
30 | ); | 30 | ); |
31 | assert_eq!("(Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32;_]>)", type_at_pos(&db, pos)); | 31 | assert_eq!("(Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; _]>)", type_at_pos(&db, pos)); |
32 | } | 32 | } |
33 | 33 | ||
34 | #[test] | 34 | #[test] |
@@ -1061,55 +1061,55 @@ fn test(x: &str, y: isize) { | |||
1061 | [9; 10) 'x': &str | 1061 | [9; 10) 'x': &str |
1062 | [18; 19) 'y': isize | 1062 | [18; 19) 'y': isize |
1063 | [28; 293) '{ ... []; }': () | 1063 | [28; 293) '{ ... []; }': () |
1064 | [38; 39) 'a': [&str;_] | 1064 | [38; 39) 'a': [&str; _] |
1065 | [42; 45) '[x]': [&str;_] | 1065 | [42; 45) '[x]': [&str; _] |
1066 | [43; 44) 'x': &str | 1066 | [43; 44) 'x': &str |
1067 | [55; 56) 'b': [[&str;_];_] | 1067 | [55; 56) 'b': [[&str; _]; _] |
1068 | [59; 65) '[a, a]': [[&str;_];_] | 1068 | [59; 65) '[a, a]': [[&str; _]; _] |
1069 | [60; 61) 'a': [&str;_] | 1069 | [60; 61) 'a': [&str; _] |
1070 | [63; 64) 'a': [&str;_] | 1070 | [63; 64) 'a': [&str; _] |
1071 | [75; 76) 'c': [[[&str;_];_];_] | 1071 | [75; 76) 'c': [[[&str; _]; _]; _] |
1072 | [79; 85) '[b, b]': [[[&str;_];_];_] | 1072 | [79; 85) '[b, b]': [[[&str; _]; _]; _] |
1073 | [80; 81) 'b': [[&str;_];_] | 1073 | [80; 81) 'b': [[&str; _]; _] |
1074 | [83; 84) 'b': [[&str;_];_] | 1074 | [83; 84) 'b': [[&str; _]; _] |
1075 | [96; 97) 'd': [isize;_] | 1075 | [96; 97) 'd': [isize; _] |
1076 | [100; 112) '[y, 1, 2, 3]': [isize;_] | 1076 | [100; 112) '[y, 1, 2, 3]': [isize; _] |
1077 | [101; 102) 'y': isize | 1077 | [101; 102) 'y': isize |
1078 | [104; 105) '1': isize | 1078 | [104; 105) '1': isize |
1079 | [107; 108) '2': isize | 1079 | [107; 108) '2': isize |
1080 | [110; 111) '3': isize | 1080 | [110; 111) '3': isize |
1081 | [122; 123) 'd': [isize;_] | 1081 | [122; 123) 'd': [isize; _] |
1082 | [126; 138) '[1, y, 2, 3]': [isize;_] | 1082 | [126; 138) '[1, y, 2, 3]': [isize; _] |
1083 | [127; 128) '1': isize | 1083 | [127; 128) '1': isize |
1084 | [130; 131) 'y': isize | 1084 | [130; 131) 'y': isize |
1085 | [133; 134) '2': isize | 1085 | [133; 134) '2': isize |
1086 | [136; 137) '3': isize | 1086 | [136; 137) '3': isize |
1087 | [148; 149) 'e': [isize;_] | 1087 | [148; 149) 'e': [isize; _] |
1088 | [152; 155) '[y]': [isize;_] | 1088 | [152; 155) '[y]': [isize; _] |
1089 | [153; 154) 'y': isize | 1089 | [153; 154) 'y': isize |
1090 | [165; 166) 'f': [[isize;_];_] | 1090 | [165; 166) 'f': [[isize; _]; _] |
1091 | [169; 175) '[d, d]': [[isize;_];_] | 1091 | [169; 175) '[d, d]': [[isize; _]; _] |
1092 | [170; 171) 'd': [isize;_] | 1092 | [170; 171) 'd': [isize; _] |
1093 | [173; 174) 'd': [isize;_] | 1093 | [173; 174) 'd': [isize; _] |
1094 | [185; 186) 'g': [[isize;_];_] | 1094 | [185; 186) 'g': [[isize; _]; _] |
1095 | [189; 195) '[e, e]': [[isize;_];_] | 1095 | [189; 195) '[e, e]': [[isize; _]; _] |
1096 | [190; 191) 'e': [isize;_] | 1096 | [190; 191) 'e': [isize; _] |
1097 | [193; 194) 'e': [isize;_] | 1097 | [193; 194) 'e': [isize; _] |
1098 | [206; 207) 'h': [i32;_] | 1098 | [206; 207) 'h': [i32; _] |
1099 | [210; 216) '[1, 2]': [i32;_] | 1099 | [210; 216) '[1, 2]': [i32; _] |
1100 | [211; 212) '1': i32 | 1100 | [211; 212) '1': i32 |
1101 | [214; 215) '2': i32 | 1101 | [214; 215) '2': i32 |
1102 | [226; 227) 'i': [&str;_] | 1102 | [226; 227) 'i': [&str; _] |
1103 | [230; 240) '["a", "b"]': [&str;_] | 1103 | [230; 240) '["a", "b"]': [&str; _] |
1104 | [231; 234) '"a"': &str | 1104 | [231; 234) '"a"': &str |
1105 | [236; 239) '"b"': &str | 1105 | [236; 239) '"b"': &str |
1106 | [251; 252) 'b': [[&str;_];_] | 1106 | [251; 252) 'b': [[&str; _]; _] |
1107 | [255; 265) '[a, ["b"]]': [[&str;_];_] | 1107 | [255; 265) '[a, ["b"]]': [[&str; _]; _] |
1108 | [256; 257) 'a': [&str;_] | 1108 | [256; 257) 'a': [&str; _] |
1109 | [259; 264) '["b"]': [&str;_] | 1109 | [259; 264) '["b"]': [&str; _] |
1110 | [260; 263) '"b"': &str | 1110 | [260; 263) '"b"': &str |
1111 | [275; 276) 'x': [u8;_] | 1111 | [275; 276) 'x': [u8; _] |
1112 | [288; 290) '[]': [u8;_] | 1112 | [288; 290) '[]': [u8; _] |
1113 | "### | 1113 | "### |
1114 | ); | 1114 | ); |
1115 | } | 1115 | } |
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs index 4b268510c..a6ac18f86 100644 --- a/crates/ra_hir_ty/src/tests/traits.rs +++ b/crates/ra_hir_ty/src/tests/traits.rs | |||
@@ -479,7 +479,7 @@ fn indexing_arrays() { | |||
479 | @r###" | 479 | @r###" |
480 | [10; 26) '{ &mut...[2]; }': () | 480 | [10; 26) '{ &mut...[2]; }': () |
481 | [12; 23) '&mut [9][2]': &mut {unknown} | 481 | [12; 23) '&mut [9][2]': &mut {unknown} |
482 | [17; 20) '[9]': [i32;_] | 482 | [17; 20) '[9]': [i32; _] |
483 | [17; 23) '[9][2]': {unknown} | 483 | [17; 23) '[9][2]': {unknown} |
484 | [18; 19) '9': i32 | 484 | [18; 19) '9': i32 |
485 | [21; 22) '2': i32 | 485 | [21; 22) '2': i32 |
@@ -850,6 +850,7 @@ fn test<T: ApplyL>(t: T) { | |||
850 | } | 850 | } |
851 | 851 | ||
852 | #[test] | 852 | #[test] |
853 | #[ignore] | ||
853 | fn impl_trait() { | 854 | fn impl_trait() { |
854 | assert_snapshot!( | 855 | assert_snapshot!( |
855 | infer(r#" | 856 | infer(r#" |
@@ -1021,6 +1022,7 @@ fn test() { | |||
1021 | } | 1022 | } |
1022 | 1023 | ||
1023 | #[test] | 1024 | #[test] |
1025 | #[ignore] | ||
1024 | fn error_bound_chalk() { | 1026 | fn error_bound_chalk() { |
1025 | let t = type_at( | 1027 | let t = type_at( |
1026 | r#" | 1028 | r#" |
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs index 4aabd66dc..88af61e87 100644 --- a/crates/ra_hir_ty/src/traits.rs +++ b/crates/ra_hir_ty/src/traits.rs | |||
@@ -50,10 +50,19 @@ impl TraitSolver { | |||
50 | Err(_) => ra_db::Canceled::throw(), | 50 | Err(_) => ra_db::Canceled::throw(), |
51 | }; | 51 | }; |
52 | 52 | ||
53 | let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL); | ||
54 | |||
53 | let solution = panic::catch_unwind({ | 55 | let solution = panic::catch_unwind({ |
54 | let solver = panic::AssertUnwindSafe(&mut solver); | 56 | let solver = panic::AssertUnwindSafe(&mut solver); |
55 | let context = panic::AssertUnwindSafe(&context); | 57 | let context = panic::AssertUnwindSafe(&context); |
56 | move || solver.0.solve(context.0, goal) | 58 | move || { |
59 | solver.0.solve_limited(context.0, goal, || { | ||
60 | context.0.db.check_canceled(); | ||
61 | let remaining = fuel.get(); | ||
62 | fuel.set(remaining - 1); | ||
63 | remaining > 0 | ||
64 | }) | ||
65 | } | ||
57 | }); | 66 | }); |
58 | 67 | ||
59 | let solution = match solution { | 68 | let solution = match solution { |
@@ -78,7 +87,9 @@ impl TraitSolver { | |||
78 | /// This controls the maximum size of types Chalk considers. If we set this too | 87 | /// This controls the maximum size of types Chalk considers. If we set this too |
79 | /// high, we can run into slow edge cases; if we set it too low, Chalk won't | 88 | /// high, we can run into slow edge cases; if we set it too low, Chalk won't |
80 | /// find some solutions. | 89 | /// find some solutions. |
81 | const CHALK_SOLVER_MAX_SIZE: usize = 4; | 90 | const CHALK_SOLVER_MAX_SIZE: usize = 10; |
91 | /// This controls how much 'time' we give the Chalk solver before giving up. | ||
92 | const CHALK_SOLVER_FUEL: i32 = 100; | ||
82 | 93 | ||
83 | #[derive(Debug, Copy, Clone)] | 94 | #[derive(Debug, Copy, Clone)] |
84 | struct ChalkContext<'a, DB> { | 95 | struct ChalkContext<'a, DB> { |
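The two hunks above add a fuel mechanism: between solving steps, Chalk polls the `should_continue` closure, which first checks for cancellation and then burns one unit of fuel, so a goal is abandoned once `CHALK_SOLVER_FUEL` is exhausted. The snippet below is only a self-contained sketch of that pattern; `solve_limited` here is a made-up stand-in for the Chalk entry point, not its real signature.

```rust
use std::cell::Cell;

// Hypothetical stand-in for the solver entry point: it works in small steps
// and asks the caller between steps whether it should keep going.
fn solve_limited(mut should_continue: impl FnMut() -> bool) -> Option<&'static str> {
    for step in 0.. {
        if !should_continue() {
            return None; // out of fuel (or canceled): give up on this goal
        }
        if step == 3 {
            return Some("solution");
        }
    }
    unreachable!()
}

fn main() {
    const CHALK_SOLVER_FUEL: i32 = 100;
    let fuel = Cell::new(CHALK_SOLVER_FUEL);

    // Mirrors the closure in the diff: every poll burns one unit of fuel.
    let solution = solve_limited(|| {
        let remaining = fuel.get();
        fuel.set(remaining - 1);
        remaining > 0
    });
    assert_eq!(solution, Some("solution"));
}
```

`Cell` is enough here because the counter is only consulted from the single thread that runs the solver call.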
@@ -97,7 +108,8 @@ pub(crate) fn trait_solver_query( | |||
97 | } | 108 | } |
98 | 109 | ||
99 | fn create_chalk_solver() -> chalk_solve::Solver<TypeFamily> { | 110 | fn create_chalk_solver() -> chalk_solve::Solver<TypeFamily> { |
100 | let solver_choice = chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE }; | 111 | let solver_choice = |
112 | chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE, expected_answers: None }; | ||
101 | solver_choice.into_solver() | 113 | solver_choice.into_solver() |
102 | } | 114 | } |
103 | 115 | ||
@@ -232,7 +244,6 @@ fn solution_from_chalk( | |||
232 | let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<TypeFamily>>| { | 244 | let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<TypeFamily>>| { |
233 | let value = subst | 245 | let value = subst |
234 | .value | 246 | .value |
235 | .parameters | ||
236 | .into_iter() | 247 | .into_iter() |
237 | .map(|p| { | 248 | .map(|p| { |
238 | let ty = match p.ty() { | 249 | let ty = match p.ty() { |
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs index 555930c9b..fe9cb556c 100644 --- a/crates/ra_hir_ty/src/traits/chalk.rs +++ b/crates/ra_hir_ty/src/traits/chalk.rs | |||
@@ -3,7 +3,7 @@ use std::{fmt, sync::Arc}; | |||
3 | 3 | ||
4 | use log::debug; | 4 | use log::debug; |
5 | 5 | ||
6 | use chalk_ir::{cast::Cast, Parameter, PlaceholderIndex, TypeName, UniverseIndex}; | 6 | use chalk_ir::{cast::Cast, GoalData, Parameter, PlaceholderIndex, TypeName, UniverseIndex}; |
7 | 7 | ||
8 | use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId}; | 8 | use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId}; |
9 | use ra_db::{ | 9 | use ra_db::{ |
@@ -24,6 +24,8 @@ impl chalk_ir::family::TypeFamily for TypeFamily { | |||
24 | type InternedType = Box<chalk_ir::TyData<Self>>; | 24 | type InternedType = Box<chalk_ir::TyData<Self>>; |
25 | type InternedLifetime = chalk_ir::LifetimeData<Self>; | 25 | type InternedLifetime = chalk_ir::LifetimeData<Self>; |
26 | type InternedParameter = chalk_ir::ParameterData<Self>; | 26 | type InternedParameter = chalk_ir::ParameterData<Self>; |
27 | type InternedGoal = Arc<GoalData<Self>>; | ||
28 | type InternedSubstitution = Vec<Parameter<Self>>; | ||
27 | type DefId = InternId; | 29 | type DefId = InternId; |
28 | 30 | ||
29 | // FIXME: implement these | 31 | // FIXME: implement these |
@@ -48,8 +50,8 @@ impl chalk_ir::family::TypeFamily for TypeFamily { | |||
48 | None | 50 | None |
49 | } | 51 | } |
50 | 52 | ||
51 | fn debug_projection( | 53 | fn debug_alias( |
52 | _projection: &chalk_ir::ProjectionTy<Self>, | 54 | _projection: &chalk_ir::AliasTy<Self>, |
53 | _fmt: &mut fmt::Formatter<'_>, | 55 | _fmt: &mut fmt::Formatter<'_>, |
54 | ) -> Option<fmt::Result> { | 56 | ) -> Option<fmt::Result> { |
55 | None | 57 | None |
@@ -78,6 +80,24 @@ impl chalk_ir::family::TypeFamily for TypeFamily { | |||
78 | fn parameter_data(parameter: &chalk_ir::ParameterData<Self>) -> &chalk_ir::ParameterData<Self> { | 80 | fn parameter_data(parameter: &chalk_ir::ParameterData<Self>) -> &chalk_ir::ParameterData<Self> { |
79 | parameter | 81 | parameter |
80 | } | 82 | } |
83 | |||
84 | fn intern_goal(goal: GoalData<Self>) -> Arc<GoalData<Self>> { | ||
85 | Arc::new(goal) | ||
86 | } | ||
87 | |||
88 | fn goal_data(goal: &Arc<GoalData<Self>>) -> &GoalData<Self> { | ||
89 | goal | ||
90 | } | ||
91 | |||
92 | fn intern_substitution<E>( | ||
93 | data: impl IntoIterator<Item = Result<Parameter<Self>, E>>, | ||
94 | ) -> Result<Vec<Parameter<Self>>, E> { | ||
95 | data.into_iter().collect() | ||
96 | } | ||
97 | |||
98 | fn substitution_data(substitution: &Vec<Parameter<Self>>) -> &[Parameter<Self>] { | ||
99 | substitution | ||
100 | } | ||
81 | } | 101 | } |
82 | 102 | ||
83 | impl chalk_ir::family::HasTypeFamily for TypeFamily { | 103 | impl chalk_ir::family::HasTypeFamily for TypeFamily { |
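The added associated types follow Chalk's intern/lookup convention: each interned representation pairs a constructor with an accessor that returns a reference to the underlying data. Stripped of the Chalk generics, the shape is roughly the sketch below; `GoalData` here is a made-up placeholder type, not `chalk_ir::GoalData`.

```rust
use std::sync::Arc;

// Placeholder for the data being interned.
#[derive(Debug, PartialEq)]
struct GoalData(String);

// The interned form is a cheap-to-clone handle to shared data...
type InternedGoal = Arc<GoalData>;

// ...and this pair of functions mirrors intern_goal / goal_data in the hunk above.
fn intern_goal(goal: GoalData) -> InternedGoal {
    Arc::new(goal)
}

fn goal_data(goal: &InternedGoal) -> &GoalData {
    goal // deref coercion from &Arc<GoalData> to &GoalData
}

fn main() {
    let g = intern_goal(GoalData("Implemented(Foo: Clone)".to_string()));
    let g2 = g.clone(); // clones the handle, not the data
    assert!(Arc::ptr_eq(&g, &g2));
    assert_eq!(goal_data(&g), goal_data(&g2));
}
```

The same idea applies to `InternedSubstitution = Vec<Parameter<Self>>`, where "interning" is just collecting into a vector and handing back a slice.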
@@ -114,13 +134,13 @@ impl ToChalk for Ty { | |||
114 | match self { | 134 | match self { |
115 | Ty::Apply(apply_ty) => { | 135 | Ty::Apply(apply_ty) => { |
116 | let name = apply_ty.ctor.to_chalk(db); | 136 | let name = apply_ty.ctor.to_chalk(db); |
117 | let parameters = apply_ty.parameters.to_chalk(db); | 137 | let substitution = apply_ty.parameters.to_chalk(db); |
118 | chalk_ir::ApplicationTy { name, parameters }.cast().intern() | 138 | chalk_ir::ApplicationTy { name, substitution }.cast().intern() |
119 | } | 139 | } |
120 | Ty::Projection(proj_ty) => { | 140 | Ty::Projection(proj_ty) => { |
121 | let associated_ty_id = proj_ty.associated_ty.to_chalk(db); | 141 | let associated_ty_id = proj_ty.associated_ty.to_chalk(db); |
122 | let parameters = proj_ty.parameters.to_chalk(db); | 142 | let substitution = proj_ty.parameters.to_chalk(db); |
123 | chalk_ir::ProjectionTy { associated_ty_id, parameters }.cast().intern() | 143 | chalk_ir::AliasTy { associated_ty_id, substitution }.cast().intern() |
124 | } | 144 | } |
125 | Ty::Param { idx, .. } => { | 145 | Ty::Param { idx, .. } => { |
126 | PlaceholderIndex { ui: UniverseIndex::ROOT, idx: idx as usize } | 146 | PlaceholderIndex { ui: UniverseIndex::ROOT, idx: idx as usize } |
@@ -135,23 +155,13 @@ impl ToChalk for Ty { | |||
135 | .cloned() | 155 | .cloned() |
136 | .map(|p| p.to_chalk(db)) | 156 | .map(|p| p.to_chalk(db)) |
137 | .collect(); | 157 | .collect(); |
138 | let bounded_ty = chalk_ir::BoundedTy { bounds: make_binders(where_clauses, 1) }; | 158 | let bounded_ty = chalk_ir::DynTy { bounds: make_binders(where_clauses, 1) }; |
139 | chalk_ir::TyData::Dyn(bounded_ty).intern() | 159 | chalk_ir::TyData::Dyn(bounded_ty).intern() |
140 | } | 160 | } |
141 | Ty::Opaque(predicates) => { | 161 | Ty::Opaque(_) | Ty::Unknown => { |
142 | let where_clauses = predicates | 162 | let substitution = chalk_ir::Substitution::empty(); |
143 | .iter() | ||
144 | .filter(|p| !p.is_error()) | ||
145 | .cloned() | ||
146 | .map(|p| p.to_chalk(db)) | ||
147 | .collect(); | ||
148 | let bounded_ty = chalk_ir::BoundedTy { bounds: make_binders(where_clauses, 1) }; | ||
149 | chalk_ir::TyData::Opaque(bounded_ty).intern() | ||
150 | } | ||
151 | Ty::Unknown => { | ||
152 | let parameters = Vec::new(); | ||
153 | let name = TypeName::Error; | 163 | let name = TypeName::Error; |
154 | chalk_ir::ApplicationTy { name, parameters }.cast().intern() | 164 | chalk_ir::ApplicationTy { name, substitution }.cast().intern() |
155 | } | 165 | } |
156 | } | 166 | } |
157 | } | 167 | } |
@@ -161,7 +171,7 @@ impl ToChalk for Ty { | |||
161 | TypeName::Error => Ty::Unknown, | 171 | TypeName::Error => Ty::Unknown, |
162 | _ => { | 172 | _ => { |
163 | let ctor = from_chalk(db, apply_ty.name); | 173 | let ctor = from_chalk(db, apply_ty.name); |
164 | let parameters = from_chalk(db, apply_ty.parameters); | 174 | let parameters = from_chalk(db, apply_ty.substitution); |
165 | Ty::Apply(ApplicationTy { ctor, parameters }) | 175 | Ty::Apply(ApplicationTy { ctor, parameters }) |
166 | } | 176 | } |
167 | }, | 177 | }, |
@@ -169,12 +179,12 @@ impl ToChalk for Ty { | |||
169 | assert_eq!(idx.ui, UniverseIndex::ROOT); | 179 | assert_eq!(idx.ui, UniverseIndex::ROOT); |
170 | Ty::Param { idx: idx.idx as u32, name: crate::Name::missing() } | 180 | Ty::Param { idx: idx.idx as u32, name: crate::Name::missing() } |
171 | } | 181 | } |
172 | chalk_ir::TyData::Projection(proj) => { | 182 | chalk_ir::TyData::Alias(proj) => { |
173 | let associated_ty = from_chalk(db, proj.associated_ty_id); | 183 | let associated_ty = from_chalk(db, proj.associated_ty_id); |
174 | let parameters = from_chalk(db, proj.parameters); | 184 | let parameters = from_chalk(db, proj.substitution); |
175 | Ty::Projection(ProjectionTy { associated_ty, parameters }) | 185 | Ty::Projection(ProjectionTy { associated_ty, parameters }) |
176 | } | 186 | } |
177 | chalk_ir::TyData::ForAll(_) => unimplemented!(), | 187 | chalk_ir::TyData::Function(_) => unimplemented!(), |
178 | chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx as u32), | 188 | chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx as u32), |
179 | chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, | 189 | chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, |
180 | chalk_ir::TyData::Dyn(where_clauses) => { | 190 | chalk_ir::TyData::Dyn(where_clauses) => { |
@@ -183,27 +193,18 @@ impl ToChalk for Ty { | |||
183 | where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect(); | 193 | where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect(); |
184 | Ty::Dyn(predicates) | 194 | Ty::Dyn(predicates) |
185 | } | 195 | } |
186 | chalk_ir::TyData::Opaque(where_clauses) => { | ||
187 | assert_eq!(where_clauses.bounds.binders.len(), 1); | ||
188 | let predicates = | ||
189 | where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect(); | ||
190 | Ty::Opaque(predicates) | ||
191 | } | ||
192 | } | 196 | } |
193 | } | 197 | } |
194 | } | 198 | } |
195 | 199 | ||
196 | impl ToChalk for Substs { | 200 | impl ToChalk for Substs { |
197 | type Chalk = Vec<chalk_ir::Parameter<TypeFamily>>; | 201 | type Chalk = chalk_ir::Substitution<TypeFamily>; |
198 | 202 | ||
199 | fn to_chalk(self, db: &impl HirDatabase) -> Vec<Parameter<TypeFamily>> { | 203 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Substitution<TypeFamily> { |
200 | self.iter().map(|ty| ty.clone().to_chalk(db).cast()).collect() | 204 | chalk_ir::Substitution::from(self.iter().map(|ty| ty.clone().to_chalk(db))) |
201 | } | 205 | } |
202 | 206 | ||
203 | fn from_chalk( | 207 | fn from_chalk(db: &impl HirDatabase, parameters: chalk_ir::Substitution<TypeFamily>) -> Substs { |
204 | db: &impl HirDatabase, | ||
205 | parameters: Vec<chalk_ir::Parameter<TypeFamily>>, | ||
206 | ) -> Substs { | ||
207 | let tys = parameters | 208 | let tys = parameters |
208 | .into_iter() | 209 | .into_iter() |
209 | .map(|p| match p.ty() { | 210 | .map(|p| match p.ty() { |
@@ -220,13 +221,13 @@ impl ToChalk for TraitRef { | |||
220 | 221 | ||
221 | fn to_chalk(self: TraitRef, db: &impl HirDatabase) -> chalk_ir::TraitRef<TypeFamily> { | 222 | fn to_chalk(self: TraitRef, db: &impl HirDatabase) -> chalk_ir::TraitRef<TypeFamily> { |
222 | let trait_id = self.trait_.to_chalk(db); | 223 | let trait_id = self.trait_.to_chalk(db); |
223 | let parameters = self.substs.to_chalk(db); | 224 | let substitution = self.substs.to_chalk(db); |
224 | chalk_ir::TraitRef { trait_id, parameters } | 225 | chalk_ir::TraitRef { trait_id, substitution } |
225 | } | 226 | } |
226 | 227 | ||
227 | fn from_chalk(db: &impl HirDatabase, trait_ref: chalk_ir::TraitRef<TypeFamily>) -> Self { | 228 | fn from_chalk(db: &impl HirDatabase, trait_ref: chalk_ir::TraitRef<TypeFamily>) -> Self { |
228 | let trait_ = from_chalk(db, trait_ref.trait_id); | 229 | let trait_ = from_chalk(db, trait_ref.trait_id); |
229 | let substs = from_chalk(db, trait_ref.parameters); | 230 | let substs = from_chalk(db, trait_ref.substitution); |
230 | TraitRef { trait_, substs } | 231 | TraitRef { trait_, substs } |
231 | } | 232 | } |
232 | } | 233 | } |
@@ -317,8 +318,8 @@ impl ToChalk for GenericPredicate { | |||
317 | make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0) | 318 | make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0) |
318 | } | 319 | } |
319 | GenericPredicate::Projection(projection_pred) => make_binders( | 320 | GenericPredicate::Projection(projection_pred) => make_binders( |
320 | chalk_ir::WhereClause::ProjectionEq(chalk_ir::ProjectionEq { | 321 | chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { |
321 | projection: projection_pred.projection_ty.to_chalk(db), | 322 | alias: projection_pred.projection_ty.to_chalk(db), |
322 | ty: projection_pred.ty.to_chalk(db), | 323 | ty: projection_pred.ty.to_chalk(db), |
323 | }), | 324 | }), |
324 | 0, | 325 | 0, |
@@ -335,8 +336,8 @@ impl ToChalk for GenericPredicate { | |||
335 | chalk_ir::WhereClause::Implemented(tr) => { | 336 | chalk_ir::WhereClause::Implemented(tr) => { |
336 | GenericPredicate::Implemented(from_chalk(db, tr)) | 337 | GenericPredicate::Implemented(from_chalk(db, tr)) |
337 | } | 338 | } |
338 | chalk_ir::WhereClause::ProjectionEq(projection_eq) => { | 339 | chalk_ir::WhereClause::AliasEq(projection_eq) => { |
339 | let projection_ty = from_chalk(db, projection_eq.projection); | 340 | let projection_ty = from_chalk(db, projection_eq.alias); |
340 | let ty = from_chalk(db, projection_eq.ty); | 341 | let ty = from_chalk(db, projection_eq.ty); |
341 | GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty }) | 342 | GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty }) |
342 | } | 343 | } |
@@ -345,22 +346,22 @@ impl ToChalk for GenericPredicate { | |||
345 | } | 346 | } |
346 | 347 | ||
347 | impl ToChalk for ProjectionTy { | 348 | impl ToChalk for ProjectionTy { |
348 | type Chalk = chalk_ir::ProjectionTy<TypeFamily>; | 349 | type Chalk = chalk_ir::AliasTy<TypeFamily>; |
349 | 350 | ||
350 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::ProjectionTy<TypeFamily> { | 351 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::AliasTy<TypeFamily> { |
351 | chalk_ir::ProjectionTy { | 352 | chalk_ir::AliasTy { |
352 | associated_ty_id: self.associated_ty.to_chalk(db), | 353 | associated_ty_id: self.associated_ty.to_chalk(db), |
353 | parameters: self.parameters.to_chalk(db), | 354 | substitution: self.parameters.to_chalk(db), |
354 | } | 355 | } |
355 | } | 356 | } |
356 | 357 | ||
357 | fn from_chalk( | 358 | fn from_chalk( |
358 | db: &impl HirDatabase, | 359 | db: &impl HirDatabase, |
359 | projection_ty: chalk_ir::ProjectionTy<TypeFamily>, | 360 | projection_ty: chalk_ir::AliasTy<TypeFamily>, |
360 | ) -> ProjectionTy { | 361 | ) -> ProjectionTy { |
361 | ProjectionTy { | 362 | ProjectionTy { |
362 | associated_ty: from_chalk(db, projection_ty.associated_ty_id), | 363 | associated_ty: from_chalk(db, projection_ty.associated_ty_id), |
363 | parameters: from_chalk(db, projection_ty.parameters), | 364 | parameters: from_chalk(db, projection_ty.substitution), |
364 | } | 365 | } |
365 | } | 366 | } |
366 | } | 367 | } |
@@ -369,10 +370,7 @@ impl ToChalk for super::ProjectionPredicate { | |||
369 | type Chalk = chalk_ir::Normalize<TypeFamily>; | 370 | type Chalk = chalk_ir::Normalize<TypeFamily>; |
370 | 371 | ||
371 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Normalize<TypeFamily> { | 372 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Normalize<TypeFamily> { |
372 | chalk_ir::Normalize { | 373 | chalk_ir::Normalize { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) } |
373 | projection: self.projection_ty.to_chalk(db), | ||
374 | ty: self.ty.to_chalk(db), | ||
375 | } | ||
376 | } | 374 | } |
377 | 375 | ||
378 | fn from_chalk(_db: &impl HirDatabase, _normalize: chalk_ir::Normalize<TypeFamily>) -> Self { | 376 | fn from_chalk(_db: &impl HirDatabase, _normalize: chalk_ir::Normalize<TypeFamily>) -> Self { |
diff --git a/crates/ra_ide/Cargo.toml b/crates/ra_ide/Cargo.toml index 2c9f9dce0..53817d1f7 100644 --- a/crates/ra_ide/Cargo.toml +++ b/crates/ra_ide/Cargo.toml | |||
@@ -39,7 +39,7 @@ ra_assists = { path = "../ra_assists" } | |||
39 | hir = { path = "../ra_hir", package = "ra_hir" } | 39 | hir = { path = "../ra_hir", package = "ra_hir" } |
40 | 40 | ||
41 | [dev-dependencies] | 41 | [dev-dependencies] |
42 | insta = "0.12.0" | 42 | insta = "0.13.0" |
43 | 43 | ||
44 | [dev-dependencies.proptest] | 44 | [dev-dependencies.proptest] |
45 | version = "0.9.0" | 45 | version = "0.9.0" |
diff --git a/crates/ra_ide/src/assists.rs b/crates/ra_ide/src/assists.rs index a936900da..c43c45c65 100644 --- a/crates/ra_ide/src/assists.rs +++ b/crates/ra_ide/src/assists.rs | |||
@@ -2,8 +2,9 @@ | |||
2 | 2 | ||
3 | use ra_db::{FilePosition, FileRange}; | 3 | use ra_db::{FilePosition, FileRange}; |
4 | 4 | ||
5 | use crate::{db::RootDatabase, FileId, SourceChange, SourceFileEdit}; | 5 | use crate::{ |
6 | 6 | db::RootDatabase, imports_locator::ImportsLocatorIde, FileId, SourceChange, SourceFileEdit, | |
7 | }; | ||
7 | use either::Either; | 8 | use either::Either; |
8 | pub use ra_assists::AssistId; | 9 | pub use ra_assists::AssistId; |
9 | use ra_assists::{AssistAction, AssistLabel}; | 10 | use ra_assists::{AssistAction, AssistLabel}; |
@@ -16,7 +17,7 @@ pub struct Assist { | |||
16 | } | 17 | } |
17 | 18 | ||
18 | pub(crate) fn assists(db: &RootDatabase, frange: FileRange) -> Vec<Assist> { | 19 | pub(crate) fn assists(db: &RootDatabase, frange: FileRange) -> Vec<Assist> { |
19 | ra_assists::assists(db, frange) | 20 | ra_assists::assists_with_imports_locator(db, frange, ImportsLocatorIde::new(db)) |
20 | .into_iter() | 21 | .into_iter() |
21 | .map(|assist| { | 22 | .map(|assist| { |
22 | let file_id = frange.file_id; | 23 | let file_id = frange.file_id; |
diff --git a/crates/ra_ide/src/change.rs b/crates/ra_ide/src/change.rs index b0aa2c8e0..45a58690b 100644 --- a/crates/ra_ide/src/change.rs +++ b/crates/ra_ide/src/change.rs | |||
@@ -166,13 +166,15 @@ impl LibraryData { | |||
166 | const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100); | 166 | const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100); |
167 | 167 | ||
168 | impl RootDatabase { | 168 | impl RootDatabase { |
169 | pub(crate) fn request_cancellation(&mut self) { | ||
170 | let _p = profile("RootDatabase::request_cancellation"); | ||
171 | self.salsa_runtime_mut().synthetic_write(Durability::LOW); | ||
172 | } | ||
173 | |||
169 | pub(crate) fn apply_change(&mut self, change: AnalysisChange) { | 174 | pub(crate) fn apply_change(&mut self, change: AnalysisChange) { |
170 | let _p = profile("RootDatabase::apply_change"); | 175 | let _p = profile("RootDatabase::apply_change"); |
176 | self.request_cancellation(); | ||
171 | log::info!("apply_change {:?}", change); | 177 | log::info!("apply_change {:?}", change); |
172 | { | ||
173 | let _p = profile("RootDatabase::apply_change/cancellation"); | ||
174 | self.salsa_runtime_mut().synthetic_write(Durability::LOW); | ||
175 | } | ||
176 | if !change.new_roots.is_empty() { | 178 | if !change.new_roots.is_empty() { |
177 | let mut local_roots = Vec::clone(&self.local_roots()); | 179 | let mut local_roots = Vec::clone(&self.local_roots()); |
178 | for (root_id, is_local) in change.new_roots { | 180 | for (root_id, is_local) in change.new_roots { |
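The change above factors the salsa `synthetic_write(Durability::LOW)` call into a dedicated `request_cancellation` step, so in-flight analysis can be canceled before (or independently of) applying a change. The sketch below is only an analogy for that control flow: it uses a plain atomic flag instead of salsa's revision bump, and all names are invented for illustration.

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;

// Hypothetical database whose long-running queries poll a cancellation flag,
// much like salsa queries observe a pending revision increment.
struct Db {
    cancel_requested: Arc<AtomicBool>,
}

impl Db {
    fn request_cancellation(&self) {
        // Analogue of `synthetic_write(Durability::LOW)`: tell running work to stop.
        self.cancel_requested.store(true, Ordering::SeqCst);
    }

    fn long_query(&self) -> Result<u64, &'static str> {
        let mut acc = 0u64;
        for i in 0..10_000_000u64 {
            if self.cancel_requested.load(Ordering::SeqCst) {
                return Err("canceled");
            }
            acc = acc.wrapping_add(i);
        }
        Ok(acc)
    }
}

fn main() {
    let db = Arc::new(Db { cancel_requested: Arc::new(AtomicBool::new(false)) });

    let worker = {
        let db = Arc::clone(&db);
        thread::spawn(move || db.long_query())
    };

    // The apply_change path first requests cancellation, then mutates state.
    db.request_cancellation();
    let result = worker.join().unwrap();
    println!("query finished with {:?}", result); // Ok(sum) or Err("canceled")
}
```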
@@ -299,45 +301,74 @@ impl RootDatabase { | |||
299 | )*} | 301 | )*} |
300 | } | 302 | } |
301 | sweep_each_query![ | 303 | sweep_each_query![ |
304 | // SourceDatabase | ||
302 | ra_db::ParseQuery | 305 | ra_db::ParseQuery |
303 | ra_db::SourceRootCratesQuery | 306 | ra_db::SourceRootCratesQuery |
307 | |||
308 | // AstDatabase | ||
304 | hir::db::AstIdMapQuery | 309 | hir::db::AstIdMapQuery |
305 | hir::db::ParseMacroQuery | 310 | hir::db::InternMacroQuery |
306 | hir::db::MacroDefQuery | ||
307 | hir::db::MacroArgQuery | 311 | hir::db::MacroArgQuery |
312 | hir::db::MacroDefQuery | ||
313 | hir::db::ParseMacroQuery | ||
308 | hir::db::MacroExpandQuery | 314 | hir::db::MacroExpandQuery |
315 | |||
316 | // DefDatabase | ||
317 | hir::db::RawItemsQuery | ||
318 | hir::db::ComputeCrateDefMapQuery | ||
309 | hir::db::StructDataQuery | 319 | hir::db::StructDataQuery |
320 | hir::db::UnionDataQuery | ||
310 | hir::db::EnumDataQuery | 321 | hir::db::EnumDataQuery |
322 | hir::db::ImplDataQuery | ||
311 | hir::db::TraitDataQuery | 323 | hir::db::TraitDataQuery |
312 | hir::db::RawItemsQuery | ||
313 | hir::db::ComputeCrateDefMapQuery | ||
314 | hir::db::GenericParamsQuery | ||
315 | hir::db::FunctionDataQuery | ||
316 | hir::db::TypeAliasDataQuery | 324 | hir::db::TypeAliasDataQuery |
325 | hir::db::FunctionDataQuery | ||
317 | hir::db::ConstDataQuery | 326 | hir::db::ConstDataQuery |
318 | hir::db::StaticDataQuery | 327 | hir::db::StaticDataQuery |
328 | hir::db::BodyWithSourceMapQuery | ||
329 | hir::db::BodyQuery | ||
330 | hir::db::ExprScopesQuery | ||
331 | hir::db::GenericParamsQuery | ||
332 | hir::db::AttrsQuery | ||
319 | hir::db::ModuleLangItemsQuery | 333 | hir::db::ModuleLangItemsQuery |
320 | hir::db::CrateLangItemsQuery | 334 | hir::db::CrateLangItemsQuery |
321 | hir::db::LangItemQuery | 335 | hir::db::LangItemQuery |
322 | hir::db::DocumentationQuery | 336 | hir::db::DocumentationQuery |
323 | hir::db::ExprScopesQuery | 337 | |
338 | // InternDatabase | ||
339 | hir::db::InternFunctionQuery | ||
340 | hir::db::InternStructQuery | ||
341 | hir::db::InternUnionQuery | ||
342 | hir::db::InternEnumQuery | ||
343 | hir::db::InternConstQuery | ||
344 | hir::db::InternStaticQuery | ||
345 | hir::db::InternTraitQuery | ||
346 | hir::db::InternTypeAliasQuery | ||
347 | hir::db::InternImplQuery | ||
348 | |||
349 | // HirDatabase | ||
324 | hir::db::DoInferQuery | 350 | hir::db::DoInferQuery |
325 | hir::db::TyQuery | 351 | hir::db::TyQuery |
326 | hir::db::ValueTyQuery | 352 | hir::db::ValueTyQuery |
353 | hir::db::ImplSelfTyQuery | ||
354 | hir::db::ImplTraitQuery | ||
327 | hir::db::FieldTypesQuery | 355 | hir::db::FieldTypesQuery |
328 | hir::db::CallableItemSignatureQuery | 356 | hir::db::CallableItemSignatureQuery |
357 | hir::db::GenericPredicatesForParamQuery | ||
329 | hir::db::GenericPredicatesQuery | 358 | hir::db::GenericPredicatesQuery |
330 | hir::db::GenericDefaultsQuery | 359 | hir::db::GenericDefaultsQuery |
331 | hir::db::BodyWithSourceMapQuery | ||
332 | hir::db::BodyQuery | ||
333 | hir::db::ImplsInCrateQuery | 360 | hir::db::ImplsInCrateQuery |
334 | hir::db::ImplsForTraitQuery | 361 | hir::db::ImplsForTraitQuery |
362 | hir::db::TraitSolverQuery | ||
363 | hir::db::InternTypeCtorQuery | ||
364 | hir::db::InternChalkImplQuery | ||
365 | hir::db::InternAssocTyValueQuery | ||
335 | hir::db::AssociatedTyDataQuery | 366 | hir::db::AssociatedTyDataQuery |
367 | hir::db::AssociatedTyValueQuery | ||
368 | hir::db::TraitSolveQuery | ||
336 | hir::db::TraitDatumQuery | 369 | hir::db::TraitDatumQuery |
337 | hir::db::StructDatumQuery | 370 | hir::db::StructDatumQuery |
338 | hir::db::ImplDatumQuery | 371 | hir::db::ImplDatumQuery |
339 | hir::db::ImplDataQuery | ||
340 | hir::db::TraitSolveQuery | ||
341 | ]; | 372 | ]; |
342 | acc.sort_by_key(|it| std::cmp::Reverse(it.1)); | 373 | acc.sort_by_key(|it| std::cmp::Reverse(it.1)); |
343 | acc | 374 | acc |
diff --git a/crates/ra_ide/src/expand.rs b/crates/ra_ide/src/expand.rs index b82259a3d..831438c09 100644 --- a/crates/ra_ide/src/expand.rs +++ b/crates/ra_ide/src/expand.rs | |||
@@ -79,6 +79,14 @@ pub(crate) fn descend_into_macros( | |||
79 | let source_analyzer = | 79 | let source_analyzer = |
80 | hir::SourceAnalyzer::new(db, src.with_value(src.value.parent()).as_ref(), None); | 80 | hir::SourceAnalyzer::new(db, src.with_value(src.value.parent()).as_ref(), None); |
81 | 81 | ||
82 | descend_into_macros_with_analyzer(db, &source_analyzer, src) | ||
83 | } | ||
84 | |||
85 | pub(crate) fn descend_into_macros_with_analyzer( | ||
86 | db: &RootDatabase, | ||
87 | source_analyzer: &hir::SourceAnalyzer, | ||
88 | src: InFile<SyntaxToken>, | ||
89 | ) -> InFile<SyntaxToken> { | ||
82 | successors(Some(src), |token| { | 90 | successors(Some(src), |token| { |
83 | let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?; | 91 | let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?; |
84 | let tt = macro_call.token_tree()?; | 92 | let tt = macro_call.token_tree()?; |
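The extraction above exists so the syntax highlighter can reuse the macro-descent loop with an analyzer it already built. The loop itself is `std::iter::successors`: keep mapping the current token to the token inside the next macro expansion until that fails, then take the last element. A toy illustration of that idiom (halving a number stands in for descending one expansion level):

```rust
use std::iter::successors;

fn main() {
    // Stand-in for "map a token to its counterpart inside the macro expansion".
    let descend_once = |n: &u32| -> Option<u32> { if *n > 1 { Some(*n / 2) } else { None } };

    // `successors` yields the seed plus every successful step; `.last()` is the
    // deepest value reached, which is how descend_into_macros ends up with the
    // token inside the innermost expansion.
    let deepest = successors(Some(40u32), descend_once).last();
    assert_eq!(deepest, Some(1));
}
```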
diff --git a/crates/ra_ide/src/imports_locator.rs b/crates/ra_ide/src/imports_locator.rs new file mode 100644 index 000000000..48b014c7d --- /dev/null +++ b/crates/ra_ide/src/imports_locator.rs | |||
@@ -0,0 +1,76 @@ | |||
1 | //! This module contains the import search functionality that is provided to the ra_assists module. | ||
2 | //! Later, it should be moved out into a separate crate accessible from the ra_assists module. | ||
3 | |||
4 | use crate::{ | ||
5 | db::RootDatabase, | ||
6 | references::{classify_name, NameDefinition, NameKind}, | ||
7 | symbol_index::{self, FileSymbol}, | ||
8 | Query, | ||
9 | }; | ||
10 | use hir::{db::HirDatabase, ModuleDef, SourceBinder}; | ||
11 | use ra_assists::ImportsLocator; | ||
12 | use ra_prof::profile; | ||
13 | use ra_syntax::{ast, AstNode, SyntaxKind::NAME}; | ||
14 | |||
15 | pub(crate) struct ImportsLocatorIde<'a> { | ||
16 | source_binder: SourceBinder<'a, RootDatabase>, | ||
17 | } | ||
18 | |||
19 | impl<'a> ImportsLocatorIde<'a> { | ||
20 | pub(crate) fn new(db: &'a RootDatabase) -> Self { | ||
21 | Self { source_binder: SourceBinder::new(db) } | ||
22 | } | ||
23 | |||
24 | fn get_name_definition( | ||
25 | &mut self, | ||
26 | db: &impl HirDatabase, | ||
27 | import_candidate: &FileSymbol, | ||
28 | ) -> Option<NameDefinition> { | ||
29 | let _p = profile("get_name_definition"); | ||
30 | let file_id = import_candidate.file_id.into(); | ||
31 | let candidate_node = import_candidate.ptr.to_node(&db.parse_or_expand(file_id)?); | ||
32 | let candidate_name_node = if candidate_node.kind() != NAME { | ||
33 | candidate_node.children().find(|it| it.kind() == NAME)? | ||
34 | } else { | ||
35 | candidate_node | ||
36 | }; | ||
37 | classify_name( | ||
38 | &mut self.source_binder, | ||
39 | hir::InFile { file_id, value: &ast::Name::cast(candidate_name_node)? }, | ||
40 | ) | ||
41 | } | ||
42 | } | ||
43 | |||
44 | impl ImportsLocator for ImportsLocatorIde<'_> { | ||
45 | fn find_imports(&mut self, name_to_import: &str) -> Vec<ModuleDef> { | ||
46 | let _p = profile("search_for_imports"); | ||
47 | let db = self.source_binder.db; | ||
48 | |||
49 | let project_results = { | ||
50 | let mut query = Query::new(name_to_import.to_string()); | ||
51 | query.exact(); | ||
52 | query.limit(40); | ||
53 | symbol_index::world_symbols(db, query) | ||
54 | }; | ||
55 | let lib_results = { | ||
56 | let mut query = Query::new(name_to_import.to_string()); | ||
57 | query.libs(); | ||
58 | query.exact(); | ||
59 | query.limit(40); | ||
60 | symbol_index::world_symbols(db, query) | ||
61 | }; | ||
62 | |||
63 | project_results | ||
64 | .into_iter() | ||
65 | .chain(lib_results.into_iter()) | ||
66 | .filter_map(|import_candidate| self.get_name_definition(db, &import_candidate)) | ||
67 | .filter_map(|name_definition_to_import| { | ||
68 | if let NameKind::Def(module_def) = name_definition_to_import.kind { | ||
69 | Some(module_def) | ||
70 | } else { | ||
71 | None | ||
72 | } | ||
73 | }) | ||
74 | .collect() | ||
75 | } | ||
76 | } | ||
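This new file is the IDE-side implementation of the `ImportsLocator` trait, which is what keeps `ra_assists` decoupled from the symbol index: the assist only ever sees the trait. The sketch below mirrors that seam with deliberately simplified types (plain path strings instead of `hir::ModuleDef`), just to show how another locator, such as a test double, could be plugged in; it is not the real trait definition.

```rust
// Simplified stand-in for ra_assists::ImportsLocator; the real trait returns
// Vec<hir::ModuleDef>, replaced here by plain import paths for illustration.
trait ImportsLocator {
    fn find_imports(&mut self, name_to_import: &str) -> Vec<String>;
}

// A test double backed by a fixed table instead of the IDE's symbol index.
struct FakeLocator {
    known: Vec<(&'static str, &'static str)>,
}

impl ImportsLocator for FakeLocator {
    fn find_imports(&mut self, name_to_import: &str) -> Vec<String> {
        self.known
            .iter()
            .filter(|(name, _)| *name == name_to_import)
            .map(|(_, path)| path.to_string())
            .collect()
    }
}

fn main() {
    let mut locator = FakeLocator {
        known: vec![
            ("HashMap", "std::collections::HashMap"),
            ("HashSet", "std::collections::HashSet"),
        ],
    };
    assert_eq!(locator.find_imports("HashMap"), vec!["std::collections::HashMap".to_string()]);
}
```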
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs index 4d8deb21c..03ad6b2c1 100644 --- a/crates/ra_ide/src/lib.rs +++ b/crates/ra_ide/src/lib.rs | |||
@@ -30,6 +30,7 @@ mod syntax_highlighting; | |||
30 | mod parent_module; | 30 | mod parent_module; |
31 | mod references; | 31 | mod references; |
32 | mod impls; | 32 | mod impls; |
33 | mod imports_locator; | ||
33 | mod assists; | 34 | mod assists; |
34 | mod diagnostics; | 35 | mod diagnostics; |
35 | mod syntax_tree; | 36 | mod syntax_tree; |
@@ -202,6 +203,9 @@ impl AnalysisHost { | |||
202 | pub fn per_query_memory_usage(&mut self) -> Vec<(String, ra_prof::Bytes)> { | 203 | pub fn per_query_memory_usage(&mut self) -> Vec<(String, ra_prof::Bytes)> { |
203 | self.db.per_query_memory_usage() | 204 | self.db.per_query_memory_usage() |
204 | } | 205 | } |
206 | pub fn request_cancellation(&mut self) { | ||
207 | self.db.request_cancellation(); | ||
208 | } | ||
205 | pub fn raw_database( | 209 | pub fn raw_database( |
206 | &self, | 210 | &self, |
207 | ) -> &(impl hir::db::HirDatabase + salsa::Database + ra_db::SourceDatabaseExt) { | 211 | ) -> &(impl hir::db::HirDatabase + salsa::Database + ra_db::SourceDatabaseExt) { |
diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs index 5e2fe1905..ebded715d 100644 --- a/crates/ra_ide/src/references.rs +++ b/crates/ra_ide/src/references.rs | |||
@@ -112,25 +112,20 @@ impl IntoIterator for ReferenceSearchResult { | |||
112 | 112 | ||
113 | pub(crate) fn find_all_refs( | 113 | pub(crate) fn find_all_refs( |
114 | db: &RootDatabase, | 114 | db: &RootDatabase, |
115 | mut position: FilePosition, | 115 | position: FilePosition, |
116 | search_scope: Option<SearchScope>, | 116 | search_scope: Option<SearchScope>, |
117 | ) -> Option<RangeInfo<ReferenceSearchResult>> { | 117 | ) -> Option<RangeInfo<ReferenceSearchResult>> { |
118 | let parse = db.parse(position.file_id); | 118 | let parse = db.parse(position.file_id); |
119 | let syntax = parse.tree().syntax().clone(); | 119 | let syntax = parse.tree().syntax().clone(); |
120 | 120 | ||
121 | let token = syntax.token_at_offset(position.offset); | 121 | let (opt_name, search_kind) = |
122 | let mut search_kind = ReferenceKind::Other; | 122 | if let Some(name) = get_struct_def_name_for_struc_litetal_search(&syntax, position) { |
123 | (Some(name), ReferenceKind::StructLiteral) | ||
124 | } else { | ||
125 | (find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other) | ||
126 | }; | ||
123 | 127 | ||
124 | if let TokenAtOffset::Between(ref left, ref right) = token { | 128 | let RangeInfo { range, info: (name, def) } = find_name(db, &syntax, position, opt_name)?; |
125 | if (right.kind() == SyntaxKind::L_CURLY || right.kind() == SyntaxKind::L_PAREN) | ||
126 | && left.kind() != SyntaxKind::IDENT | ||
127 | { | ||
128 | position = FilePosition { offset: left.text_range().start(), ..position }; | ||
129 | search_kind = ReferenceKind::StructLiteral; | ||
130 | } | ||
131 | } | ||
132 | |||
133 | let RangeInfo { range, info: (name, def) } = find_name(db, &syntax, position)?; | ||
134 | 129 | ||
135 | let declaration = match def.kind { | 130 | let declaration = match def.kind { |
136 | NameKind::Macro(mac) => mac.to_nav(db), | 131 | NameKind::Macro(mac) => mac.to_nav(db), |
@@ -170,9 +165,10 @@ fn find_name( | |||
170 | db: &RootDatabase, | 165 | db: &RootDatabase, |
171 | syntax: &SyntaxNode, | 166 | syntax: &SyntaxNode, |
172 | position: FilePosition, | 167 | position: FilePosition, |
168 | opt_name: Option<ast::Name>, | ||
173 | ) -> Option<RangeInfo<(String, NameDefinition)>> { | 169 | ) -> Option<RangeInfo<(String, NameDefinition)>> { |
174 | let mut sb = SourceBinder::new(db); | 170 | let mut sb = SourceBinder::new(db); |
175 | if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) { | 171 | if let Some(name) = opt_name { |
176 | let def = classify_name(&mut sb, InFile::new(position.file_id.into(), &name))?; | 172 | let def = classify_name(&mut sb, InFile::new(position.file_id.into(), &name))?; |
177 | let range = name.syntax().text_range(); | 173 | let range = name.syntax().text_range(); |
178 | return Some(RangeInfo::new(range, (name.text().to_string(), def))); | 174 | return Some(RangeInfo::new(range, (name.text().to_string(), def))); |
@@ -218,15 +214,8 @@ fn process_definition( | |||
218 | if let Some(d) = classify_name_ref(&mut sb, InFile::new(file_id.into(), &name_ref)) | 214 | if let Some(d) = classify_name_ref(&mut sb, InFile::new(file_id.into(), &name_ref)) |
219 | { | 215 | { |
220 | if d == def { | 216 | if d == def { |
221 | let kind = if name_ref | 217 | let kind = if is_record_lit_name_ref(&name_ref) |
222 | .syntax() | 218 | || is_call_expr_name_ref(&name_ref) |
223 | .ancestors() | ||
224 | .find_map(ast::RecordLit::cast) | ||
225 | .and_then(|l| l.path()) | ||
226 | .and_then(|p| p.segment()) | ||
227 | .and_then(|p| p.name_ref()) | ||
228 | .map(|n| n == name_ref) | ||
229 | .unwrap_or(false) | ||
230 | { | 219 | { |
231 | ReferenceKind::StructLiteral | 220 | ReferenceKind::StructLiteral |
232 | } else { | 221 | } else { |
@@ -301,6 +290,49 @@ fn reference_access(kind: &NameKind, name_ref: &ast::NameRef) -> Option<Referenc | |||
301 | mode.or(Some(ReferenceAccess::Read)) | 290 | mode.or(Some(ReferenceAccess::Read)) |
302 | } | 291 | } |
303 | 292 | ||
293 | fn is_record_lit_name_ref(name_ref: &ast::NameRef) -> bool { | ||
294 | name_ref | ||
295 | .syntax() | ||
296 | .ancestors() | ||
297 | .find_map(ast::RecordLit::cast) | ||
298 | .and_then(|l| l.path()) | ||
299 | .and_then(|p| p.segment()) | ||
300 | .map(|p| p.name_ref().as_ref() == Some(name_ref)) | ||
301 | .unwrap_or(false) | ||
302 | } | ||
303 | |||
304 | fn get_struct_def_name_for_struc_litetal_search( | ||
305 | syntax: &SyntaxNode, | ||
306 | position: FilePosition, | ||
307 | ) -> Option<ast::Name> { | ||
308 | if let TokenAtOffset::Between(ref left, ref right) = syntax.token_at_offset(position.offset) { | ||
309 | if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN { | ||
310 | return None; | ||
311 | } | ||
312 | if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, left.text_range().start()) { | ||
313 | return name.syntax().ancestors().find_map(ast::StructDef::cast).and_then(|l| l.name()); | ||
314 | } | ||
315 | if find_node_at_offset::<ast::TypeParamList>(&syntax, left.text_range().start()).is_some() { | ||
316 | return left.ancestors().find_map(ast::StructDef::cast).and_then(|l| l.name()); | ||
317 | } | ||
318 | } | ||
319 | None | ||
320 | } | ||
321 | |||
322 | fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool { | ||
323 | name_ref | ||
324 | .syntax() | ||
325 | .ancestors() | ||
326 | .find_map(ast::CallExpr::cast) | ||
327 | .and_then(|c| match c.expr()? { | ||
328 | ast::Expr::PathExpr(p) => { | ||
329 | Some(p.path()?.segment()?.name_ref().as_ref() == Some(name_ref)) | ||
330 | } | ||
331 | _ => None, | ||
332 | }) | ||
333 | .unwrap_or(false) | ||
334 | } | ||
335 | |||
304 | #[cfg(test)] | 336 | #[cfg(test)] |
305 | mod tests { | 337 | mod tests { |
306 | use crate::{ | 338 | use crate::{ |
@@ -309,7 +341,7 @@ mod tests { | |||
309 | }; | 341 | }; |
310 | 342 | ||
311 | #[test] | 343 | #[test] |
312 | fn test_struct_literal() { | 344 | fn test_struct_literal_after_space() { |
313 | let code = r#" | 345 | let code = r#" |
314 | struct Foo <|>{ | 346 | struct Foo <|>{ |
315 | a: i32, | 347 | a: i32, |
@@ -331,6 +363,58 @@ mod tests { | |||
331 | } | 363 | } |
332 | 364 | ||
333 | #[test] | 365 | #[test] |
366 | fn test_struct_literal_before_space() { | ||
367 | let code = r#" | ||
368 | struct Foo<|> {} | ||
369 | fn main() { | ||
370 | let f: Foo; | ||
371 | f = Foo {}; | ||
372 | }"#; | ||
373 | |||
374 | let refs = get_all_refs(code); | ||
375 | check_result( | ||
376 | refs, | ||
377 | "Foo STRUCT_DEF FileId(1) [5; 18) [12; 15) Other", | ||
378 | &["FileId(1) [54; 57) Other", "FileId(1) [71; 74) StructLiteral"], | ||
379 | ); | ||
380 | } | ||
381 | |||
382 | #[test] | ||
383 | fn test_struct_literal_with_generic_type() { | ||
384 | let code = r#" | ||
385 | struct Foo<T> <|>{} | ||
386 | fn main() { | ||
387 | let f: Foo::<i32>; | ||
388 | f = Foo {}; | ||
389 | }"#; | ||
390 | |||
391 | let refs = get_all_refs(code); | ||
392 | check_result( | ||
393 | refs, | ||
394 | "Foo STRUCT_DEF FileId(1) [5; 21) [12; 15) Other", | ||
395 | &["FileId(1) [81; 84) StructLiteral"], | ||
396 | ); | ||
397 | } | ||
398 | |||
399 | #[test] | ||
400 | fn test_struct_literal_for_tuple() { | ||
401 | let code = r#" | ||
402 | struct Foo<|>(i32); | ||
403 | |||
404 | fn main() { | ||
405 | let f: Foo; | ||
406 | f = Foo(1); | ||
407 | }"#; | ||
408 | |||
409 | let refs = get_all_refs(code); | ||
410 | check_result( | ||
411 | refs, | ||
412 | "Foo STRUCT_DEF FileId(1) [5; 21) [12; 15) Other", | ||
413 | &["FileId(1) [71; 74) StructLiteral"], | ||
414 | ); | ||
415 | } | ||
416 | |||
417 | #[test] | ||
334 | fn test_find_all_refs_for_local() { | 418 | fn test_find_all_refs_for_local() { |
335 | let code = r#" | 419 | let code = r#" |
336 | fn main() { | 420 | fn main() { |
@@ -564,7 +648,7 @@ mod tests { | |||
564 | check_result( | 648 | check_result( |
565 | refs, | 649 | refs, |
566 | "quux FN_DEF FileId(1) [18; 34) [25; 29) Other", | 650 | "quux FN_DEF FileId(1) [18; 34) [25; 29) Other", |
567 | &["FileId(2) [16; 20) Other", "FileId(3) [16; 20) Other"], | 651 | &["FileId(2) [16; 20) StructLiteral", "FileId(3) [16; 20) StructLiteral"], |
568 | ); | 652 | ); |
569 | 653 | ||
570 | let refs = | 654 | let refs = |
@@ -572,7 +656,7 @@ mod tests { | |||
572 | check_result( | 656 | check_result( |
573 | refs, | 657 | refs, |
574 | "quux FN_DEF FileId(1) [18; 34) [25; 29) Other", | 658 | "quux FN_DEF FileId(1) [18; 34) [25; 29) Other", |
575 | &["FileId(3) [16; 20) Other"], | 659 | &["FileId(3) [16; 20) StructLiteral"], |
576 | ); | 660 | ); |
577 | } | 661 | } |
578 | 662 | ||
@@ -591,7 +675,7 @@ mod tests { | |||
591 | check_result( | 675 | check_result( |
592 | refs, | 676 | refs, |
593 | "m1 MACRO_CALL FileId(1) [9; 63) [46; 48) Other", | 677 | "m1 MACRO_CALL FileId(1) [9; 63) [46; 48) Other", |
594 | &["FileId(1) [96; 98) Other", "FileId(1) [114; 116) Other"], | 678 | &["FileId(1) [96; 98) StructLiteral", "FileId(1) [114; 116) StructLiteral"], |
595 | ); | 679 | ); |
596 | } | 680 | } |
597 | 681 | ||
diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs index 7533692f6..8622dd956 100644 --- a/crates/ra_ide/src/runnables.rs +++ b/crates/ra_ide/src/runnables.rs | |||
@@ -43,7 +43,7 @@ fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> { | |||
43 | let name = fn_def.name()?.text().clone(); | 43 | let name = fn_def.name()?.text().clone(); |
44 | let kind = if name == "main" { | 44 | let kind = if name == "main" { |
45 | RunnableKind::Bin | 45 | RunnableKind::Bin |
46 | } else if fn_def.has_atom_attr("test") { | 46 | } else if has_test_related_attribute(&fn_def) { |
47 | RunnableKind::Test { name: name.to_string() } | 47 | RunnableKind::Test { name: name.to_string() } |
48 | } else if fn_def.has_atom_attr("bench") { | 48 | } else if fn_def.has_atom_attr("bench") { |
49 | RunnableKind::Bench { name: name.to_string() } | 49 | RunnableKind::Bench { name: name.to_string() } |
@@ -53,6 +53,20 @@ fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> { | |||
53 | Some(Runnable { range: fn_def.syntax().text_range(), kind }) | 53 | Some(Runnable { range: fn_def.syntax().text_range(), kind }) |
54 | } | 54 | } |
55 | 55 | ||
56 | /// This is a heuristic to support test functions annotated with custom test attributes, such as | ||
57 | /// `#[test_case(...)]`, `#[tokio::test]` and similar. | ||
58 | /// The regular `#[test]` annotation is also supported. | ||
59 | /// | ||
60 | /// It may produce false positives; for example, `#[wasm_bindgen_test]` requires a different command to run the test, | ||
61 | /// but that is better than not having runnables for the tests at all. | ||
62 | fn has_test_related_attribute(fn_def: &ast::FnDef) -> bool { | ||
63 | fn_def | ||
64 | .attrs() | ||
65 | .filter_map(|attr| attr.path()) | ||
66 | .map(|path| path.syntax().to_string().to_lowercase()) | ||
67 | .any(|attribute_text| attribute_text.contains("test")) | ||
68 | } | ||
69 | |||
56 | fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> { | 70 | fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> { |
57 | let has_test_function = module | 71 | let has_test_function = module |
58 | .item_list()? | 72 | .item_list()? |
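As its doc comment says, the check above is deliberately loose: any attribute whose path contains `test` (case-insensitively) marks the function as a runnable test, which also catches `#[tokio::test]` and `#[test_case(...)]`. A standalone approximation over plain attribute-path strings (the real code walks `ast::Attr` nodes) behaves like this:

```rust
// Toy version of has_test_related_attribute operating on attribute paths as strings.
fn is_test_related(attr_paths: &[&str]) -> bool {
    attr_paths.iter().any(|path| path.to_lowercase().contains("test"))
}

fn main() {
    assert!(is_test_related(&["test"]));              // #[test]
    assert!(is_test_related(&["tokio::test"]));       // #[tokio::test]
    assert!(is_test_related(&["test_case"]));         // #[test_case(...)]
    assert!(is_test_related(&["wasm_bindgen_test"])); // the false positive the doc comment mentions
    assert!(!is_test_related(&["inline"]));           // ordinary attributes are ignored
}
```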
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html index 1d130544f..1cc55e78b 100644 --- a/crates/ra_ide/src/snapshots/highlighting.html +++ b/crates/ra_ide/src/snapshots/highlighting.html | |||
@@ -34,6 +34,16 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
34 | <span class="function">foo</span>::<<span class="type.builtin">i32</span>>(); | 34 | <span class="function">foo</span>::<<span class="type.builtin">i32</span>>(); |
35 | } | 35 | } |
36 | 36 | ||
37 | <span class="macro">macro_rules</span><span class="macro">!</span> def_fn { | ||
38 | ($($tt:tt)*) => {$($tt)*} | ||
39 | } | ||
40 | |||
41 | <span class="macro">def_fn</span><span class="macro">!</span>{ | ||
42 | <span class="keyword">fn</span> <span class="function">bar</span>() -> <span class="type.builtin">u32</span> { | ||
43 | <span class="literal.numeric">100</span> | ||
44 | } | ||
45 | } | ||
46 | |||
37 | <span class="comment">// comment</span> | 47 | <span class="comment">// comment</span> |
38 | <span class="keyword">fn</span> <span class="function">main</span>() { | 48 | <span class="keyword">fn</span> <span class="function">main</span>() { |
39 | <span class="macro">println</span><span class="macro">!</span>(<span class="string">"Hello, {}!"</span>, <span class="literal.numeric">92</span>); | 49 | <span class="macro">println</span><span class="macro">!</span>(<span class="string">"Hello, {}!"</span>, <span class="literal.numeric">92</span>); |
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html index d90ee8540..918fd4b97 100644 --- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html +++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html | |||
@@ -24,14 +24,14 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
24 | .keyword\.control { color: #F0DFAF; font-weight: bold; } | 24 | .keyword\.control { color: #F0DFAF; font-weight: bold; } |
25 | </style> | 25 | </style> |
26 | <pre><code><span class="keyword">fn</span> <span class="function">main</span>() { | 26 | <pre><code><span class="keyword">fn</span> <span class="function">main</span>() { |
27 | <span class="keyword">let</span> <span class="variable" data-binding-hash="8723171760279909834" style="color: hsl(307,91%,75%);">hello</span> = <span class="string">"hello"</span>; | 27 | <span class="keyword">let</span> <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>; |
28 | <span class="keyword">let</span> <span class="variable" data-binding-hash="14702933417323009544" style="color: hsl(108,90%,49%);">x</span> = <span class="variable" data-binding-hash="8723171760279909834" style="color: hsl(307,91%,75%);">hello</span>.to_string(); | 28 | <span class="keyword">let</span> <span class="variable" data-binding-hash="4303609361109701698" style="color: hsl(242,75%,88%);">x</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string(); |
29 | <span class="keyword">let</span> <span class="variable" data-binding-hash="5443150872754369068" style="color: hsl(215,43%,43%);">y</span> = <span class="variable" data-binding-hash="8723171760279909834" style="color: hsl(307,91%,75%);">hello</span>.to_string(); | 29 | <span class="keyword">let</span> <span class="variable" data-binding-hash="13865792086344377029" style="color: hsl(340,64%,86%);">y</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string(); |
30 | 30 | ||
31 | <span class="keyword">let</span> <span class="variable" data-binding-hash="17358108296605513516" style="color: hsl(331,46%,60%);">x</span> = <span class="string">"other color please!"</span>; | 31 | <span class="keyword">let</span> <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span> = <span class="string">"other color please!"</span>; |
32 | <span class="keyword">let</span> <span class="variable" data-binding-hash="2073121142529774969" style="color: hsl(320,43%,74%);">y</span> = <span class="variable" data-binding-hash="17358108296605513516" style="color: hsl(331,46%,60%);">x</span>.to_string(); | 32 | <span class="keyword">let</span> <span class="variable" data-binding-hash="12461245066629867975" style="color: hsl(132,91%,68%);">y</span> = <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span>.to_string(); |
33 | } | 33 | } |
34 | 34 | ||
35 | <span class="keyword">fn</span> <span class="function">bar</span>() { | 35 | <span class="keyword">fn</span> <span class="function">bar</span>() { |
36 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="8723171760279909834" style="color: hsl(307,91%,75%);">hello</span> = <span class="string">"hello"</span>; | 36 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>; |
37 | }</code></pre> \ No newline at end of file | 37 | }</code></pre> \ No newline at end of file |
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs index 0411977b9..530b984fc 100644 --- a/crates/ra_ide/src/syntax_highlighting.rs +++ b/crates/ra_ide/src/syntax_highlighting.rs | |||
@@ -1,14 +1,18 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use rustc_hash::{FxHashMap, FxHashSet}; | 3 | use rustc_hash::FxHashMap; |
4 | 4 | ||
5 | use hir::{InFile, Name, SourceBinder}; | 5 | use hir::{HirFileId, InFile, Name, SourceAnalyzer, SourceBinder}; |
6 | use ra_db::SourceDatabase; | 6 | use ra_db::SourceDatabase; |
7 | use ra_prof::profile; | 7 | use ra_prof::profile; |
8 | use ra_syntax::{ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, TextRange, T}; | 8 | use ra_syntax::{ |
9 | ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, TextRange, | ||
10 | WalkEvent, T, | ||
11 | }; | ||
9 | 12 | ||
10 | use crate::{ | 13 | use crate::{ |
11 | db::RootDatabase, | 14 | db::RootDatabase, |
15 | expand::descend_into_macros_with_analyzer, | ||
12 | references::{ | 16 | references::{ |
13 | classify_name, classify_name_ref, | 17 | classify_name, classify_name_ref, |
14 | NameKind::{self, *}, | 18 | NameKind::{self, *}, |
@@ -72,121 +76,186 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa | |||
72 | let parse = db.parse(file_id); | 76 | let parse = db.parse(file_id); |
73 | let root = parse.tree().syntax().clone(); | 77 | let root = parse.tree().syntax().clone(); |
74 | 78 | ||
75 | fn calc_binding_hash(file_id: FileId, name: &Name, shadow_count: u32) -> u64 { | 79 | let mut sb = SourceBinder::new(db); |
76 | fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { | 80 | let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); |
77 | use std::{collections::hash_map::DefaultHasher, hash::Hasher}; | 81 | let mut res = Vec::new(); |
82 | let analyzer = sb.analyze(InFile::new(file_id.into(), &root), None); | ||
78 | 83 | ||
79 | let mut hasher = DefaultHasher::new(); | 84 | let mut in_macro_call = None; |
80 | x.hash(&mut hasher); | 85 | |
81 | hasher.finish() | 86 | for event in root.preorder_with_tokens() { |
87 | match event { | ||
88 | WalkEvent::Enter(node) => match node.kind() { | ||
89 | MACRO_CALL => { | ||
90 | in_macro_call = Some(node.clone()); | ||
91 | if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) { | ||
92 | res.push(HighlightedRange { range, tag: tags::MACRO, binding_hash: None }); | ||
93 | } | ||
94 | } | ||
95 | _ if in_macro_call.is_some() => { | ||
96 | if let Some(token) = node.as_token() { | ||
97 | if let Some((tag, binding_hash)) = highlight_token_tree( | ||
98 | db, | ||
99 | &mut sb, | ||
100 | &analyzer, | ||
101 | &mut bindings_shadow_count, | ||
102 | InFile::new(file_id.into(), token.clone()), | ||
103 | ) { | ||
104 | res.push(HighlightedRange { | ||
105 | range: node.text_range(), | ||
106 | tag, | ||
107 | binding_hash, | ||
108 | }); | ||
109 | } | ||
110 | } | ||
111 | } | ||
112 | _ => { | ||
113 | if let Some((tag, binding_hash)) = highlight_node( | ||
114 | db, | ||
115 | &mut sb, | ||
116 | &mut bindings_shadow_count, | ||
117 | InFile::new(file_id.into(), node.clone()), | ||
118 | ) { | ||
119 | res.push(HighlightedRange { range: node.text_range(), tag, binding_hash }); | ||
120 | } | ||
121 | } | ||
122 | }, | ||
123 | WalkEvent::Leave(node) => { | ||
124 | if let Some(m) = in_macro_call.as_ref() { | ||
125 | if *m == node { | ||
126 | in_macro_call = None; | ||
127 | } | ||
128 | } | ||
129 | } | ||
82 | } | 130 | } |
131 | } | ||
83 | 132 | ||
84 | hash((file_id, name, shadow_count)) | 133 | res |
134 | } | ||
135 | |||
136 | fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> { | ||
137 | let macro_call = ast::MacroCall::cast(node.value.as_node()?.clone())?; | ||
138 | let path = macro_call.path()?; | ||
139 | let name_ref = path.segment()?.name_ref()?; | ||
140 | |||
141 | let range_start = name_ref.syntax().text_range().start(); | ||
142 | let mut range_end = name_ref.syntax().text_range().end(); | ||
143 | for sibling in path.syntax().siblings_with_tokens(Direction::Next) { | ||
144 | match sibling.kind() { | ||
145 | T![!] | IDENT => range_end = sibling.text_range().end(), | ||
146 | _ => (), | ||
147 | } | ||
85 | } | 148 | } |
86 | 149 | ||
87 | let mut sb = SourceBinder::new(db); | 150 | Some(TextRange::from_to(range_start, range_end)) |
151 | } | ||
88 | 152 | ||
89 | // Visited nodes to handle highlighting priorities | 153 | fn highlight_token_tree( |
90 | // FIXME: retain only ranges here | 154 | db: &RootDatabase, |
91 | let mut highlighted: FxHashSet<SyntaxElement> = FxHashSet::default(); | 155 | sb: &mut SourceBinder<RootDatabase>, |
92 | let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); | 156 | analyzer: &SourceAnalyzer, |
157 | bindings_shadow_count: &mut FxHashMap<Name, u32>, | ||
158 | token: InFile<SyntaxToken>, | ||
159 | ) -> Option<(&'static str, Option<u64>)> { | ||
160 | if token.value.parent().kind() != TOKEN_TREE { | ||
161 | return None; | ||
162 | } | ||
163 | let token = descend_into_macros_with_analyzer(db, analyzer, token); | ||
164 | let expanded = { | ||
165 | let parent = token.value.parent(); | ||
166 | // We only care about Name and NameRef here | ||
167 | match (token.value.kind(), parent.kind()) { | ||
168 | (IDENT, NAME) | (IDENT, NAME_REF) => token.with_value(parent.into()), | ||
169 | _ => token.map(|it| it.into()), | ||
170 | } | ||
171 | }; | ||
93 | 172 | ||
94 | let mut res = Vec::new(); | 173 | highlight_node(db, sb, bindings_shadow_count, expanded) |
95 | for node in root.descendants_with_tokens() { | 174 | } |
96 | if highlighted.contains(&node) { | 175 | |
97 | continue; | 176 | fn highlight_node( |
177 | db: &RootDatabase, | ||
178 | sb: &mut SourceBinder<RootDatabase>, | ||
179 | bindings_shadow_count: &mut FxHashMap<Name, u32>, | ||
180 | node: InFile<SyntaxElement>, | ||
181 | ) -> Option<(&'static str, Option<u64>)> { | ||
182 | let mut binding_hash = None; | ||
183 | let tag = match node.value.kind() { | ||
184 | FN_DEF => { | ||
185 | bindings_shadow_count.clear(); | ||
186 | return None; | ||
98 | } | 187 | } |
99 | let mut binding_hash = None; | 188 | COMMENT => tags::LITERAL_COMMENT, |
100 | let tag = match node.kind() { | 189 | STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING, |
101 | FN_DEF => { | 190 | ATTR => tags::LITERAL_ATTRIBUTE, |
102 | bindings_shadow_count.clear(); | 191 | // Special-case field init shorthand |
103 | continue; | 192 | NAME_REF if node.value.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD, |
104 | } | 193 | NAME_REF if node.value.ancestors().any(|it| it.kind() == ATTR) => return None, |
105 | COMMENT => tags::LITERAL_COMMENT, | 194 | NAME_REF => { |
106 | STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING, | 195 | let name_ref = node.value.as_node().cloned().and_then(ast::NameRef::cast).unwrap(); |
107 | ATTR => tags::LITERAL_ATTRIBUTE, | 196 | let name_kind = classify_name_ref(sb, node.with_value(&name_ref)).map(|d| d.kind); |
108 | // Special-case field init shorthand | 197 | match name_kind { |
109 | NAME_REF if node.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD, | 198 | Some(name_kind) => { |
110 | NAME_REF if node.ancestors().any(|it| it.kind() == ATTR) => continue, | 199 | if let Local(local) = &name_kind { |
111 | NAME_REF => { | 200 | if let Some(name) = local.name(db) { |
112 | let name_ref = node.as_node().cloned().and_then(ast::NameRef::cast).unwrap(); | 201 | let shadow_count = |
113 | let name_kind = classify_name_ref(&mut sb, InFile::new(file_id.into(), &name_ref)) | 202 | bindings_shadow_count.entry(name.clone()).or_default(); |
114 | .map(|d| d.kind); | 203 | binding_hash = |
115 | match name_kind { | 204 | Some(calc_binding_hash(node.file_id, &name, *shadow_count)) |
116 | Some(name_kind) => { | 205 | } |
117 | if let Local(local) = &name_kind { | 206 | }; |
118 | if let Some(name) = local.name(db) { | 207 | |
119 | let shadow_count = | 208 | highlight_name(db, name_kind) |
120 | bindings_shadow_count.entry(name.clone()).or_default(); | ||
121 | binding_hash = | ||
122 | Some(calc_binding_hash(file_id, &name, *shadow_count)) | ||
123 | } | ||
124 | }; | ||
125 | |||
126 | highlight_name(db, name_kind) | ||
127 | } | ||
128 | _ => continue, | ||
129 | } | ||
130 | } | ||
131 | NAME => { | ||
132 | let name = node.as_node().cloned().and_then(ast::Name::cast).unwrap(); | ||
133 | let name_kind = | ||
134 | classify_name(&mut sb, InFile::new(file_id.into(), &name)).map(|d| d.kind); | ||
135 | |||
136 | if let Some(Local(local)) = &name_kind { | ||
137 | if let Some(name) = local.name(db) { | ||
138 | let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); | ||
139 | *shadow_count += 1; | ||
140 | binding_hash = Some(calc_binding_hash(file_id, &name, *shadow_count)) | ||
141 | } | ||
142 | }; | ||
143 | |||
144 | match name_kind { | ||
145 | Some(name_kind) => highlight_name(db, name_kind), | ||
146 | None => name.syntax().parent().map_or(tags::FUNCTION, |x| match x.kind() { | ||
147 | STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF => tags::TYPE, | ||
148 | TYPE_PARAM => tags::TYPE_PARAM, | ||
149 | RECORD_FIELD_DEF => tags::FIELD, | ||
150 | _ => tags::FUNCTION, | ||
151 | }), | ||
152 | } | 209 | } |
210 | _ => return None, | ||
153 | } | 211 | } |
154 | INT_NUMBER | FLOAT_NUMBER => tags::LITERAL_NUMERIC, | 212 | } |
155 | BYTE => tags::LITERAL_BYTE, | 213 | NAME => { |
156 | CHAR => tags::LITERAL_CHAR, | 214 | let name = node.value.as_node().cloned().and_then(ast::Name::cast).unwrap(); |
157 | LIFETIME => tags::TYPE_LIFETIME, | 215 | let name_kind = classify_name(sb, node.with_value(&name)).map(|d| d.kind); |
158 | T![unsafe] => tags::KEYWORD_UNSAFE, | 216 | |
159 | k if is_control_keyword(k) => tags::KEYWORD_CONTROL, | 217 | if let Some(Local(local)) = &name_kind { |
160 | k if k.is_keyword() => tags::KEYWORD, | 218 | if let Some(name) = local.name(db) { |
161 | _ => { | 219 | let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); |
162 | if let Some(macro_call) = node.as_node().cloned().and_then(ast::MacroCall::cast) { | 220 | *shadow_count += 1; |
163 | if let Some(path) = macro_call.path() { | 221 | binding_hash = Some(calc_binding_hash(node.file_id, &name, *shadow_count)) |
164 | if let Some(segment) = path.segment() { | ||
165 | if let Some(name_ref) = segment.name_ref() { | ||
166 | highlighted.insert(name_ref.syntax().clone().into()); | ||
167 | let range_start = name_ref.syntax().text_range().start(); | ||
168 | let mut range_end = name_ref.syntax().text_range().end(); | ||
169 | for sibling in path.syntax().siblings_with_tokens(Direction::Next) { | ||
170 | match sibling.kind() { | ||
171 | T![!] | IDENT => range_end = sibling.text_range().end(), | ||
172 | _ => (), | ||
173 | } | ||
174 | } | ||
175 | res.push(HighlightedRange { | ||
176 | range: TextRange::from_to(range_start, range_end), | ||
177 | tag: tags::MACRO, | ||
178 | binding_hash: None, | ||
179 | }) | ||
180 | } | ||
181 | } | ||
182 | } | ||
183 | } | 222 | } |
184 | continue; | 223 | }; |
224 | |||
225 | match name_kind { | ||
226 | Some(name_kind) => highlight_name(db, name_kind), | ||
227 | None => name.syntax().parent().map_or(tags::FUNCTION, |x| match x.kind() { | ||
228 | STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF => tags::TYPE, | ||
229 | TYPE_PARAM => tags::TYPE_PARAM, | ||
230 | RECORD_FIELD_DEF => tags::FIELD, | ||
231 | _ => tags::FUNCTION, | ||
232 | }), | ||
185 | } | 233 | } |
186 | }; | 234 | } |
187 | res.push(HighlightedRange { range: node.text_range(), tag, binding_hash }) | 235 | INT_NUMBER | FLOAT_NUMBER => tags::LITERAL_NUMERIC, |
236 | BYTE => tags::LITERAL_BYTE, | ||
237 | CHAR => tags::LITERAL_CHAR, | ||
238 | LIFETIME => tags::TYPE_LIFETIME, | ||
239 | T![unsafe] => tags::KEYWORD_UNSAFE, | ||
240 | k if is_control_keyword(k) => tags::KEYWORD_CONTROL, | ||
241 | k if k.is_keyword() => tags::KEYWORD, | ||
242 | |||
243 | _ => return None, | ||
244 | }; | ||
245 | |||
246 | return Some((tag, binding_hash)); | ||
247 | |||
248 | fn calc_binding_hash(file_id: HirFileId, name: &Name, shadow_count: u32) -> u64 { | ||
249 | fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { | ||
250 | use std::{collections::hash_map::DefaultHasher, hash::Hasher}; | ||
251 | |||
252 | let mut hasher = DefaultHasher::new(); | ||
253 | x.hash(&mut hasher); | ||
254 | hasher.finish() | ||
255 | } | ||
256 | |||
257 | hash((file_id, name, shadow_count)) | ||
188 | } | 258 | } |
189 | res | ||
190 | } | 259 | } |
191 | 260 | ||
192 | pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { | 261 | pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { |
@@ -331,6 +400,16 @@ fn foo<T>() -> T { | |||
331 | foo::<i32>(); | 400 | foo::<i32>(); |
332 | } | 401 | } |
333 | 402 | ||
403 | macro_rules! def_fn { | ||
404 | ($($tt:tt)*) => {$($tt)*} | ||
405 | } | ||
406 | |||
407 | def_fn!{ | ||
408 | fn bar() -> u32 { | ||
409 | 100 | ||
410 | } | ||
411 | } | ||
412 | |||
334 | // comment | 413 | // comment |
335 | fn main() { | 414 | fn main() { |
336 | println!("Hello, {}!", 92); | 415 | println!("Hello, {}!", 92); |
diff --git a/crates/ra_lsp_server/Cargo.toml b/crates/ra_lsp_server/Cargo.toml index 579158780..fdf81ed87 100644 --- a/crates/ra_lsp_server/Cargo.toml +++ b/crates/ra_lsp_server/Cargo.toml | |||
@@ -14,7 +14,7 @@ serde_json = "1.0.34" | |||
14 | serde = { version = "1.0.83", features = ["derive"] } | 14 | serde = { version = "1.0.83", features = ["derive"] } |
15 | crossbeam-channel = "0.4" | 15 | crossbeam-channel = "0.4" |
16 | log = "0.4.3" | 16 | log = "0.4.3" |
17 | lsp-types = { version = "0.69.0", features = ["proposed"] } | 17 | lsp-types = { version = "0.70.0", features = ["proposed"] } |
18 | rustc-hash = "1.0" | 18 | rustc-hash = "1.0" |
19 | parking_lot = "0.10.0" | 19 | parking_lot = "0.10.0" |
20 | jod-thread = "0.1.0" | 20 | jod-thread = "0.1.0" |
@@ -26,10 +26,13 @@ lsp-server = "0.3.0" | |||
26 | ra_project_model = { path = "../ra_project_model" } | 26 | ra_project_model = { path = "../ra_project_model" } |
27 | ra_prof = { path = "../ra_prof" } | 27 | ra_prof = { path = "../ra_prof" } |
28 | ra_vfs_glob = { path = "../ra_vfs_glob" } | 28 | ra_vfs_glob = { path = "../ra_vfs_glob" } |
29 | env_logger = { version = "0.7.1", default-features = false, features = ["humantime"] } | 29 | env_logger = { version = "0.7.1", default-features = false } |
30 | ra_cargo_watch = { path = "../ra_cargo_watch" } | 30 | ra_cargo_watch = { path = "../ra_cargo_watch" } |
31 | either = "1.5" | 31 | either = "1.5" |
32 | 32 | ||
33 | [target.'cfg(windows)'.dependencies] | ||
34 | winapi = "0.3" | ||
35 | |||
33 | [dev-dependencies] | 36 | [dev-dependencies] |
34 | tempfile = "3" | 37 | tempfile = "3" |
35 | test_utils = { path = "../test_utils" } | 38 | test_utils = { path = "../test_utils" } |
diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs index 7822be2e2..d850ded37 100644 --- a/crates/ra_lsp_server/src/main_loop.rs +++ b/crates/ra_lsp_server/src/main_loop.rs | |||
@@ -5,11 +5,18 @@ mod handlers; | |||
5 | mod subscriptions; | 5 | mod subscriptions; |
6 | pub(crate) mod pending_requests; | 6 | pub(crate) mod pending_requests; |
7 | 7 | ||
8 | use std::{error::Error, fmt, panic, path::PathBuf, sync::Arc, time::Instant}; | 8 | use std::{ |
9 | env, | ||
10 | error::Error, | ||
11 | fmt, panic, | ||
12 | path::PathBuf, | ||
13 | sync::Arc, | ||
14 | time::{Duration, Instant}, | ||
15 | }; | ||
9 | 16 | ||
10 | use crossbeam_channel::{select, unbounded, RecvError, Sender}; | 17 | use crossbeam_channel::{select, unbounded, RecvError, Sender}; |
11 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; | 18 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; |
12 | use lsp_types::{ClientCapabilities, NumberOrString, Url}; | 19 | use lsp_types::{ClientCapabilities, NumberOrString}; |
13 | use ra_cargo_watch::{CheckOptions, CheckTask}; | 20 | use ra_cargo_watch::{CheckOptions, CheckTask}; |
14 | use ra_ide::{Canceled, FeatureFlags, FileId, LibraryData, SourceRootId}; | 21 | use ra_ide::{Canceled, FeatureFlags, FileId, LibraryData, SourceRootId}; |
15 | use ra_prof::profile; | 22 | use ra_prof::profile; |
@@ -29,9 +36,6 @@ use crate::{ | |||
29 | Result, ServerConfig, | 36 | Result, ServerConfig, |
30 | }; | 37 | }; |
31 | 38 | ||
32 | const THREADPOOL_SIZE: usize = 8; | ||
33 | const MAX_IN_FLIGHT_LIBS: usize = THREADPOOL_SIZE - 3; | ||
34 | |||
35 | #[derive(Debug)] | 39 | #[derive(Debug)] |
36 | pub struct LspError { | 40 | pub struct LspError { |
37 | pub code: i32, | 41 | pub code: i32, |
@@ -60,6 +64,25 @@ pub fn main_loop( | |||
60 | ) -> Result<()> { | 64 | ) -> Result<()> { |
61 | log::info!("server_config: {:#?}", config); | 65 | log::info!("server_config: {:#?}", config); |
62 | 66 | ||
67 | // Windows scheduler implements priority boosts: if thread waits for an | ||
68 | // event (like a condvar), and event fires, priority of the thread is | ||
69 | // temporary bumped. This optimization backfires in our case: each time the | ||
70 | // `main_loop` schedules a task to run on a threadpool, the worker threads | ||
72 | // get a higher priority, and (on a machine with fewer cores) displace the | ||
73 | // main loop! We work around this by marking the main loop as a | ||
73 | // higher-priority thread. | ||
74 | // | ||
75 | // https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities | ||
76 | // https://docs.microsoft.com/en-us/windows/win32/procthread/priority-boosts | ||
77 | // https://github.com/rust-analyzer/rust-analyzer/issues/2835 | ||
78 | #[cfg(windows)] | ||
79 | unsafe { | ||
80 | use winapi::um::processthreadsapi::*; | ||
81 | let thread = GetCurrentThread(); | ||
82 | let thread_priority_above_normal = 1; | ||
83 | SetThreadPriority(thread, thread_priority_above_normal); | ||
84 | } | ||
85 | |||
63 | let mut loop_state = LoopState::default(); | 86 | let mut loop_state = LoopState::default(); |
64 | let mut world_state = { | 87 | let mut world_state = { |
65 | let feature_flags = { | 88 | let feature_flags = { |
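The comment block in this hunk explains why the main loop raises its own priority on Windows. The following standalone sketch performs the same call with a named constant rather than the literal `1` used above; it assumes `winapi = { version = "0.3", features = ["processthreadsapi", "winbase"] }` is available, whereas the hunk relies on whatever features the workspace enables.

```rust
// Sketch only: bump the current thread's priority on Windows so the worker
// threads' priority boosts cannot displace the main loop.
#[cfg(windows)]
fn bump_main_thread_priority() {
    use winapi::um::processthreadsapi::{GetCurrentThread, SetThreadPriority};
    use winapi::um::winbase::THREAD_PRIORITY_ABOVE_NORMAL;

    unsafe {
        // GetCurrentThread returns a pseudo-handle; no cleanup is required.
        SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_ABOVE_NORMAL as i32);
    }
}

#[cfg(not(windows))]
fn bump_main_thread_priority() {
    // No-op elsewhere; the priority-boost quirk is Windows-specific.
}

fn main() {
    bump_main_thread_priority();
    // ... the event loop would run here ...
}
```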
@@ -168,7 +191,7 @@ pub fn main_loop( | |||
168 | ) | 191 | ) |
169 | }; | 192 | }; |
170 | 193 | ||
171 | let pool = ThreadPool::new(THREADPOOL_SIZE); | 194 | let pool = ThreadPool::default(); |
172 | let (task_sender, task_receiver) = unbounded::<Task>(); | 195 | let (task_sender, task_receiver) = unbounded::<Task>(); |
173 | let (libdata_sender, libdata_receiver) = unbounded::<LibraryData>(); | 196 | let (libdata_sender, libdata_receiver) = unbounded::<LibraryData>(); |
174 | 197 | ||
@@ -210,7 +233,7 @@ pub fn main_loop( | |||
210 | )?; | 233 | )?; |
211 | } | 234 | } |
212 | } | 235 | } |
213 | 236 | world_state.analysis_host.request_cancellation(); | |
214 | log::info!("waiting for tasks to finish..."); | 237 | log::info!("waiting for tasks to finish..."); |
215 | task_receiver.into_iter().for_each(|task| { | 238 | task_receiver.into_iter().for_each(|task| { |
216 | on_task(task, &connection.sender, &mut loop_state.pending_requests, &mut world_state) | 239 | on_task(task, &connection.sender, &mut loop_state.pending_requests, &mut world_state) |
@@ -336,7 +359,7 @@ fn loop_turn( | |||
336 | world_state.maybe_collect_garbage(); | 359 | world_state.maybe_collect_garbage(); |
337 | loop_state.in_flight_libraries -= 1; | 360 | loop_state.in_flight_libraries -= 1; |
338 | } | 361 | } |
339 | Event::CheckWatcher(task) => on_check_task(task, world_state, task_sender)?, | 362 | Event::CheckWatcher(task) => on_check_task(pool, task, world_state, task_sender)?, |
340 | Event::Msg(msg) => match msg { | 363 | Event::Msg(msg) => match msg { |
341 | Message::Request(req) => on_request( | 364 | Message::Request(req) => on_request( |
342 | world_state, | 365 | world_state, |
@@ -371,7 +394,8 @@ fn loop_turn( | |||
371 | loop_state.pending_libraries.extend(changes); | 394 | loop_state.pending_libraries.extend(changes); |
372 | } | 395 | } |
373 | 396 | ||
374 | while loop_state.in_flight_libraries < MAX_IN_FLIGHT_LIBS | 397 | let max_in_flight_libs = pool.max_count().saturating_sub(2).max(1); |
398 | while loop_state.in_flight_libraries < max_in_flight_libs | ||
375 | && !loop_state.pending_libraries.is_empty() | 399 | && !loop_state.pending_libraries.is_empty() |
376 | { | 400 | { |
377 | let (root, files) = loop_state.pending_libraries.pop().unwrap(); | 401 | let (root, files) = loop_state.pending_libraries.pop().unwrap(); |
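With the fixed `MAX_IN_FLIGHT_LIBS` constant gone, the cap is now derived from the pool built by `ThreadPool::default()` (which, in the `threadpool` crate, sizes itself to the number of CPUs). A tiny sketch of the arithmetic, pulled into a hypothetical helper for clarity:

```rust
// How many library roots may be indexed concurrently: leave two workers free
// for latency-sensitive requests, but never drop to zero.
fn max_in_flight_libs(pool_size: usize) -> usize {
    pool_size.saturating_sub(2).max(1)
}

fn main() {
    assert_eq!(max_in_flight_libs(8), 6); // roomy pool: keep two threads free
    assert_eq!(max_in_flight_libs(2), 1); // small pool: still make progress
    assert_eq!(max_in_flight_libs(1), 1); // saturating_sub avoids underflow
}
```

Compared with the old `THREADPOOL_SIZE - 3`, this scales with the machine and still indexes one library at a time on very small pools.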
@@ -408,6 +432,19 @@ fn loop_turn( | |||
408 | loop_state.subscriptions.subscriptions(), | 432 | loop_state.subscriptions.subscriptions(), |
409 | ) | 433 | ) |
410 | } | 434 | } |
435 | |||
436 | let loop_duration = loop_start.elapsed(); | ||
437 | if loop_duration > Duration::from_millis(100) { | ||
438 | log::error!("overly long loop turn: {:?}", loop_duration); | ||
439 | if env::var("RA_PROFILE").is_ok() { | ||
440 | show_message( | ||
441 | req::MessageType::Error, | ||
442 | format!("overly long loop turn: {:?}", loop_duration), | ||
443 | &connection.sender, | ||
444 | ); | ||
445 | } | ||
446 | } | ||
447 | |||
411 | Ok(()) | 448 | Ok(()) |
412 | } | 449 | } |
413 | 450 | ||
@@ -435,7 +472,7 @@ fn on_request( | |||
435 | world: &mut WorldState, | 472 | world: &mut WorldState, |
436 | pending_requests: &mut PendingRequests, | 473 | pending_requests: &mut PendingRequests, |
437 | pool: &ThreadPool, | 474 | pool: &ThreadPool, |
438 | sender: &Sender<Task>, | 475 | task_sender: &Sender<Task>, |
439 | msg_sender: &Sender<Message>, | 476 | msg_sender: &Sender<Message>, |
440 | request_received: Instant, | 477 | request_received: Instant, |
441 | req: Request, | 478 | req: Request, |
@@ -444,7 +481,7 @@ fn on_request( | |||
444 | req: Some(req), | 481 | req: Some(req), |
445 | pool, | 482 | pool, |
446 | world, | 483 | world, |
447 | sender, | 484 | task_sender, |
448 | msg_sender, | 485 | msg_sender, |
449 | pending_requests, | 486 | pending_requests, |
450 | request_received, | 487 | request_received, |
@@ -585,31 +622,23 @@ fn on_notification( | |||
585 | } | 622 | } |
586 | 623 | ||
587 | fn on_check_task( | 624 | fn on_check_task( |
625 | pool: &ThreadPool, | ||
588 | task: CheckTask, | 626 | task: CheckTask, |
589 | world_state: &WorldState, | 627 | world_state: &mut WorldState, |
590 | task_sender: &Sender<Task>, | 628 | task_sender: &Sender<Task>, |
591 | ) -> Result<()> { | 629 | ) -> Result<()> { |
592 | match task { | 630 | let urls = match task { |
593 | CheckTask::ClearDiagnostics => { | 631 | CheckTask::ClearDiagnostics => { |
594 | let cleared_files = world_state.check_watcher.state.write().clear(); | 632 | let state = Arc::get_mut(&mut world_state.check_watcher.state) |
595 | 633 | .expect("couldn't get check watcher state as mutable"); | |
596 | // Send updated diagnostics for each cleared file | 634 | state.clear() |
597 | for url in cleared_files { | ||
598 | publish_diagnostics_for_url(&url, world_state, task_sender)?; | ||
599 | } | ||
600 | } | 635 | } |
601 | 636 | ||
602 | CheckTask::AddDiagnostic(url, diagnostic) => { | 637 | CheckTask::AddDiagnostic(url, diagnostic) => { |
603 | world_state | 638 | let state = Arc::get_mut(&mut world_state.check_watcher.state) |
604 | .check_watcher | 639 | .expect("couldn't get check watcher state as mutable"); |
605 | .state | 640 | state.add_diagnostic_with_fixes(url.clone(), diagnostic); |
606 | .write() | 641 | vec![url] |
607 | .add_diagnostic_with_fixes(url.clone(), diagnostic); | ||
608 | |||
609 | // We manually send a diagnostic update when the watcher asks | ||
610 | // us to, to avoid the issue of having to change the file to | ||
611 | // receive updated diagnostics. | ||
612 | publish_diagnostics_for_url(&url, world_state, task_sender)?; | ||
613 | } | 642 | } |
614 | 643 | ||
615 | CheckTask::Status(progress) => { | 644 | CheckTask::Status(progress) => { |
@@ -619,22 +648,30 @@ fn on_check_task( | |||
619 | }; | 648 | }; |
620 | let not = notification_new::<req::Progress>(params); | 649 | let not = notification_new::<req::Progress>(params); |
621 | task_sender.send(Task::Notify(not)).unwrap(); | 650 | task_sender.send(Task::Notify(not)).unwrap(); |
651 | Vec::new() | ||
622 | } | 652 | } |
623 | } | 653 | }; |
624 | Ok(()) | 654 | |
625 | } | 655 | let subscriptions = urls |
656 | .into_iter() | ||
657 | .map(|url| { | ||
658 | let path = url.to_file_path().map_err(|()| format!("invalid uri: {}", url))?; | ||
659 | Ok(world_state.vfs.read().path2file(&path).map(|it| FileId(it.0))) | ||
660 | }) | ||
661 | .filter_map(|res| res.transpose()) | ||
662 | .collect::<Result<Vec<_>>>()?; | ||
663 | |||
664 | // We manually send a diagnostic update when the watcher asks | ||
665 | // us to, to avoid the issue of having to change the file to | ||
666 | // receive updated diagnostics. | ||
667 | update_file_notifications_on_threadpool( | ||
668 | pool, | ||
669 | world_state.snapshot(), | ||
670 | false, | ||
671 | task_sender.clone(), | ||
672 | subscriptions, | ||
673 | ); | ||
626 | 674 | ||
627 | fn publish_diagnostics_for_url( | ||
628 | url: &Url, | ||
629 | world_state: &WorldState, | ||
630 | task_sender: &Sender<Task>, | ||
631 | ) -> Result<()> { | ||
632 | let path = url.to_file_path().map_err(|()| format!("invalid uri: {}", url))?; | ||
633 | if let Some(file_id) = world_state.vfs.read().path2file(&path) { | ||
634 | let params = handlers::publish_diagnostics(&world_state.snapshot(), FileId(file_id.0))?; | ||
635 | let not = notification_new::<req::PublishDiagnostics>(params); | ||
636 | task_sender.send(Task::Notify(not)).unwrap(); | ||
637 | } | ||
638 | Ok(()) | 675 | Ok(()) |
639 | } | 676 | } |
640 | 677 | ||
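The rewritten `on_check_task` first collects the affected URLs, then maps each one to a `FileId` through the VFS. The `transpose()` plus `filter_map` combination drops URLs the VFS does not track while still propagating real errors. A sketch of that `Result<Option<T>>` pattern with plain types; the `lookup` function is a hypothetical stand-in for `vfs.read().path2file(&path)`.

```rust
// `transpose()` turns `Result<Option<T>, E>` into `Option<Result<T, E>>`, so
// `filter_map` can silently skip the "not tracked" case while `collect` still
// surfaces the error case.
fn to_file_ids(urls: Vec<&str>) -> Result<Vec<u32>, String> {
    fn lookup(url: &str) -> Result<Option<u32>, String> {
        match url {
            "file:///tracked.rs" => Ok(Some(42)),
            "file:///untracked.rs" => Ok(None),
            other => Err(format!("invalid uri: {}", other)),
        }
    }

    urls.into_iter()
        .map(lookup)
        .filter_map(|res| res.transpose())
        .collect()
}

fn main() {
    assert_eq!(to_file_ids(vec!["file:///tracked.rs", "file:///untracked.rs"]), Ok(vec![42]));
    assert!(to_file_ids(vec!["not-a-file-url"]).is_err());
}
```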
@@ -644,7 +681,7 @@ struct PoolDispatcher<'a> { | |||
644 | world: &'a mut WorldState, | 681 | world: &'a mut WorldState, |
645 | pending_requests: &'a mut PendingRequests, | 682 | pending_requests: &'a mut PendingRequests, |
646 | msg_sender: &'a Sender<Message>, | 683 | msg_sender: &'a Sender<Message>, |
647 | sender: &'a Sender<Task>, | 684 | task_sender: &'a Sender<Task>, |
648 | request_received: Instant, | 685 | request_received: Instant, |
649 | } | 686 | } |
650 | 687 | ||
@@ -691,7 +728,7 @@ impl<'a> PoolDispatcher<'a> { | |||
691 | 728 | ||
692 | self.pool.execute({ | 729 | self.pool.execute({ |
693 | let world = self.world.snapshot(); | 730 | let world = self.world.snapshot(); |
694 | let sender = self.sender.clone(); | 731 | let sender = self.task_sender.clone(); |
695 | move || { | 732 | move || { |
696 | let result = f(world, params); | 733 | let result = f(world, params); |
697 | let task = result_to_task::<R>(id, result); | 734 | let task = result_to_task::<R>(id, result); |
@@ -769,7 +806,7 @@ fn update_file_notifications_on_threadpool( | |||
769 | pool: &ThreadPool, | 806 | pool: &ThreadPool, |
770 | world: WorldSnapshot, | 807 | world: WorldSnapshot, |
771 | publish_decorations: bool, | 808 | publish_decorations: bool, |
772 | sender: Sender<Task>, | 809 | task_sender: Sender<Task>, |
773 | subscriptions: Vec<FileId>, | 810 | subscriptions: Vec<FileId>, |
774 | ) { | 811 | ) { |
775 | log::trace!("updating notifications for {:?}", subscriptions); | 812 | log::trace!("updating notifications for {:?}", subscriptions); |
@@ -785,7 +822,7 @@ fn update_file_notifications_on_threadpool( | |||
785 | } | 822 | } |
786 | Ok(params) => { | 823 | Ok(params) => { |
787 | let not = notification_new::<req::PublishDiagnostics>(params); | 824 | let not = notification_new::<req::PublishDiagnostics>(params); |
788 | sender.send(Task::Notify(not)).unwrap(); | 825 | task_sender.send(Task::Notify(not)).unwrap(); |
789 | } | 826 | } |
790 | } | 827 | } |
791 | } | 828 | } |
@@ -798,7 +835,7 @@ fn update_file_notifications_on_threadpool( | |||
798 | } | 835 | } |
799 | Ok(params) => { | 836 | Ok(params) => { |
800 | let not = notification_new::<req::PublishDecorations>(params); | 837 | let not = notification_new::<req::PublishDecorations>(params); |
801 | sender.send(Task::Notify(not)).unwrap(); | 838 | task_sender.send(Task::Notify(not)).unwrap(); |
802 | } | 839 | } |
803 | } | 840 | } |
804 | } | 841 | } |
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs index 8e43f0575..666f2ee29 100644 --- a/crates/ra_lsp_server/src/main_loop/handlers.rs +++ b/crates/ra_lsp_server/src/main_loop/handlers.rs | |||
@@ -674,8 +674,7 @@ pub fn handle_code_action( | |||
674 | res.push(action.into()); | 674 | res.push(action.into()); |
675 | } | 675 | } |
676 | 676 | ||
677 | for fix in world.check_watcher.read().fixes_for(¶ms.text_document.uri).into_iter().flatten() | 677 | for fix in world.check_watcher.fixes_for(¶ms.text_document.uri).into_iter().flatten() { |
678 | { | ||
679 | let fix_range = fix.location.range.conv_with(&line_index); | 678 | let fix_range = fix.location.range.conv_with(&line_index); |
680 | if fix_range.intersection(&range).is_none() { | 679 | if fix_range.intersection(&range).is_none() { |
681 | continue; | 680 | continue; |
@@ -895,7 +894,7 @@ pub fn publish_diagnostics( | |||
895 | tags: None, | 894 | tags: None, |
896 | }) | 895 | }) |
897 | .collect(); | 896 | .collect(); |
898 | if let Some(check_diags) = world.check_watcher.read().diagnostics_for(&uri) { | 897 | if let Some(check_diags) = world.check_watcher.diagnostics_for(&uri) { |
899 | diagnostics.extend(check_diags.iter().cloned()); | 898 | diagnostics.extend(check_diags.iter().cloned()); |
900 | } | 899 | } |
901 | Ok(req::PublishDiagnosticsParams { uri, diagnostics, version: None }) | 900 | Ok(req::PublishDiagnosticsParams { uri, diagnostics, version: None }) |
diff --git a/crates/ra_lsp_server/src/world.rs b/crates/ra_lsp_server/src/world.rs index e7a0acfc7..3059ef9ec 100644 --- a/crates/ra_lsp_server/src/world.rs +++ b/crates/ra_lsp_server/src/world.rs | |||
@@ -63,7 +63,7 @@ pub struct WorldSnapshot { | |||
63 | pub workspaces: Arc<Vec<ProjectWorkspace>>, | 63 | pub workspaces: Arc<Vec<ProjectWorkspace>>, |
64 | pub analysis: Analysis, | 64 | pub analysis: Analysis, |
65 | pub latest_requests: Arc<RwLock<LatestRequests>>, | 65 | pub latest_requests: Arc<RwLock<LatestRequests>>, |
66 | pub check_watcher: Arc<RwLock<CheckState>>, | 66 | pub check_watcher: CheckState, |
67 | vfs: Arc<RwLock<Vfs>>, | 67 | vfs: Arc<RwLock<Vfs>>, |
68 | } | 68 | } |
69 | 69 | ||
@@ -220,7 +220,7 @@ impl WorldState { | |||
220 | analysis: self.analysis_host.analysis(), | 220 | analysis: self.analysis_host.analysis(), |
221 | vfs: Arc::clone(&self.vfs), | 221 | vfs: Arc::clone(&self.vfs), |
222 | latest_requests: Arc::clone(&self.latest_requests), | 222 | latest_requests: Arc::clone(&self.latest_requests), |
223 | check_watcher: self.check_watcher.state.clone(), | 223 | check_watcher: (*self.check_watcher.state).clone(), |
224 | } | 224 | } |
225 | } | 225 | } |
226 | 226 | ||
diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs index e2c32c130..4a49e9f95 100644 --- a/crates/ra_prof/src/lib.rs +++ b/crates/ra_prof/src/lib.rs | |||
@@ -105,6 +105,21 @@ pub fn profile(desc: &str) -> Profiler { | |||
105 | }) | 105 | }) |
106 | } | 106 | } |
107 | 107 | ||
108 | pub fn print_time(desc: &str) -> impl Drop + '_ { | ||
109 | struct Guard<'a> { | ||
110 | desc: &'a str, | ||
111 | start: Instant, | ||
112 | } | ||
113 | |||
114 | impl Drop for Guard<'_> { | ||
115 | fn drop(&mut self) { | ||
116 | eprintln!("{}: {:?}", self.desc, self.start.elapsed()) | ||
117 | } | ||
118 | } | ||
119 | |||
120 | Guard { desc, start: Instant::now() } | ||
121 | } | ||
122 | |||
108 | pub struct Profiler { | 123 | pub struct Profiler { |
109 | desc: Option<String>, | 124 | desc: Option<String>, |
110 | } | 125 | } |
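The new `print_time` helper is a drop guard: it records `Instant::now()` on creation and prints the description plus the elapsed time to stderr when dropped. A usage sketch, assuming the calling crate depends on `ra_prof` (the timed body is made up):

```rust
use std::time::Duration;

fn main() {
    // Bind to a name (not `_`), otherwise the guard is dropped immediately.
    // Prints something like "load workspace: 150.2ms" when `_t` goes out of scope.
    let _t = ra_prof::print_time("load workspace");
    std::thread::sleep(Duration::from_millis(150));
} // <- guard dropped here, elapsed time printed to stderr
```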
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs index 659f77b71..5666445aa 100644 --- a/crates/test_utils/src/lib.rs +++ b/crates/test_utils/src/lib.rs | |||
@@ -21,6 +21,12 @@ pub use difference::Changeset as __Changeset; | |||
21 | 21 | ||
22 | pub const CURSOR_MARKER: &str = "<|>"; | 22 | pub const CURSOR_MARKER: &str = "<|>"; |
23 | 23 | ||
24 | /// Asserts that two strings are equal, otherwise displays a rich diff between them. | ||
25 | /// | ||
26 | /// The diff shows changes from the "original" left string to the "actual" right string. | ||
27 | /// | ||
28 | /// All arguments starting from and including the 3rd one are passed to the | ||
29 | /// `eprintln!()` macro if the texts are not equal. | ||
24 | #[macro_export] | 30 | #[macro_export] |
25 | macro_rules! assert_eq_text { | 31 | macro_rules! assert_eq_text { |
26 | ($left:expr, $right:expr) => { | 32 | ($left:expr, $right:expr) => { |
@@ -42,6 +48,7 @@ macro_rules! assert_eq_text { | |||
42 | }}; | 48 | }}; |
43 | } | 49 | } |
44 | 50 | ||
51 | /// Infallible version of `try_extract_offset()`. | ||
45 | pub fn extract_offset(text: &str) -> (TextUnit, String) { | 52 | pub fn extract_offset(text: &str) -> (TextUnit, String) { |
46 | match try_extract_offset(text) { | 53 | match try_extract_offset(text) { |
47 | None => panic!("text should contain cursor marker"), | 54 | None => panic!("text should contain cursor marker"), |
@@ -49,6 +56,8 @@ pub fn extract_offset(text: &str) -> (TextUnit, String) { | |||
49 | } | 56 | } |
50 | } | 57 | } |
51 | 58 | ||
59 | /// Returns the offset of the first occurrence of the `<|>` marker and a copy of `text` | ||
60 | /// without the marker. | ||
52 | fn try_extract_offset(text: &str) -> Option<(TextUnit, String)> { | 61 | fn try_extract_offset(text: &str) -> Option<(TextUnit, String)> { |
53 | let cursor_pos = text.find(CURSOR_MARKER)?; | 62 | let cursor_pos = text.find(CURSOR_MARKER)?; |
54 | let mut new_text = String::with_capacity(text.len() - CURSOR_MARKER.len()); | 63 | let mut new_text = String::with_capacity(text.len() - CURSOR_MARKER.len()); |
@@ -58,6 +67,7 @@ fn try_extract_offset(text: &str) -> Option<(TextUnit, String)> { | |||
58 | Some((cursor_pos, new_text)) | 67 | Some((cursor_pos, new_text)) |
59 | } | 68 | } |
60 | 69 | ||
70 | /// Infallible version of `try_extract_range()`. | ||
61 | pub fn extract_range(text: &str) -> (TextRange, String) { | 71 | pub fn extract_range(text: &str) -> (TextRange, String) { |
62 | match try_extract_range(text) { | 72 | match try_extract_range(text) { |
63 | None => panic!("text should contain cursor marker"), | 73 | None => panic!("text should contain cursor marker"), |
@@ -65,6 +75,8 @@ pub fn extract_range(text: &str) -> (TextRange, String) { | |||
65 | } | 75 | } |
66 | } | 76 | } |
67 | 77 | ||
78 | /// Returns the `TextRange` between the first two markers `<|>...<|>` and a copy | ||
79 | /// of `text` without both of these markers. | ||
68 | fn try_extract_range(text: &str) -> Option<(TextRange, String)> { | 80 | fn try_extract_range(text: &str) -> Option<(TextRange, String)> { |
69 | let (start, text) = try_extract_offset(text)?; | 81 | let (start, text) = try_extract_offset(text)?; |
70 | let (end, text) = try_extract_offset(&text)?; | 82 | let (end, text) = try_extract_offset(&text)?; |
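The marker helpers documented in this hunk are easiest to read against a concrete input. A sketch of what `extract_offset` and `extract_range` return, assuming `test_utils` is available as a dependency; the input strings are illustrative.

```rust
use test_utils::{extract_offset, extract_range};

fn main() {
    // The `<|>` cursor sits right before `main`, i.e. at offset 3.
    let (offset, text) = extract_offset("fn <|>main() {}");
    assert_eq!(offset.to_usize(), 3);
    assert_eq!(text, "fn main() {}");

    // Two markers delimit a range; both are stripped from the returned text.
    let (range, text) = extract_range("fn <|>main<|>() {}");
    assert_eq!(range.start().to_usize(), 3);
    assert_eq!(range.end().to_usize(), 7);
    assert_eq!(text, "fn main() {}");
}
```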
@@ -85,6 +97,11 @@ impl From<RangeOrOffset> for TextRange { | |||
85 | } | 97 | } |
86 | } | 98 | } |
87 | 99 | ||
100 | /// Extracts `TextRange` or `TextUnit` depending on the number of `<|>` markers | ||
101 | /// found in `text`. | ||
102 | /// | ||
103 | /// # Panics | ||
104 | /// Panics if no `<|>` marker is present in the `text`. | ||
88 | pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) { | 105 | pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) { |
89 | if let Some((range, text)) = try_extract_range(text) { | 106 | if let Some((range, text)) = try_extract_range(text) { |
90 | return (RangeOrOffset::Range(range), text); | 107 | return (RangeOrOffset::Range(range), text); |
@@ -93,7 +110,7 @@ pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) { | |||
93 | (RangeOrOffset::Offset(offset), text) | 110 | (RangeOrOffset::Offset(offset), text) |
94 | } | 111 | } |
95 | 112 | ||
96 | /// Extracts ranges, marked with `<tag> </tag>` paris from the `text` | 113 | /// Extracts ranges, marked with `<tag> </tag>` pairs from the `text` |
97 | pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) { | 114 | pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) { |
98 | let open = format!("<{}>", tag); | 115 | let open = format!("<{}>", tag); |
99 | let close = format!("</{}>", tag); | 116 | let close = format!("</{}>", tag); |
@@ -127,9 +144,9 @@ pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) { | |||
127 | (ranges, res) | 144 | (ranges, res) |
128 | } | 145 | } |
129 | 146 | ||
147 | /// Inserts a `<|>` marker into `text` at the given `offset`. | ||
130 | pub fn add_cursor(text: &str, offset: TextUnit) -> String { | 148 | pub fn add_cursor(text: &str, offset: TextUnit) -> String { |
131 | let offset: u32 = offset.into(); | 149 | let offset: usize = offset.to_usize(); |
132 | let offset: usize = offset as usize; | ||
133 | let mut res = String::new(); | 150 | let mut res = String::new(); |
134 | res.push_str(&text[..offset]); | 151 | res.push_str(&text[..offset]); |
135 | res.push_str("<|>"); | 152 | res.push_str("<|>"); |
@@ -152,19 +169,6 @@ pub struct FixtureEntry { | |||
152 | /// // - other meta | 169 | /// // - other meta |
153 | /// ``` | 170 | /// ``` |
154 | pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> { | 171 | pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> { |
155 | let mut res = Vec::new(); | ||
156 | let mut buf = String::new(); | ||
157 | let mut meta: Option<&str> = None; | ||
158 | |||
159 | macro_rules! flush { | ||
160 | () => { | ||
161 | if let Some(meta) = meta { | ||
162 | res.push(FixtureEntry { meta: meta.to_string(), text: buf.clone() }); | ||
163 | buf.clear(); | ||
164 | } | ||
165 | }; | ||
166 | }; | ||
167 | |||
168 | let margin = fixture | 172 | let margin = fixture |
169 | .lines() | 173 | .lines() |
170 | .filter(|it| it.trim_start().starts_with("//-")) | 174 | .filter(|it| it.trim_start().starts_with("//-")) |
@@ -172,7 +176,7 @@ pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> { | |||
172 | .next() | 176 | .next() |
173 | .expect("empty fixture"); | 177 | .expect("empty fixture"); |
174 | 178 | ||
175 | let lines = fixture | 179 | let mut lines = fixture |
176 | .split('\n') // don't use `.lines` to not drop `\r\n` | 180 | .split('\n') // don't use `.lines` to not drop `\r\n` |
177 | .filter_map(|line| { | 181 | .filter_map(|line| { |
178 | if line.len() >= margin { | 182 | if line.len() >= margin { |
@@ -184,17 +188,29 @@ pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> { | |||
184 | } | 188 | } |
185 | }); | 189 | }); |
186 | 190 | ||
187 | for line in lines { | 191 | let mut res = Vec::new(); |
188 | if line.starts_with("//-") { | 192 | let mut meta = None; |
189 | flush!(); | 193 | loop { |
190 | buf.clear(); | 194 | let mut next_meta = None; |
191 | meta = Some(line["//-".len()..].trim()); | 195 | let mut text = String::new(); |
192 | continue; | 196 | for line in lines.by_ref() { |
197 | if line.starts_with("//-") { | ||
198 | next_meta = Some(line["//-".len()..].trim().to_string()); | ||
199 | break; | ||
200 | } | ||
201 | text.push_str(line); | ||
202 | text.push('\n'); | ||
203 | } | ||
204 | |||
205 | if let Some(meta) = meta { | ||
206 | res.push(FixtureEntry { meta, text }); | ||
207 | } | ||
208 | meta = next_meta; | ||
209 | if meta.is_none() { | ||
210 | break; | ||
193 | } | 211 | } |
194 | buf.push_str(line); | ||
195 | buf.push('\n'); | ||
196 | } | 212 | } |
197 | flush!(); | 213 | |
198 | res | 214 | res |
199 | } | 215 | } |
200 | 216 | ||
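The rewritten `parse_fixture` drops the `flush!` macro in favour of an explicit loop over `lines.by_ref()`, but the observable behaviour stays the same: everything before the first `//-` header is ignored, and each header starts a new `FixtureEntry`. A sketch of the input/output shape, assuming `test_utils` is on the path; the file names in the fixture are illustrative.

```rust
use test_utils::parse_fixture;

fn main() {
    // The `\` after the opening quote keeps the fixture flush-left, so the
    // `//-` headers carry no indentation margin.
    let fixture = "\
//- /main.rs
fn main() {}
//- /lib.rs
pub fn lib() {}";

    let entries = parse_fixture(fixture);
    assert_eq!(entries.len(), 2);
    assert_eq!(entries[0].meta, "/main.rs");
    assert_eq!(entries[0].text, "fn main() {}\n");
    assert_eq!(entries[1].meta, "/lib.rs");
    assert_eq!(entries[1].text, "pub fn lib() {}\n");
}
```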
@@ -236,11 +252,10 @@ fn lines_match_works() { | |||
236 | assert!(!lines_match("b", "cb")); | 252 | assert!(!lines_match("b", "cb")); |
237 | } | 253 | } |
238 | 254 | ||
239 | // Compares JSON object for approximate equality. | 255 | /// Compares JSON object for approximate equality. |
240 | // You can use `[..]` wildcard in strings (useful for OS dependent things such | 256 | /// You can use `[..]` wildcard in strings (useful for OS dependent things such |
241 | // as paths). You can use a `"{...}"` string literal as a wildcard for | 257 | /// as paths). You can use a `"{...}"` string literal as a wildcard for |
242 | // arbitrary nested JSON (useful for parts of object emitted by other programs | 258 | /// arbitrary nested JSON. Arrays are sorted before comparison. |
243 | // (e.g. rustc) rather than Cargo itself). Arrays are sorted before comparison. | ||
244 | pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> { | 259 | pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> { |
245 | use serde_json::Value::*; | 260 | use serde_json::Value::*; |
246 | match (expected, actual) { | 261 | match (expected, actual) { |
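The `[..]` and `"{...}"` wildcards described in the doc comment above come from Cargo's test support. A sketch of how they behave, assuming both `serde_json` and `test_utils` are available:

```rust
use serde_json::json;
use test_utils::find_mismatch;

fn main() {
    let expected = json!({ "path": "[..]/main.rs", "data": "{...}" });

    // `[..]` matches any substring and `"{...}"` matches arbitrary nested
    // JSON, so this pair compares as approximately equal.
    let actual = json!({ "path": "/tmp/project/src/main.rs", "data": { "spans": [1, 2, 3] } });
    assert!(find_mismatch(&expected, &actual).is_none());

    // A real difference is reported as the first mismatching pair of values.
    let actual = json!({ "path": "/tmp/project/src/lib.rs", "data": "{...}" });
    assert!(find_mismatch(&expected, &actual).is_some());
}
```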
@@ -286,6 +301,14 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a | |||
286 | } | 301 | } |
287 | } | 302 | } |
288 | 303 | ||
304 | /// Calls callback `f` with input code and file paths of all `.rs` files from `test_data_dir` | ||
305 | /// subdirectories defined by `paths`. | ||
306 | /// | ||
307 | /// If the content of the matching `.txt` file differs from the output of `f()`, | ||
308 | /// the test will fail. | ||
309 | /// | ||
310 | /// If there is no matching `.txt` file it will be created and filled with the | ||
311 | /// output of `f()`, but the test will fail. | ||
289 | pub fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], f: F) | 312 | pub fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], f: F) |
290 | where | 313 | where |
291 | F: Fn(&str, &Path) -> String, | 314 | F: Fn(&str, &Path) -> String, |
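The doc comment above describes the `.rs`/`.txt` convention; here is how a crate typically wires it up. The data-directory path and the body of the callback are hypothetical, only the `dir_tests` call shape comes from the signature shown in this hunk.

```rust
use std::path::PathBuf;
use test_utils::{dir_tests, project_dir};

// Hypothetical location of the test data; adjust to the crate under test.
fn test_data_dir() -> PathBuf {
    project_dir().join("crates/ra_syntax/test_data")
}

#[test]
fn parser_ok_cases() {
    // For every `parser/ok/*.rs` file, the returned string is compared with
    // the sibling `.txt` file (created on the first run if it is missing).
    dir_tests(&test_data_dir(), &["parser/ok"], |code, _path| {
        format!("{} bytes of input\n", code.len())
    });
}
```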
@@ -307,6 +330,7 @@ where | |||
307 | } | 330 | } |
308 | } | 331 | } |
309 | 332 | ||
333 | /// Collects all `.rs` files from `test_data_dir` subdirectories defined by `paths`. | ||
310 | pub fn collect_tests(test_data_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> { | 334 | pub fn collect_tests(test_data_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> { |
311 | paths | 335 | paths |
312 | .iter() | 336 | .iter() |
@@ -321,6 +345,7 @@ pub fn collect_tests(test_data_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, Stri | |||
321 | .collect() | 345 | .collect() |
322 | } | 346 | } |
323 | 347 | ||
348 | /// Collects paths to all `.rs` files from `dir` in a sorted `Vec<PathBuf>`. | ||
324 | fn test_from_dir(dir: &Path) -> Vec<PathBuf> { | 349 | fn test_from_dir(dir: &Path) -> Vec<PathBuf> { |
325 | let mut acc = Vec::new(); | 350 | let mut acc = Vec::new(); |
326 | for file in fs::read_dir(&dir).unwrap() { | 351 | for file in fs::read_dir(&dir).unwrap() { |
@@ -334,6 +359,7 @@ fn test_from_dir(dir: &Path) -> Vec<PathBuf> { | |||
334 | acc | 359 | acc |
335 | } | 360 | } |
336 | 361 | ||
362 | /// Returns the path to the root directory of `rust-analyzer` project. | ||
337 | pub fn project_dir() -> PathBuf { | 363 | pub fn project_dir() -> PathBuf { |
338 | let dir = env!("CARGO_MANIFEST_DIR"); | 364 | let dir = env!("CARGO_MANIFEST_DIR"); |
339 | PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned() | 365 | PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned() |
@@ -356,6 +382,9 @@ pub fn read_text(path: &Path) -> String { | |||
356 | .replace("\r\n", "\n") | 382 | .replace("\r\n", "\n") |
357 | } | 383 | } |
358 | 384 | ||
385 | /// Returns `true` if slow tests should be skipped. Otherwise returns `false` and | ||
386 | /// also creates a file at `./target/.slow_tests_cookie` which serves as a flag | ||
387 | /// that slow tests did run. | ||
359 | pub fn skip_slow_tests() -> bool { | 388 | pub fn skip_slow_tests() -> bool { |
360 | let should_skip = std::env::var("CI").is_err() && std::env::var("RUN_SLOW_TESTS").is_err(); | 389 | let should_skip = std::env::var("CI").is_err() && std::env::var("RUN_SLOW_TESTS").is_err(); |
361 | if should_skip { | 390 | if should_skip { |
@@ -367,8 +396,9 @@ pub fn skip_slow_tests() -> bool { | |||
367 | should_skip | 396 | should_skip |
368 | } | 397 | } |
369 | 398 | ||
370 | const REWRITE: bool = false; | 399 | /// Asserts that `expected` and `actual` strings are equal. If they differ only |
371 | 400 | /// in trailing or leading whitespace, the test won't fail and | |
401 | /// the contents of `actual` will be written to the file located at `path`. | ||
372 | fn assert_equal_text(expected: &str, actual: &str, path: &Path) { | 402 | fn assert_equal_text(expected: &str, actual: &str, path: &Path) { |
373 | if expected == actual { | 403 | if expected == actual { |
374 | return; | 404 | return; |
@@ -381,6 +411,7 @@ fn assert_equal_text(expected: &str, actual: &str, path: &Path) { | |||
381 | fs::write(path, actual).unwrap(); | 411 | fs::write(path, actual).unwrap(); |
382 | return; | 412 | return; |
383 | } | 413 | } |
414 | const REWRITE: bool = false; | ||
384 | if REWRITE { | 415 | if REWRITE { |
385 | println!("rewriting {}", pretty_path.display()); | 416 | println!("rewriting {}", pretty_path.display()); |
386 | fs::write(path, actual).unwrap(); | 417 | fs::write(path, actual).unwrap(); |
diff --git a/crates/test_utils/src/marks.rs b/crates/test_utils/src/marks.rs index fe1813947..f8fabfaff 100644 --- a/crates/test_utils/src/marks.rs +++ b/crates/test_utils/src/marks.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | //! This module implements manually tracked test coverage, which useful for | 1 | //! This module implements manually tracked test coverage, which is useful for |
2 | //! quickly finding a test responsible for testing a particular bit of code. | 2 | //! quickly finding a test responsible for testing a particular bit of code. |
3 | //! | 3 | //! |
4 | //! See <https://matklad.github.io/2018/06/18/a-trick-for-test-maintenance.html> | 4 | //! See <https://matklad.github.io/2018/06/18/a-trick-for-test-maintenance.html> |