author | Aleksey Kladov <[email protected]> | 2019-01-08 19:33:36 +0000 |
---|---|---|
committer | Aleksey Kladov <[email protected]> | 2019-01-08 19:33:36 +0000 |
commit | 5b573deb20b15451788dd2861e9fc6e69ed0472e (patch) | |
tree | 0b9438789c69af28fd1d91ca3c25c268ef103bac | |
parent | 6bca91af532d79abbced5b151cb4188ff8625c04 (diff) |
fix usages after rename
30 files changed, 4280 insertions, 31 deletions
diff --git a/Cargo.lock b/Cargo.lock
index f99a03424..354cc138b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -637,27 +637,6 @@ dependencies = [ | |||
637 | ] | 637 | ] |
638 | 638 | ||
639 | [[package]] | 639 | [[package]] |
640 | name = "ra_analysis" | ||
641 | version = "0.1.0" | ||
642 | dependencies = [ | ||
643 | "fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
644 | "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
645 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
646 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
647 | "ra_db 0.1.0", | ||
648 | "ra_hir 0.1.0", | ||
649 | "ra_ide_api_light 0.1.0", | ||
650 | "ra_syntax 0.1.0", | ||
651 | "ra_text_edit 0.1.0", | ||
652 | "rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
653 | "relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
654 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
655 | "salsa 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
656 | "test_utils 0.1.0", | ||
657 | "unicase 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
658 | ] | ||
659 | |||
660 | [[package]] | ||
661 | name = "ra_arena" | 640 | name = "ra_arena" |
662 | version = "0.1.0" | 641 | version = "0.1.0" |
663 | 642 | ||
@@ -705,6 +684,27 @@ dependencies = [ | |||
705 | ] | 684 | ] |
706 | 685 | ||
707 | [[package]] | 686 | [[package]] |
687 | name = "ra_ide_api" | ||
688 | version = "0.1.0" | ||
689 | dependencies = [ | ||
690 | "fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
691 | "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
692 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
693 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
694 | "ra_db 0.1.0", | ||
695 | "ra_hir 0.1.0", | ||
696 | "ra_ide_api_light 0.1.0", | ||
697 | "ra_syntax 0.1.0", | ||
698 | "ra_text_edit 0.1.0", | ||
699 | "rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
700 | "relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
701 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
702 | "salsa 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
703 | "test_utils 0.1.0", | ||
704 | "unicase 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
705 | ] | ||
706 | |||
707 | [[package]] | ||
708 | name = "ra_ide_api_light" | 708 | name = "ra_ide_api_light" |
709 | version = "0.1.0" | 709 | version = "0.1.0" |
710 | dependencies = [ | 710 | dependencies = [ |
@@ -733,7 +733,7 @@ dependencies = [ | |||
733 | "languageserver-types 0.53.1 (registry+https://github.com/rust-lang/crates.io-index)", | 733 | "languageserver-types 0.53.1 (registry+https://github.com/rust-lang/crates.io-index)", |
734 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | 734 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", |
735 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", | 735 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", |
736 | "ra_analysis 0.1.0", | 736 | "ra_ide_api 0.1.0", |
737 | "ra_syntax 0.1.0", | 737 | "ra_syntax 0.1.0", |
738 | "ra_text_edit 0.1.0", | 738 | "ra_text_edit 0.1.0", |
739 | "ra_vfs 0.1.0", | 739 | "ra_vfs 0.1.0", |
diff --git a/crates/ra_ide_api/Cargo.toml b/crates/ra_ide_api/Cargo.toml
new file mode 100644
index 000000000..d42a664b6
--- /dev/null
+++ b/crates/ra_ide_api/Cargo.toml
@@ -0,0 +1,23 @@ | |||
1 | [package] | ||
2 | edition = "2018" | ||
3 | name = "ra_ide_api" | ||
4 | version = "0.1.0" | ||
5 | authors = ["Aleksey Kladov <[email protected]>"] | ||
6 | |||
7 | [dependencies] | ||
8 | itertools = "0.8.0" | ||
9 | log = "0.4.5" | ||
10 | relative-path = "0.4.0" | ||
11 | rayon = "1.0.2" | ||
12 | fst = "0.3.1" | ||
13 | salsa = "0.9.1" | ||
14 | rustc-hash = "1.0" | ||
15 | parking_lot = "0.7.0" | ||
16 | unicase = "2.2.0" | ||
17 | |||
18 | ra_syntax = { path = "../ra_syntax" } | ||
19 | ra_ide_api_light = { path = "../ra_ide_api_light" } | ||
20 | ra_text_edit = { path = "../ra_text_edit" } | ||
21 | ra_db = { path = "../ra_db" } | ||
22 | hir = { path = "../ra_hir", package = "ra_hir" } | ||
23 | test_utils = { path = "../test_utils" } | ||
diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs
new file mode 100644
index 000000000..27b760780
--- /dev/null
+++ b/crates/ra_ide_api/src/call_info.rs
@@ -0,0 +1,451 @@ | |||
1 | use std::cmp::{max, min}; | ||
2 | |||
3 | use ra_db::{SyntaxDatabase, Cancelable}; | ||
4 | use ra_syntax::{ | ||
5 | AstNode, SyntaxNode, TextUnit, TextRange, | ||
6 | SyntaxKind::FN_DEF, | ||
7 | ast::{self, ArgListOwner, DocCommentsOwner}, | ||
8 | algo::find_node_at_offset, | ||
9 | }; | ||
10 | |||
11 | use crate::{FilePosition, CallInfo, db::RootDatabase}; | ||
12 | |||
13 | /// Computes parameter information for the given call expression. | ||
14 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Cancelable<Option<CallInfo>> { | ||
15 | let file = db.source_file(position.file_id); | ||
16 | let syntax = file.syntax(); | ||
17 | |||
18 | // Find the calling expression and its NameRef | ||
19 | let calling_node = ctry!(FnCallNode::with_node(syntax, position.offset)); | ||
20 | let name_ref = ctry!(calling_node.name_ref()); | ||
21 | |||
22 | // Resolve the function's NameRef (NOTE: this isn't entirely accurate). | ||
23 | let file_symbols = db.index_resolve(name_ref)?; | ||
24 | let symbol = ctry!(file_symbols.into_iter().find(|it| it.ptr.kind() == FN_DEF)); | ||
25 | let fn_file = db.source_file(symbol.file_id); | ||
26 | let fn_def = symbol.ptr.resolve(&fn_file); | ||
27 | let fn_def = ast::FnDef::cast(&fn_def).unwrap(); | ||
28 | let mut call_info = ctry!(CallInfo::new(fn_def)); | ||
29 | // If we have a calling expression, let's find which argument we are on | ||
30 | let num_params = call_info.parameters.len(); | ||
31 | let has_self = fn_def.param_list().and_then(|l| l.self_param()).is_some(); | ||
32 | |||
33 | if num_params == 1 { | ||
34 | if !has_self { | ||
35 | call_info.active_parameter = Some(0); | ||
36 | } | ||
37 | } else if num_params > 1 { | ||
38 | // Count how many parameters into the call we are. | ||
39 | // TODO: This is best effort for now and should be fixed at some point. | ||
40 | // It may be better to see where we are in the arg_list and then check | ||
41 | // where offset is in that list (or beyond). | ||
42 | // Revisit this after we get documentation comments in. | ||
43 | if let Some(ref arg_list) = calling_node.arg_list() { | ||
44 | let start = arg_list.syntax().range().start(); | ||
45 | |||
46 | let range_search = TextRange::from_to(start, position.offset); | ||
47 | let mut commas: usize = arg_list | ||
48 | .syntax() | ||
49 | .text() | ||
50 | .slice(range_search) | ||
51 | .to_string() | ||
52 | .matches(',') | ||
53 | .count(); | ||
54 | |||
55 | // If we have a method call, eat the first param since it's just self. | ||
56 | if has_self { | ||
57 | commas += 1; | ||
58 | } | ||
59 | |||
60 | call_info.active_parameter = Some(commas); | ||
61 | } | ||
62 | } | ||
63 | |||
64 | Ok(Some(call_info)) | ||
65 | } | ||
66 | |||
67 | enum FnCallNode<'a> { | ||
68 | CallExpr(&'a ast::CallExpr), | ||
69 | MethodCallExpr(&'a ast::MethodCallExpr), | ||
70 | } | ||
71 | |||
72 | impl<'a> FnCallNode<'a> { | ||
73 | pub fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> { | ||
74 | if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) { | ||
75 | return Some(FnCallNode::CallExpr(expr)); | ||
76 | } | ||
77 | if let Some(expr) = find_node_at_offset::<ast::MethodCallExpr>(syntax, offset) { | ||
78 | return Some(FnCallNode::MethodCallExpr(expr)); | ||
79 | } | ||
80 | None | ||
81 | } | ||
82 | |||
83 | pub fn name_ref(&self) -> Option<&'a ast::NameRef> { | ||
84 | match *self { | ||
85 | FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() { | ||
86 | ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, | ||
87 | _ => return None, | ||
88 | }), | ||
89 | |||
90 | FnCallNode::MethodCallExpr(call_expr) => call_expr | ||
91 | .syntax() | ||
92 | .children() | ||
93 | .filter_map(ast::NameRef::cast) | ||
94 | .nth(0), | ||
95 | } | ||
96 | } | ||
97 | |||
98 | pub fn arg_list(&self) -> Option<&'a ast::ArgList> { | ||
99 | match *self { | ||
100 | FnCallNode::CallExpr(expr) => expr.arg_list(), | ||
101 | FnCallNode::MethodCallExpr(expr) => expr.arg_list(), | ||
102 | } | ||
103 | } | ||
104 | } | ||
105 | |||
106 | impl CallInfo { | ||
107 | fn new(node: &ast::FnDef) -> Option<Self> { | ||
108 | let mut doc = None; | ||
109 | |||
110 | // Strip the body out for the label. | ||
111 | let mut label: String = if let Some(body) = node.body() { | ||
112 | let body_range = body.syntax().range(); | ||
113 | let label: String = node | ||
114 | .syntax() | ||
115 | .children() | ||
116 | .filter(|child| !child.range().is_subrange(&body_range)) | ||
117 | .map(|node| node.text().to_string()) | ||
118 | .collect(); | ||
119 | label | ||
120 | } else { | ||
121 | node.syntax().text().to_string() | ||
122 | }; | ||
123 | |||
124 | if let Some((comment_range, docs)) = extract_doc_comments(node) { | ||
125 | let comment_range = comment_range | ||
126 | .checked_sub(node.syntax().range().start()) | ||
127 | .unwrap(); | ||
128 | let start = comment_range.start().to_usize(); | ||
129 | let end = comment_range.end().to_usize(); | ||
130 | |||
131 | // Remove the comment from the label | ||
132 | label.replace_range(start..end, ""); | ||
133 | |||
134 | // Massage markdown | ||
135 | let mut processed_lines = Vec::new(); | ||
136 | let mut in_code_block = false; | ||
137 | for line in docs.lines() { | ||
138 | if line.starts_with("```") { | ||
139 | in_code_block = !in_code_block; | ||
140 | } | ||
141 | |||
142 | let line = if in_code_block && line.starts_with("```") && !line.contains("rust") { | ||
143 | "```rust".into() | ||
144 | } else { | ||
145 | line.to_string() | ||
146 | }; | ||
147 | |||
148 | processed_lines.push(line); | ||
149 | } | ||
150 | |||
151 | if !processed_lines.is_empty() { | ||
152 | doc = Some(processed_lines.join("\n")); | ||
153 | } | ||
154 | } | ||
155 | |||
156 | Some(CallInfo { | ||
157 | parameters: param_list(node), | ||
158 | label: label.trim().to_owned(), | ||
159 | doc, | ||
160 | active_parameter: None, | ||
161 | }) | ||
162 | } | ||
163 | } | ||
164 | |||
165 | fn extract_doc_comments(node: &ast::FnDef) -> Option<(TextRange, String)> { | ||
166 | if node.doc_comments().count() == 0 { | ||
167 | return None; | ||
168 | } | ||
169 | |||
170 | let comment_text = node.doc_comment_text(); | ||
171 | |||
172 | let (begin, end) = node | ||
173 | .doc_comments() | ||
174 | .map(|comment| comment.syntax().range()) | ||
175 | .map(|range| (range.start().to_usize(), range.end().to_usize())) | ||
176 | .fold((std::usize::MAX, std::usize::MIN), |acc, range| { | ||
177 | (min(acc.0, range.0), max(acc.1, range.1)) | ||
178 | }); | ||
179 | |||
180 | let range = TextRange::from_to(TextUnit::from_usize(begin), TextUnit::from_usize(end)); | ||
181 | |||
182 | Some((range, comment_text)) | ||
183 | } | ||
184 | |||
185 | fn param_list(node: &ast::FnDef) -> Vec<String> { | ||
186 | let mut res = vec![]; | ||
187 | if let Some(param_list) = node.param_list() { | ||
188 | if let Some(self_param) = param_list.self_param() { | ||
189 | res.push(self_param.syntax().text().to_string()) | ||
190 | } | ||
191 | |||
192 | // Maybe use param.pat here? See if we can just extract the name? | ||
193 | //res.extend(param_list.params().map(|p| p.syntax().text().to_string())); | ||
194 | res.extend( | ||
195 | param_list | ||
196 | .params() | ||
197 | .filter_map(|p| p.pat()) | ||
198 | .map(|pat| pat.syntax().text().to_string()), | ||
199 | ); | ||
200 | } | ||
201 | res | ||
202 | } | ||
203 | |||
204 | #[cfg(test)] | ||
205 | mod tests { | ||
206 | use super::*; | ||
207 | |||
208 | use crate::mock_analysis::single_file_with_position; | ||
209 | |||
210 | fn call_info(text: &str) -> CallInfo { | ||
211 | let (analysis, position) = single_file_with_position(text); | ||
212 | analysis.call_info(position).unwrap().unwrap() | ||
213 | } | ||
214 | |||
215 | #[test] | ||
216 | fn test_fn_signature_two_args_first() { | ||
217 | let info = call_info( | ||
218 | r#"fn foo(x: u32, y: u32) -> u32 {x + y} | ||
219 | fn bar() { foo(<|>3, ); }"#, | ||
220 | ); | ||
221 | |||
222 | assert_eq!(info.parameters, vec!("x".to_string(), "y".to_string())); | ||
223 | assert_eq!(info.active_parameter, Some(0)); | ||
224 | } | ||
225 | |||
226 | #[test] | ||
227 | fn test_fn_signature_two_args_second() { | ||
228 | let info = call_info( | ||
229 | r#"fn foo(x: u32, y: u32) -> u32 {x + y} | ||
230 | fn bar() { foo(3, <|>); }"#, | ||
231 | ); | ||
232 | |||
233 | assert_eq!(info.parameters, vec!("x".to_string(), "y".to_string())); | ||
234 | assert_eq!(info.active_parameter, Some(1)); | ||
235 | } | ||
236 | |||
237 | #[test] | ||
238 | fn test_fn_signature_for_impl() { | ||
239 | let info = call_info( | ||
240 | r#"struct F; impl F { pub fn new() { F{}} } | ||
241 | fn bar() {let _ : F = F::new(<|>);}"#, | ||
242 | ); | ||
243 | |||
244 | assert_eq!(info.parameters, Vec::<String>::new()); | ||
245 | assert_eq!(info.active_parameter, None); | ||
246 | } | ||
247 | |||
248 | #[test] | ||
249 | fn test_fn_signature_for_method_self() { | ||
250 | let info = call_info( | ||
251 | r#"struct F; | ||
252 | impl F { | ||
253 | pub fn new() -> F{ | ||
254 | F{} | ||
255 | } | ||
256 | |||
257 | pub fn do_it(&self) {} | ||
258 | } | ||
259 | |||
260 | fn bar() { | ||
261 | let f : F = F::new(); | ||
262 | f.do_it(<|>); | ||
263 | }"#, | ||
264 | ); | ||
265 | |||
266 | assert_eq!(info.parameters, vec!["&self".to_string()]); | ||
267 | assert_eq!(info.active_parameter, None); | ||
268 | } | ||
269 | |||
270 | #[test] | ||
271 | fn test_fn_signature_for_method_with_arg() { | ||
272 | let info = call_info( | ||
273 | r#"struct F; | ||
274 | impl F { | ||
275 | pub fn new() -> F{ | ||
276 | F{} | ||
277 | } | ||
278 | |||
279 | pub fn do_it(&self, x: i32) {} | ||
280 | } | ||
281 | |||
282 | fn bar() { | ||
283 | let f : F = F::new(); | ||
284 | f.do_it(<|>); | ||
285 | }"#, | ||
286 | ); | ||
287 | |||
288 | assert_eq!(info.parameters, vec!["&self".to_string(), "x".to_string()]); | ||
289 | assert_eq!(info.active_parameter, Some(1)); | ||
290 | } | ||
291 | |||
292 | #[test] | ||
293 | fn test_fn_signature_with_docs_simple() { | ||
294 | let info = call_info( | ||
295 | r#" | ||
296 | /// test | ||
297 | // non-doc-comment | ||
298 | fn foo(j: u32) -> u32 { | ||
299 | j | ||
300 | } | ||
301 | |||
302 | fn bar() { | ||
303 | let _ = foo(<|>); | ||
304 | } | ||
305 | "#, | ||
306 | ); | ||
307 | |||
308 | assert_eq!(info.parameters, vec!["j".to_string()]); | ||
309 | assert_eq!(info.active_parameter, Some(0)); | ||
310 | assert_eq!(info.label, "fn foo(j: u32) -> u32".to_string()); | ||
311 | assert_eq!(info.doc, Some("test".into())); | ||
312 | } | ||
313 | |||
314 | #[test] | ||
315 | fn test_fn_signature_with_docs() { | ||
316 | let info = call_info( | ||
317 | r#" | ||
318 | /// Adds one to the number given. | ||
319 | /// | ||
320 | /// # Examples | ||
321 | /// | ||
322 | /// ``` | ||
323 | /// let five = 5; | ||
324 | /// | ||
325 | /// assert_eq!(6, my_crate::add_one(5)); | ||
326 | /// ``` | ||
327 | pub fn add_one(x: i32) -> i32 { | ||
328 | x + 1 | ||
329 | } | ||
330 | |||
331 | pub fn do() { | ||
332 | add_one(<|> | ||
333 | }"#, | ||
334 | ); | ||
335 | |||
336 | assert_eq!(info.parameters, vec!["x".to_string()]); | ||
337 | assert_eq!(info.active_parameter, Some(0)); | ||
338 | assert_eq!(info.label, "pub fn add_one(x: i32) -> i32".to_string()); | ||
339 | assert_eq!( | ||
340 | info.doc, | ||
341 | Some( | ||
342 | r#"Adds one to the number given. | ||
343 | |||
344 | # Examples | ||
345 | |||
346 | ```rust | ||
347 | let five = 5; | ||
348 | |||
349 | assert_eq!(6, my_crate::add_one(5)); | ||
350 | ```"# | ||
351 | .into() | ||
352 | ) | ||
353 | ); | ||
354 | } | ||
355 | |||
356 | #[test] | ||
357 | fn test_fn_signature_with_docs_impl() { | ||
358 | let info = call_info( | ||
359 | r#" | ||
360 | struct addr; | ||
361 | impl addr { | ||
362 | /// Adds one to the number given. | ||
363 | /// | ||
364 | /// # Examples | ||
365 | /// | ||
366 | /// ``` | ||
367 | /// let five = 5; | ||
368 | /// | ||
369 | /// assert_eq!(6, my_crate::add_one(5)); | ||
370 | /// ``` | ||
371 | pub fn add_one(x: i32) -> i32 { | ||
372 | x + 1 | ||
373 | } | ||
374 | } | ||
375 | |||
376 | pub fn do_it() { | ||
377 | addr {}; | ||
378 | addr::add_one(<|>); | ||
379 | }"#, | ||
380 | ); | ||
381 | |||
382 | assert_eq!(info.parameters, vec!["x".to_string()]); | ||
383 | assert_eq!(info.active_parameter, Some(0)); | ||
384 | assert_eq!(info.label, "pub fn add_one(x: i32) -> i32".to_string()); | ||
385 | assert_eq!( | ||
386 | info.doc, | ||
387 | Some( | ||
388 | r#"Adds one to the number given. | ||
389 | |||
390 | # Examples | ||
391 | |||
392 | ```rust | ||
393 | let five = 5; | ||
394 | |||
395 | assert_eq!(6, my_crate::add_one(5)); | ||
396 | ```"# | ||
397 | .into() | ||
398 | ) | ||
399 | ); | ||
400 | } | ||
401 | |||
402 | #[test] | ||
403 | fn test_fn_signature_with_docs_from_actix() { | ||
404 | let info = call_info( | ||
405 | r#" | ||
406 | pub trait WriteHandler<E> | ||
407 | where | ||
408 | Self: Actor, | ||
409 | Self::Context: ActorContext, | ||
410 | { | ||
411 | /// Method is called when writer emits error. | ||
412 | /// | ||
413 | /// If this method returns `ErrorAction::Continue` writer processing | ||
414 | /// continues otherwise stream processing stops. | ||
415 | fn error(&mut self, err: E, ctx: &mut Self::Context) -> Running { | ||
416 | Running::Stop | ||
417 | } | ||
418 | |||
419 | /// Method is called when writer finishes. | ||
420 | /// | ||
421 | /// By default this method stops actor's `Context`. | ||
422 | fn finished(&mut self, ctx: &mut Self::Context) { | ||
423 | ctx.stop() | ||
424 | } | ||
425 | } | ||
426 | |||
427 | pub fn foo() { | ||
428 | WriteHandler r; | ||
429 | r.finished(<|>); | ||
430 | } | ||
431 | |||
432 | "#, | ||
433 | ); | ||
434 | |||
435 | assert_eq!( | ||
436 | info.parameters, | ||
437 | vec!["&mut self".to_string(), "ctx".to_string()] | ||
438 | ); | ||
439 | assert_eq!(info.active_parameter, Some(1)); | ||
440 | assert_eq!( | ||
441 | info.doc, | ||
442 | Some( | ||
443 | r#"Method is called when writer finishes. | ||
444 | |||
445 | By default this method stops actor's `Context`."# | ||
446 | .into() | ||
447 | ) | ||
448 | ); | ||
449 | } | ||
450 | |||
451 | } | ||
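The active-parameter logic in `call_info` above reduces to counting the commas between the start of the argument list and the cursor, then shifting the index by one for method calls because the `self` parameter is never typed at the call site. A minimal standalone sketch of that heuristic (the helper name is illustrative, not part of this diff):

```rust
// Sketch of the comma-counting heuristic: each comma before the cursor moves
// the active argument one slot to the right; `self` occupies slot 0 in the
// parameter list but never appears in the argument list.
fn active_param_by_commas(args_before_cursor: &str, has_self_param: bool) -> usize {
    let commas = args_before_cursor.matches(',').count();
    if has_self_param {
        commas + 1
    } else {
        commas
    }
}

fn main() {
    assert_eq!(active_param_by_commas("3, ", false), 1); // foo(3, <cursor>)
    assert_eq!(active_param_by_commas("", true), 1);     // f.do_it(<cursor>)
}
```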
diff --git a/crates/ra_ide_api/src/completion.rs b/crates/ra_ide_api/src/completion.rs
new file mode 100644
index 000000000..ce777a771
--- /dev/null
+++ b/crates/ra_ide_api/src/completion.rs
@@ -0,0 +1,77 @@ | |||
1 | mod completion_item; | ||
2 | mod completion_context; | ||
3 | |||
4 | mod complete_dot; | ||
5 | mod complete_fn_param; | ||
6 | mod complete_keyword; | ||
7 | mod complete_snippet; | ||
8 | mod complete_path; | ||
9 | mod complete_scope; | ||
10 | |||
11 | use ra_db::SyntaxDatabase; | ||
12 | |||
13 | use crate::{ | ||
14 | db, | ||
15 | Cancelable, FilePosition, | ||
16 | completion::{ | ||
17 | completion_item::{Completions, CompletionKind}, | ||
18 | completion_context::CompletionContext, | ||
19 | }, | ||
20 | }; | ||
21 | |||
22 | pub use crate::completion::completion_item::{CompletionItem, InsertText, CompletionItemKind}; | ||
23 | |||
24 | /// Main entry point for completion. We run completion as a two-phase process. | ||
25 | /// | ||
26 | /// First, we look at the position and collect a so-called `CompletionContext`. | ||
27 | /// This is a somewhat messy process, because, during completion, the syntax tree is | ||
28 | /// incomplete and can look really weird. | ||
29 | /// | ||
30 | /// Once the context is collected, we run a series of completion routines which | ||
31 | /// look at the context and produce completion items. One subtlety about this | ||
32 | /// phase is that the completion engine should not filter by the substring which is | ||
33 | /// already present, it should give all possible variants for the identifier at | ||
34 | /// the caret. In other words, for | ||
35 | /// | ||
36 | /// ```no-run | ||
37 | /// fn f() { | ||
38 | /// let foo = 92; | ||
39 | /// let _ = bar<|> | ||
40 | /// } | ||
41 | /// ``` | ||
42 | /// | ||
43 | /// `foo` *should* be present among the completion variants. Filtering by | ||
44 | /// identifier prefix/fuzzy match should be done higher in the stack, together | ||
45 | /// with ordering of completions (currently this is done by the client). | ||
46 | pub(crate) fn completions( | ||
47 | db: &db::RootDatabase, | ||
48 | position: FilePosition, | ||
49 | ) -> Cancelable<Option<Completions>> { | ||
50 | let original_file = db.source_file(position.file_id); | ||
51 | let ctx = ctry!(CompletionContext::new(db, &original_file, position)?); | ||
52 | |||
53 | let mut acc = Completions::default(); | ||
54 | |||
55 | complete_fn_param::complete_fn_param(&mut acc, &ctx); | ||
56 | complete_keyword::complete_expr_keyword(&mut acc, &ctx); | ||
57 | complete_keyword::complete_use_tree_keyword(&mut acc, &ctx); | ||
58 | complete_snippet::complete_expr_snippet(&mut acc, &ctx); | ||
59 | complete_snippet::complete_item_snippet(&mut acc, &ctx); | ||
60 | complete_path::complete_path(&mut acc, &ctx)?; | ||
61 | complete_scope::complete_scope(&mut acc, &ctx)?; | ||
62 | complete_dot::complete_dot(&mut acc, &ctx)?; | ||
63 | |||
64 | Ok(Some(acc)) | ||
65 | } | ||
66 | |||
67 | #[cfg(test)] | ||
68 | fn check_completion(code: &str, expected_completions: &str, kind: CompletionKind) { | ||
69 | use crate::mock_analysis::{single_file_with_position, analysis_and_position}; | ||
70 | let (analysis, position) = if code.contains("//-") { | ||
71 | analysis_and_position(code) | ||
72 | } else { | ||
73 | single_file_with_position(code) | ||
74 | }; | ||
75 | let completions = completions(&analysis.db, position).unwrap().unwrap(); | ||
76 | completions.assert_match(expected_completions, kind); | ||
77 | } | ||
diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs
new file mode 100644
index 000000000..5d4e60dc5
--- /dev/null
+++ b/crates/ra_ide_api/src/completion/complete_dot.rs
@@ -0,0 +1,121 @@ | |||
1 | use hir::{Ty, Def}; | ||
2 | |||
3 | use crate::Cancelable; | ||
4 | use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem, CompletionItemKind}; | ||
5 | |||
6 | /// Complete dot accesses, i.e. fields or methods (currently only fields). | ||
7 | pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { | ||
8 | let (function, receiver) = match (&ctx.function, ctx.dot_receiver) { | ||
9 | (Some(function), Some(receiver)) => (function, receiver), | ||
10 | _ => return Ok(()), | ||
11 | }; | ||
12 | let infer_result = function.infer(ctx.db)?; | ||
13 | let syntax_mapping = function.body_syntax_mapping(ctx.db)?; | ||
14 | let expr = match syntax_mapping.node_expr(receiver) { | ||
15 | Some(expr) => expr, | ||
16 | None => return Ok(()), | ||
17 | }; | ||
18 | let receiver_ty = infer_result[expr].clone(); | ||
19 | if !ctx.is_method_call { | ||
20 | complete_fields(acc, ctx, receiver_ty)?; | ||
21 | } | ||
22 | Ok(()) | ||
23 | } | ||
24 | |||
25 | fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) -> Cancelable<()> { | ||
26 | for receiver in receiver.autoderef(ctx.db) { | ||
27 | match receiver { | ||
28 | Ty::Adt { def_id, .. } => { | ||
29 | match def_id.resolve(ctx.db)? { | ||
30 | Def::Struct(s) => { | ||
31 | let variant_data = s.variant_data(ctx.db)?; | ||
32 | for field in variant_data.fields() { | ||
33 | CompletionItem::new( | ||
34 | CompletionKind::Reference, | ||
35 | field.name().to_string(), | ||
36 | ) | ||
37 | .kind(CompletionItemKind::Field) | ||
38 | .add_to(acc); | ||
39 | } | ||
40 | } | ||
41 | // TODO unions | ||
42 | _ => {} | ||
43 | } | ||
44 | } | ||
45 | Ty::Tuple(fields) => { | ||
46 | for (i, _ty) in fields.iter().enumerate() { | ||
47 | CompletionItem::new(CompletionKind::Reference, i.to_string()) | ||
48 | .kind(CompletionItemKind::Field) | ||
49 | .add_to(acc); | ||
50 | } | ||
51 | } | ||
52 | _ => {} | ||
53 | }; | ||
54 | } | ||
55 | Ok(()) | ||
56 | } | ||
57 | |||
58 | #[cfg(test)] | ||
59 | mod tests { | ||
60 | use crate::completion::*; | ||
61 | |||
62 | fn check_ref_completion(code: &str, expected_completions: &str) { | ||
63 | check_completion(code, expected_completions, CompletionKind::Reference); | ||
64 | } | ||
65 | |||
66 | #[test] | ||
67 | fn test_struct_field_completion() { | ||
68 | check_ref_completion( | ||
69 | r" | ||
70 | struct A { the_field: u32 } | ||
71 | fn foo(a: A) { | ||
72 | a.<|> | ||
73 | } | ||
74 | ", | ||
75 | r#"the_field"#, | ||
76 | ); | ||
77 | } | ||
78 | |||
79 | #[test] | ||
80 | fn test_struct_field_completion_self() { | ||
81 | check_ref_completion( | ||
82 | r" | ||
83 | struct A { the_field: u32 } | ||
84 | impl A { | ||
85 | fn foo(self) { | ||
86 | self.<|> | ||
87 | } | ||
88 | } | ||
89 | ", | ||
90 | r#"the_field"#, | ||
91 | ); | ||
92 | } | ||
93 | |||
94 | #[test] | ||
95 | fn test_struct_field_completion_autoderef() { | ||
96 | check_ref_completion( | ||
97 | r" | ||
98 | struct A { the_field: u32 } | ||
99 | impl A { | ||
100 | fn foo(&self) { | ||
101 | self.<|> | ||
102 | } | ||
103 | } | ||
104 | ", | ||
105 | r#"the_field"#, | ||
106 | ); | ||
107 | } | ||
108 | |||
109 | #[test] | ||
110 | fn test_no_struct_field_completion_for_method_call() { | ||
111 | check_ref_completion( | ||
112 | r" | ||
113 | struct A { the_field: u32 } | ||
114 | fn foo(a: A) { | ||
115 | a.<|>() | ||
116 | } | ||
117 | ", | ||
118 | r#""#, | ||
119 | ); | ||
120 | } | ||
121 | } | ||
diff --git a/crates/ra_ide_api/src/completion/complete_fn_param.rs b/crates/ra_ide_api/src/completion/complete_fn_param.rs
new file mode 100644
index 000000000..c1739e47e
--- /dev/null
+++ b/crates/ra_ide_api/src/completion/complete_fn_param.rs
@@ -0,0 +1,102 @@ | |||
1 | use ra_syntax::{ | ||
2 | algo::visit::{visitor_ctx, VisitorCtx}, | ||
3 | ast, | ||
4 | AstNode, | ||
5 | }; | ||
6 | use rustc_hash::FxHashMap; | ||
7 | |||
8 | use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem}; | ||
9 | |||
10 | /// Complete repeated parameters, both name and type. For example, if all | ||
11 | /// functions in a file have a `spam: &mut Spam` parameter, a completion with | ||
12 | /// `spam: &mut Spam` insert text/label and `spam` lookup string will be | ||
13 | /// suggested. | ||
14 | pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) { | ||
15 | if !ctx.is_param { | ||
16 | return; | ||
17 | } | ||
18 | |||
19 | let mut params = FxHashMap::default(); | ||
20 | for node in ctx.leaf.ancestors() { | ||
21 | let _ = visitor_ctx(&mut params) | ||
22 | .visit::<ast::SourceFile, _>(process) | ||
23 | .visit::<ast::ItemList, _>(process) | ||
24 | .accept(node); | ||
25 | } | ||
26 | params | ||
27 | .into_iter() | ||
28 | .filter_map(|(label, (count, param))| { | ||
29 | let lookup = param.pat()?.syntax().text().to_string(); | ||
30 | if count < 2 { | ||
31 | None | ||
32 | } else { | ||
33 | Some((label, lookup)) | ||
34 | } | ||
35 | }) | ||
36 | .for_each(|(label, lookup)| { | ||
37 | CompletionItem::new(CompletionKind::Magic, label) | ||
38 | .lookup_by(lookup) | ||
39 | .add_to(acc) | ||
40 | }); | ||
41 | |||
42 | fn process<'a, N: ast::FnDefOwner>( | ||
43 | node: &'a N, | ||
44 | params: &mut FxHashMap<String, (u32, &'a ast::Param)>, | ||
45 | ) { | ||
46 | node.functions() | ||
47 | .filter_map(|it| it.param_list()) | ||
48 | .flat_map(|it| it.params()) | ||
49 | .for_each(|param| { | ||
50 | let text = param.syntax().text().to_string(); | ||
51 | params.entry(text).or_insert((0, param)).0 += 1; | ||
52 | }) | ||
53 | } | ||
54 | } | ||
55 | |||
56 | #[cfg(test)] | ||
57 | mod tests { | ||
58 | use crate::completion::*; | ||
59 | |||
60 | fn check_magic_completion(code: &str, expected_completions: &str) { | ||
61 | check_completion(code, expected_completions, CompletionKind::Magic); | ||
62 | } | ||
63 | |||
64 | #[test] | ||
65 | fn test_param_completion_last_param() { | ||
66 | check_magic_completion( | ||
67 | r" | ||
68 | fn foo(file_id: FileId) {} | ||
69 | fn bar(file_id: FileId) {} | ||
70 | fn baz(file<|>) {} | ||
71 | ", | ||
72 | r#"file_id "file_id: FileId""#, | ||
73 | ); | ||
74 | } | ||
75 | |||
76 | #[test] | ||
77 | fn test_param_completion_nth_param() { | ||
78 | check_magic_completion( | ||
79 | r" | ||
80 | fn foo(file_id: FileId) {} | ||
81 | fn bar(file_id: FileId) {} | ||
82 | fn baz(file<|>, x: i32) {} | ||
83 | ", | ||
84 | r#"file_id "file_id: FileId""#, | ||
85 | ); | ||
86 | } | ||
87 | |||
88 | #[test] | ||
89 | fn test_param_completion_trait_param() { | ||
90 | check_magic_completion( | ||
91 | r" | ||
92 | pub(crate) trait SourceRoot { | ||
93 | pub fn contains(&self, file_id: FileId) -> bool; | ||
94 | pub fn module_map(&self) -> &ModuleMap; | ||
95 | pub fn lines(&self, file_id: FileId) -> &LineIndex; | ||
96 | pub fn syntax(&self, file<|>) | ||
97 | } | ||
98 | ", | ||
99 | r#"file_id "file_id: FileId""#, | ||
100 | ); | ||
101 | } | ||
102 | } | ||
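`complete_fn_param` above only suggests a parameter when the identical `name: Type` text appears in at least two signatures in the file, which is what the `count < 2` filter enforces. The counting pattern, reduced to a self-contained sketch (the helper name is illustrative):

```rust
use std::collections::HashMap;

// Count identical parameter texts and keep only those that occur twice or
// more, mirroring the frequency map built in complete_fn_param.
fn repeated_params<'a>(params: impl Iterator<Item = &'a str>) -> Vec<&'a str> {
    let mut counts: HashMap<&'a str, u32> = HashMap::new();
    for p in params {
        *counts.entry(p).or_insert(0) += 1;
    }
    counts
        .into_iter()
        .filter(|&(_, n)| n >= 2)
        .map(|(p, _)| p)
        .collect()
}

fn main() {
    let params = ["file_id: FileId", "file_id: FileId", "x: i32"];
    assert_eq!(repeated_params(params.iter().copied()), vec!["file_id: FileId"]);
}
```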
diff --git a/crates/ra_ide_api/src/completion/complete_keyword.rs b/crates/ra_ide_api/src/completion/complete_keyword.rs
new file mode 100644
index 000000000..d350f06ce
--- /dev/null
+++ b/crates/ra_ide_api/src/completion/complete_keyword.rs
@@ -0,0 +1,339 @@ | |||
1 | use ra_syntax::{ | ||
2 | algo::visit::{visitor, Visitor}, | ||
3 | AstNode, | ||
4 | ast::{self, LoopBodyOwner}, | ||
5 | SyntaxKind::*, SyntaxNode, | ||
6 | }; | ||
7 | |||
8 | use crate::completion::{CompletionContext, CompletionItem, Completions, CompletionKind, CompletionItemKind}; | ||
9 | |||
10 | pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) { | ||
11 | // complete keyword "crate" in use stmt | ||
12 | match (ctx.use_item_syntax.as_ref(), ctx.path_prefix.as_ref()) { | ||
13 | (Some(_), None) => { | ||
14 | CompletionItem::new(CompletionKind::Keyword, "crate") | ||
15 | .kind(CompletionItemKind::Keyword) | ||
16 | .lookup_by("crate") | ||
17 | .snippet("crate::") | ||
18 | .add_to(acc); | ||
19 | CompletionItem::new(CompletionKind::Keyword, "self") | ||
20 | .kind(CompletionItemKind::Keyword) | ||
21 | .lookup_by("self") | ||
22 | .add_to(acc); | ||
23 | CompletionItem::new(CompletionKind::Keyword, "super") | ||
24 | .kind(CompletionItemKind::Keyword) | ||
25 | .lookup_by("super") | ||
26 | .add_to(acc); | ||
27 | } | ||
28 | (Some(_), Some(_)) => { | ||
29 | CompletionItem::new(CompletionKind::Keyword, "self") | ||
30 | .kind(CompletionItemKind::Keyword) | ||
31 | .lookup_by("self") | ||
32 | .add_to(acc); | ||
33 | CompletionItem::new(CompletionKind::Keyword, "super") | ||
34 | .kind(CompletionItemKind::Keyword) | ||
35 | .lookup_by("super") | ||
36 | .add_to(acc); | ||
37 | } | ||
38 | _ => {} | ||
39 | } | ||
40 | } | ||
41 | |||
42 | fn keyword(kw: &str, snippet: &str) -> CompletionItem { | ||
43 | CompletionItem::new(CompletionKind::Keyword, kw) | ||
44 | .kind(CompletionItemKind::Keyword) | ||
45 | .snippet(snippet) | ||
46 | .build() | ||
47 | } | ||
48 | |||
49 | pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) { | ||
50 | if !ctx.is_trivial_path { | ||
51 | return; | ||
52 | } | ||
53 | |||
54 | let fn_def = match ctx.function_syntax { | ||
55 | Some(it) => it, | ||
56 | None => return, | ||
57 | }; | ||
58 | acc.add(keyword("if", "if $0 {}")); | ||
59 | acc.add(keyword("match", "match $0 {}")); | ||
60 | acc.add(keyword("while", "while $0 {}")); | ||
61 | acc.add(keyword("loop", "loop {$0}")); | ||
62 | |||
63 | if ctx.after_if { | ||
64 | acc.add(keyword("else", "else {$0}")); | ||
65 | acc.add(keyword("else if", "else if $0 {}")); | ||
66 | } | ||
67 | if is_in_loop_body(ctx.leaf) { | ||
68 | if ctx.can_be_stmt { | ||
69 | acc.add(keyword("continue", "continue;")); | ||
70 | acc.add(keyword("break", "break;")); | ||
71 | } else { | ||
72 | acc.add(keyword("continue", "continue")); | ||
73 | acc.add(keyword("break", "break")); | ||
74 | } | ||
75 | } | ||
76 | acc.add_all(complete_return(fn_def, ctx.can_be_stmt)); | ||
77 | } | ||
78 | |||
79 | fn is_in_loop_body(leaf: &SyntaxNode) -> bool { | ||
80 | for node in leaf.ancestors() { | ||
81 | if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { | ||
82 | break; | ||
83 | } | ||
84 | let loop_body = visitor() | ||
85 | .visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body) | ||
86 | .visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body) | ||
87 | .visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body) | ||
88 | .accept(node); | ||
89 | if let Some(Some(body)) = loop_body { | ||
90 | if leaf.range().is_subrange(&body.syntax().range()) { | ||
91 | return true; | ||
92 | } | ||
93 | } | ||
94 | } | ||
95 | false | ||
96 | } | ||
97 | |||
98 | fn complete_return(fn_def: &ast::FnDef, can_be_stmt: bool) -> Option<CompletionItem> { | ||
99 | let snip = match (can_be_stmt, fn_def.ret_type().is_some()) { | ||
100 | (true, true) => "return $0;", | ||
101 | (true, false) => "return;", | ||
102 | (false, true) => "return $0", | ||
103 | (false, false) => "return", | ||
104 | }; | ||
105 | Some(keyword("return", snip)) | ||
106 | } | ||
107 | |||
108 | #[cfg(test)] | ||
109 | mod tests { | ||
110 | use crate::completion::{CompletionKind, check_completion}; | ||
111 | fn check_keyword_completion(code: &str, expected_completions: &str) { | ||
112 | check_completion(code, expected_completions, CompletionKind::Keyword); | ||
113 | } | ||
114 | |||
115 | #[test] | ||
116 | fn completes_keywords_in_use_stmt() { | ||
117 | check_keyword_completion( | ||
118 | r" | ||
119 | use <|> | ||
120 | ", | ||
121 | r#" | ||
122 | crate "crate" "crate::" | ||
123 | self "self" | ||
124 | super "super" | ||
125 | "#, | ||
126 | ); | ||
127 | |||
128 | check_keyword_completion( | ||
129 | r" | ||
130 | use a::<|> | ||
131 | ", | ||
132 | r#" | ||
133 | self "self" | ||
134 | super "super" | ||
135 | "#, | ||
136 | ); | ||
137 | |||
138 | check_keyword_completion( | ||
139 | r" | ||
140 | use a::{b, <|>} | ||
141 | ", | ||
142 | r#" | ||
143 | self "self" | ||
144 | super "super" | ||
145 | "#, | ||
146 | ); | ||
147 | } | ||
148 | |||
149 | #[test] | ||
150 | fn completes_various_keywords_in_function() { | ||
151 | check_keyword_completion( | ||
152 | r" | ||
153 | fn quux() { | ||
154 | <|> | ||
155 | } | ||
156 | ", | ||
157 | r#" | ||
158 | if "if $0 {}" | ||
159 | match "match $0 {}" | ||
160 | while "while $0 {}" | ||
161 | loop "loop {$0}" | ||
162 | return "return;" | ||
163 | "#, | ||
164 | ); | ||
165 | } | ||
166 | |||
167 | #[test] | ||
168 | fn completes_else_after_if() { | ||
169 | check_keyword_completion( | ||
170 | r" | ||
171 | fn quux() { | ||
172 | if true { | ||
173 | () | ||
174 | } <|> | ||
175 | } | ||
176 | ", | ||
177 | r#" | ||
178 | if "if $0 {}" | ||
179 | match "match $0 {}" | ||
180 | while "while $0 {}" | ||
181 | loop "loop {$0}" | ||
182 | else "else {$0}" | ||
183 | else if "else if $0 {}" | ||
184 | return "return;" | ||
185 | "#, | ||
186 | ); | ||
187 | } | ||
188 | |||
189 | #[test] | ||
190 | fn test_completion_return_value() { | ||
191 | check_keyword_completion( | ||
192 | r" | ||
193 | fn quux() -> i32 { | ||
194 | <|> | ||
195 | 92 | ||
196 | } | ||
197 | ", | ||
198 | r#" | ||
199 | if "if $0 {}" | ||
200 | match "match $0 {}" | ||
201 | while "while $0 {}" | ||
202 | loop "loop {$0}" | ||
203 | return "return $0;" | ||
204 | "#, | ||
205 | ); | ||
206 | check_keyword_completion( | ||
207 | r" | ||
208 | fn quux() { | ||
209 | <|> | ||
210 | 92 | ||
211 | } | ||
212 | ", | ||
213 | r#" | ||
214 | if "if $0 {}" | ||
215 | match "match $0 {}" | ||
216 | while "while $0 {}" | ||
217 | loop "loop {$0}" | ||
218 | return "return;" | ||
219 | "#, | ||
220 | ); | ||
221 | } | ||
222 | |||
223 | #[test] | ||
224 | fn dont_add_semi_after_return_if_not_a_statement() { | ||
225 | check_keyword_completion( | ||
226 | r" | ||
227 | fn quux() -> i32 { | ||
228 | match () { | ||
229 | () => <|> | ||
230 | } | ||
231 | } | ||
232 | ", | ||
233 | r#" | ||
234 | if "if $0 {}" | ||
235 | match "match $0 {}" | ||
236 | while "while $0 {}" | ||
237 | loop "loop {$0}" | ||
238 | return "return $0" | ||
239 | "#, | ||
240 | ); | ||
241 | } | ||
242 | |||
243 | #[test] | ||
244 | fn last_return_in_block_has_semi() { | ||
245 | check_keyword_completion( | ||
246 | r" | ||
247 | fn quux() -> i32 { | ||
248 | if condition { | ||
249 | <|> | ||
250 | } | ||
251 | } | ||
252 | ", | ||
253 | r#" | ||
254 | if "if $0 {}" | ||
255 | match "match $0 {}" | ||
256 | while "while $0 {}" | ||
257 | loop "loop {$0}" | ||
258 | return "return $0;" | ||
259 | "#, | ||
260 | ); | ||
261 | check_keyword_completion( | ||
262 | r" | ||
263 | fn quux() -> i32 { | ||
264 | if condition { | ||
265 | <|> | ||
266 | } | ||
267 | let x = 92; | ||
268 | x | ||
269 | } | ||
270 | ", | ||
271 | r#" | ||
272 | if "if $0 {}" | ||
273 | match "match $0 {}" | ||
274 | while "while $0 {}" | ||
275 | loop "loop {$0}" | ||
276 | return "return $0;" | ||
277 | "#, | ||
278 | ); | ||
279 | } | ||
280 | |||
281 | #[test] | ||
282 | fn completes_break_and_continue_in_loops() { | ||
283 | check_keyword_completion( | ||
284 | r" | ||
285 | fn quux() -> i32 { | ||
286 | loop { <|> } | ||
287 | } | ||
288 | ", | ||
289 | r#" | ||
290 | if "if $0 {}" | ||
291 | match "match $0 {}" | ||
292 | while "while $0 {}" | ||
293 | loop "loop {$0}" | ||
294 | continue "continue;" | ||
295 | break "break;" | ||
296 | return "return $0;" | ||
297 | "#, | ||
298 | ); | ||
299 | // No completion: lambda isolates control flow | ||
300 | check_keyword_completion( | ||
301 | r" | ||
302 | fn quux() -> i32 { | ||
303 | loop { || { <|> } } | ||
304 | } | ||
305 | ", | ||
306 | r#" | ||
307 | if "if $0 {}" | ||
308 | match "match $0 {}" | ||
309 | while "while $0 {}" | ||
310 | loop "loop {$0}" | ||
311 | return "return $0;" | ||
312 | "#, | ||
313 | ); | ||
314 | } | ||
315 | |||
316 | #[test] | ||
317 | fn no_semi_after_break_continue_in_expr() { | ||
318 | check_keyword_completion( | ||
319 | r" | ||
320 | fn f() { | ||
321 | loop { | ||
322 | match () { | ||
323 | () => br<|> | ||
324 | } | ||
325 | } | ||
326 | } | ||
327 | ", | ||
328 | r#" | ||
329 | if "if $0 {}" | ||
330 | match "match $0 {}" | ||
331 | while "while $0 {}" | ||
332 | loop "loop {$0}" | ||
333 | continue "continue" | ||
334 | break "break" | ||
335 | return "return" | ||
336 | "#, | ||
337 | ) | ||
338 | } | ||
339 | } | ||
diff --git a/crates/ra_ide_api/src/completion/complete_path.rs b/crates/ra_ide_api/src/completion/complete_path.rs
new file mode 100644
index 000000000..4723a65a6
--- /dev/null
+++ b/crates/ra_ide_api/src/completion/complete_path.rs
@@ -0,0 +1,128 @@ | |||
1 | use crate::{ | ||
2 | Cancelable, | ||
3 | completion::{CompletionItem, CompletionItemKind, Completions, CompletionKind, CompletionContext}, | ||
4 | }; | ||
5 | |||
6 | pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { | ||
7 | let (path, module) = match (&ctx.path_prefix, &ctx.module) { | ||
8 | (Some(path), Some(module)) => (path.clone(), module), | ||
9 | _ => return Ok(()), | ||
10 | }; | ||
11 | let def_id = match module.resolve_path(ctx.db, &path)?.take_types() { | ||
12 | Some(it) => it, | ||
13 | None => return Ok(()), | ||
14 | }; | ||
15 | match def_id.resolve(ctx.db)? { | ||
16 | hir::Def::Module(module) => { | ||
17 | let module_scope = module.scope(ctx.db)?; | ||
18 | module_scope.entries().for_each(|(name, res)| { | ||
19 | CompletionItem::new(CompletionKind::Reference, name.to_string()) | ||
20 | .from_resolution(ctx, res) | ||
21 | .add_to(acc) | ||
22 | }); | ||
23 | } | ||
24 | hir::Def::Enum(e) => e | ||
25 | .variants(ctx.db)? | ||
26 | .into_iter() | ||
27 | .for_each(|(name, _variant)| { | ||
28 | CompletionItem::new(CompletionKind::Reference, name.to_string()) | ||
29 | .kind(CompletionItemKind::EnumVariant) | ||
30 | .add_to(acc) | ||
31 | }), | ||
32 | _ => return Ok(()), | ||
33 | }; | ||
34 | Ok(()) | ||
35 | } | ||
36 | |||
37 | #[cfg(test)] | ||
38 | mod tests { | ||
39 | use crate::completion::{CompletionKind, check_completion}; | ||
40 | |||
41 | fn check_reference_completion(code: &str, expected_completions: &str) { | ||
42 | check_completion(code, expected_completions, CompletionKind::Reference); | ||
43 | } | ||
44 | |||
45 | #[test] | ||
46 | fn completes_use_item_starting_with_self() { | ||
47 | check_reference_completion( | ||
48 | r" | ||
49 | use self::m::<|>; | ||
50 | |||
51 | mod m { | ||
52 | struct Bar; | ||
53 | } | ||
54 | ", | ||
55 | "Bar", | ||
56 | ); | ||
57 | } | ||
58 | |||
59 | #[test] | ||
60 | fn completes_use_item_starting_with_crate() { | ||
61 | check_reference_completion( | ||
62 | " | ||
63 | //- /lib.rs | ||
64 | mod foo; | ||
65 | struct Spam; | ||
66 | //- /foo.rs | ||
67 | use crate::Sp<|> | ||
68 | ", | ||
69 | "Spam;foo", | ||
70 | ); | ||
71 | } | ||
72 | |||
73 | #[test] | ||
74 | fn completes_nested_use_tree() { | ||
75 | check_reference_completion( | ||
76 | " | ||
77 | //- /lib.rs | ||
78 | mod foo; | ||
79 | struct Spam; | ||
80 | //- /foo.rs | ||
81 | use crate::{Sp<|>}; | ||
82 | ", | ||
83 | "Spam;foo", | ||
84 | ); | ||
85 | } | ||
86 | |||
87 | #[test] | ||
88 | fn completes_deeply_nested_use_tree() { | ||
89 | check_reference_completion( | ||
90 | " | ||
91 | //- /lib.rs | ||
92 | mod foo; | ||
93 | pub mod bar { | ||
94 | pub mod baz { | ||
95 | pub struct Spam; | ||
96 | } | ||
97 | } | ||
98 | //- /foo.rs | ||
99 | use crate::{bar::{baz::Sp<|>}}; | ||
100 | ", | ||
101 | "Spam", | ||
102 | ); | ||
103 | } | ||
104 | |||
105 | #[test] | ||
106 | fn completes_enum_variant() { | ||
107 | check_reference_completion( | ||
108 | " | ||
109 | //- /lib.rs | ||
110 | enum E { Foo, Bar(i32) } | ||
111 | fn foo() { let _ = E::<|> } | ||
112 | ", | ||
113 | "Foo;Bar", | ||
114 | ); | ||
115 | } | ||
116 | |||
117 | #[test] | ||
118 | fn dont_render_function_parens_in_use_item() { | ||
119 | check_reference_completion( | ||
120 | " | ||
121 | //- /lib.rs | ||
122 | mod m { pub fn foo() {} } | ||
123 | use crate::m::f<|>; | ||
124 | ", | ||
125 | "foo", | ||
126 | ) | ||
127 | } | ||
128 | } | ||
diff --git a/crates/ra_ide_api/src/completion/complete_scope.rs b/crates/ra_ide_api/src/completion/complete_scope.rs
new file mode 100644
index 000000000..ee9052d3d
--- /dev/null
+++ b/crates/ra_ide_api/src/completion/complete_scope.rs
@@ -0,0 +1,192 @@ | |||
1 | use rustc_hash::FxHashSet; | ||
2 | use ra_syntax::TextUnit; | ||
3 | |||
4 | use crate::{ | ||
5 | Cancelable, | ||
6 | completion::{CompletionItem, CompletionItemKind, Completions, CompletionKind, CompletionContext}, | ||
7 | }; | ||
8 | |||
9 | pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { | ||
10 | if !ctx.is_trivial_path { | ||
11 | return Ok(()); | ||
12 | } | ||
13 | let module = match &ctx.module { | ||
14 | Some(it) => it, | ||
15 | None => return Ok(()), | ||
16 | }; | ||
17 | if let Some(function) = &ctx.function { | ||
18 | let scopes = function.scopes(ctx.db)?; | ||
19 | complete_fn(acc, &scopes, ctx.offset); | ||
20 | } | ||
21 | |||
22 | let module_scope = module.scope(ctx.db)?; | ||
23 | let (file_id, _) = module.defenition_source(ctx.db)?; | ||
24 | module_scope | ||
25 | .entries() | ||
26 | .filter(|(_name, res)| { | ||
27 | // Don't expose this item | ||
28 | // FIXME: this penetrates through all kinds of abstractions, | ||
29 | // we need to figure out a way to do it less ugly. | ||
30 | match res.import { | ||
31 | None => true, | ||
32 | Some(import) => { | ||
33 | let range = import.range(ctx.db, file_id); | ||
34 | !range.is_subrange(&ctx.leaf.range()) | ||
35 | } | ||
36 | } | ||
37 | }) | ||
38 | .for_each(|(name, res)| { | ||
39 | CompletionItem::new(CompletionKind::Reference, name.to_string()) | ||
40 | .from_resolution(ctx, res) | ||
41 | .add_to(acc) | ||
42 | }); | ||
43 | Ok(()) | ||
44 | } | ||
45 | |||
46 | fn complete_fn(acc: &mut Completions, scopes: &hir::ScopesWithSyntaxMapping, offset: TextUnit) { | ||
47 | let mut shadowed = FxHashSet::default(); | ||
48 | scopes | ||
49 | .scope_chain_for_offset(offset) | ||
50 | .flat_map(|scope| scopes.scopes.entries(scope).iter()) | ||
51 | .filter(|entry| shadowed.insert(entry.name())) | ||
52 | .for_each(|entry| { | ||
53 | CompletionItem::new(CompletionKind::Reference, entry.name().to_string()) | ||
54 | .kind(CompletionItemKind::Binding) | ||
55 | .add_to(acc) | ||
56 | }); | ||
57 | } | ||
58 | |||
59 | #[cfg(test)] | ||
60 | mod tests { | ||
61 | use crate::completion::{CompletionKind, check_completion}; | ||
62 | |||
63 | fn check_reference_completion(code: &str, expected_completions: &str) { | ||
64 | check_completion(code, expected_completions, CompletionKind::Reference); | ||
65 | } | ||
66 | |||
67 | #[test] | ||
68 | fn completes_bindings_from_let() { | ||
69 | check_reference_completion( | ||
70 | r" | ||
71 | fn quux(x: i32) { | ||
72 | let y = 92; | ||
73 | 1 + <|>; | ||
74 | let z = (); | ||
75 | } | ||
76 | ", | ||
77 | r#"y;x;quux "quux($0)""#, | ||
78 | ); | ||
79 | } | ||
80 | |||
81 | #[test] | ||
82 | fn completes_bindings_from_if_let() { | ||
83 | check_reference_completion( | ||
84 | r" | ||
85 | fn quux() { | ||
86 | if let Some(x) = foo() { | ||
87 | let y = 92; | ||
88 | }; | ||
89 | if let Some(a) = bar() { | ||
90 | let b = 62; | ||
91 | 1 + <|> | ||
92 | } | ||
93 | } | ||
94 | ", | ||
95 | r#"b;a;quux "quux()$0""#, | ||
96 | ); | ||
97 | } | ||
98 | |||
99 | #[test] | ||
100 | fn completes_bindings_from_for() { | ||
101 | check_reference_completion( | ||
102 | r" | ||
103 | fn quux() { | ||
104 | for x in &[1, 2, 3] { | ||
105 | <|> | ||
106 | } | ||
107 | } | ||
108 | ", | ||
109 | r#"x;quux "quux()$0""#, | ||
110 | ); | ||
111 | } | ||
112 | |||
113 | #[test] | ||
114 | fn completes_module_items() { | ||
115 | check_reference_completion( | ||
116 | r" | ||
117 | struct Foo; | ||
118 | enum Baz {} | ||
119 | fn quux() { | ||
120 | <|> | ||
121 | } | ||
122 | ", | ||
123 | r#"quux "quux()$0";Foo;Baz"#, | ||
124 | ); | ||
125 | } | ||
126 | |||
127 | #[test] | ||
128 | fn completes_module_items_in_nested_modules() { | ||
129 | check_reference_completion( | ||
130 | r" | ||
131 | struct Foo; | ||
132 | mod m { | ||
133 | struct Bar; | ||
134 | fn quux() { <|> } | ||
135 | } | ||
136 | ", | ||
137 | r#"quux "quux()$0";Bar"#, | ||
138 | ); | ||
139 | } | ||
140 | |||
141 | #[test] | ||
142 | fn completes_return_type() { | ||
143 | check_reference_completion( | ||
144 | r" | ||
145 | struct Foo; | ||
146 | fn x() -> <|> | ||
147 | ", | ||
148 | r#"Foo;x "x()$0""#, | ||
149 | ) | ||
150 | } | ||
151 | |||
152 | #[test] | ||
153 | fn dont_show_both_completions_for_shadowing() { | ||
154 | check_reference_completion( | ||
155 | r" | ||
156 | fn foo() -> { | ||
157 | let bar = 92; | ||
158 | { | ||
159 | let bar = 62; | ||
160 | <|> | ||
161 | } | ||
162 | } | ||
163 | ", | ||
164 | r#"bar;foo "foo()$0""#, | ||
165 | ) | ||
166 | } | ||
167 | |||
168 | #[test] | ||
169 | fn completes_self_in_methods() { | ||
170 | check_reference_completion(r"impl S { fn foo(&self) { <|> } }", "self") | ||
171 | } | ||
172 | |||
173 | #[test] | ||
174 | fn inserts_parens_for_function_calls() { | ||
175 | check_reference_completion( | ||
176 | r" | ||
177 | fn no_args() {} | ||
178 | fn main() { no_<|> } | ||
179 | ", | ||
180 | r#"no_args "no_args()$0" | ||
181 | main "main()$0""#, | ||
182 | ); | ||
183 | check_reference_completion( | ||
184 | r" | ||
185 | fn with_args(x: i32, y: String) {} | ||
186 | fn main() { with_<|> } | ||
187 | ", | ||
188 | r#"main "main()$0" | ||
189 | with_args "with_args($0)""#, | ||
190 | ); | ||
191 | } | ||
192 | } | ||
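The shadowing behaviour exercised by `dont_show_both_completions_for_shadowing` falls out of `complete_fn` above: scopes are walked from the innermost outward and a name is emitted only the first time it is seen (`shadowed.insert(entry.name())`). The same first-wins deduplication as a small sketch (the names here are illustrative):

```rust
use std::collections::HashSet;

// Keep the first occurrence of each name; later (outer-scope) duplicates are
// dropped, so an inner `let bar` shadows an outer one.
fn visible_names<'a>(innermost_first: impl Iterator<Item = &'a str>) -> Vec<&'a str> {
    let mut seen = HashSet::new();
    innermost_first.filter(|name| seen.insert(*name)).collect()
}

fn main() {
    let names = ["bar", "bar", "foo"]; // inner bar, outer bar, fn foo
    assert_eq!(visible_names(names.iter().copied()), vec!["bar", "foo"]);
}
```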
diff --git a/crates/ra_ide_api/src/completion/complete_snippet.rs b/crates/ra_ide_api/src/completion/complete_snippet.rs
new file mode 100644
index 000000000..a495751dd
--- /dev/null
+++ b/crates/ra_ide_api/src/completion/complete_snippet.rs
@@ -0,0 +1,73 @@ | |||
1 | use crate::completion::{CompletionItem, Completions, CompletionKind, CompletionItemKind, CompletionContext, completion_item::Builder}; | ||
2 | |||
3 | fn snippet(label: &str, snippet: &str) -> Builder { | ||
4 | CompletionItem::new(CompletionKind::Snippet, label) | ||
5 | .snippet(snippet) | ||
6 | .kind(CompletionItemKind::Snippet) | ||
7 | } | ||
8 | |||
9 | pub(super) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) { | ||
10 | if !(ctx.is_trivial_path && ctx.function_syntax.is_some()) { | ||
11 | return; | ||
12 | } | ||
13 | snippet("pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc); | ||
14 | snippet("ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc); | ||
15 | } | ||
16 | |||
17 | pub(super) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) { | ||
18 | if !ctx.is_new_item { | ||
19 | return; | ||
20 | } | ||
21 | snippet( | ||
22 | "Test function", | ||
23 | "\ | ||
24 | #[test] | ||
25 | fn ${1:feature}() { | ||
26 | $0 | ||
27 | }", | ||
28 | ) | ||
29 | .lookup_by("tfn") | ||
30 | .add_to(acc); | ||
31 | |||
32 | snippet("pub(crate)", "pub(crate) $0").add_to(acc); | ||
33 | } | ||
34 | |||
35 | #[cfg(test)] | ||
36 | mod tests { | ||
37 | use crate::completion::{CompletionKind, check_completion}; | ||
38 | fn check_snippet_completion(code: &str, expected_completions: &str) { | ||
39 | check_completion(code, expected_completions, CompletionKind::Snippet); | ||
40 | } | ||
41 | |||
42 | #[test] | ||
43 | fn completes_snippets_in_expressions() { | ||
44 | check_snippet_completion( | ||
45 | r"fn foo(x: i32) { <|> }", | ||
46 | r##" | ||
47 | pd "eprintln!(\"$0 = {:?}\", $0);" | ||
48 | ppd "eprintln!(\"$0 = {:#?}\", $0);" | ||
49 | "##, | ||
50 | ); | ||
51 | } | ||
52 | |||
53 | #[test] | ||
54 | fn completes_snippets_in_items() { | ||
55 | // check_snippet_completion(r" | ||
56 | // <|> | ||
57 | // ", | ||
58 | // r##"[CompletionItem { label: "Test function", lookup: None, snippet: Some("#[test]\nfn test_${1:feature}() {\n$0\n}"##, | ||
59 | // ); | ||
60 | check_snippet_completion( | ||
61 | r" | ||
62 | #[cfg(test)] | ||
63 | mod tests { | ||
64 | <|> | ||
65 | } | ||
66 | ", | ||
67 | r##" | ||
68 | tfn "Test function" "#[test]\nfn ${1:feature}() {\n $0\n}" | ||
69 | pub(crate) "pub(crate) $0" | ||
70 | "##, | ||
71 | ); | ||
72 | } | ||
73 | } | ||
diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs
new file mode 100644
index 000000000..01786bb69
--- /dev/null
+++ b/crates/ra_ide_api/src/completion/completion_context.rs
@@ -0,0 +1,205 @@ | |||
1 | use ra_text_edit::AtomTextEdit; | ||
2 | use ra_syntax::{ | ||
3 | AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, | ||
4 | ast, | ||
5 | algo::{find_leaf_at_offset, find_covering_node, find_node_at_offset}, | ||
6 | SyntaxKind::*, | ||
7 | }; | ||
8 | use hir::source_binder; | ||
9 | |||
10 | use crate::{db, FilePosition, Cancelable}; | ||
11 | |||
12 | /// `CompletionContext` is created early during completion to figure out where | ||
13 | /// exactly the cursor is, syntax-wise. | ||
14 | #[derive(Debug)] | ||
15 | pub(super) struct CompletionContext<'a> { | ||
16 | pub(super) db: &'a db::RootDatabase, | ||
17 | pub(super) offset: TextUnit, | ||
18 | pub(super) leaf: &'a SyntaxNode, | ||
19 | pub(super) module: Option<hir::Module>, | ||
20 | pub(super) function: Option<hir::Function>, | ||
21 | pub(super) function_syntax: Option<&'a ast::FnDef>, | ||
22 | pub(super) use_item_syntax: Option<&'a ast::UseItem>, | ||
23 | pub(super) is_param: bool, | ||
24 | /// A single-ident path, like `foo`. | ||
25 | pub(super) is_trivial_path: bool, | ||
26 | /// If not a trivial path, the prefix (qualifier). | ||
27 | pub(super) path_prefix: Option<hir::Path>, | ||
28 | pub(super) after_if: bool, | ||
29 | /// `true` if we are a statement or the last expr in the block. | ||
30 | pub(super) can_be_stmt: bool, | ||
31 | /// Something is typed at the "top" level, in module or impl/trait. | ||
32 | pub(super) is_new_item: bool, | ||
33 | /// The receiver if this is a field or method access, i.e. writing something.<|> | ||
34 | pub(super) dot_receiver: Option<&'a ast::Expr>, | ||
35 | /// If this is a method call in particular, i.e. the () are already there. | ||
36 | pub(super) is_method_call: bool, | ||
37 | } | ||
38 | |||
39 | impl<'a> CompletionContext<'a> { | ||
40 | pub(super) fn new( | ||
41 | db: &'a db::RootDatabase, | ||
42 | original_file: &'a SourceFile, | ||
43 | position: FilePosition, | ||
44 | ) -> Cancelable<Option<CompletionContext<'a>>> { | ||
45 | let module = source_binder::module_from_position(db, position)?; | ||
46 | let leaf = | ||
47 | ctry!(find_leaf_at_offset(original_file.syntax(), position.offset).left_biased()); | ||
48 | let mut ctx = CompletionContext { | ||
49 | db, | ||
50 | leaf, | ||
51 | offset: position.offset, | ||
52 | module, | ||
53 | function: None, | ||
54 | function_syntax: None, | ||
55 | use_item_syntax: None, | ||
56 | is_param: false, | ||
57 | is_trivial_path: false, | ||
58 | path_prefix: None, | ||
59 | after_if: false, | ||
60 | can_be_stmt: false, | ||
61 | is_new_item: false, | ||
62 | dot_receiver: None, | ||
63 | is_method_call: false, | ||
64 | }; | ||
65 | ctx.fill(original_file, position.offset); | ||
66 | Ok(Some(ctx)) | ||
67 | } | ||
68 | |||
69 | fn fill(&mut self, original_file: &'a SourceFile, offset: TextUnit) { | ||
70 | // Insert a fake ident to get a valid parse tree. We will use this file | ||
71 | // to determine context, though the original_file will be used for | ||
72 | // actual completion. | ||
73 | let file = { | ||
74 | let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string()); | ||
75 | original_file.reparse(&edit) | ||
76 | }; | ||
77 | |||
78 | // First, let's try to complete a reference to some declaration. | ||
79 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) { | ||
80 | // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. | ||
81 | // See RFC#1685. | ||
82 | if is_node::<ast::Param>(name_ref.syntax()) { | ||
83 | self.is_param = true; | ||
84 | return; | ||
85 | } | ||
86 | self.classify_name_ref(original_file, name_ref); | ||
87 | } | ||
88 | |||
89 | // Otherwise, see if this is a declaration. We can use heuristics to | ||
90 | // suggest declaration names, see `CompletionKind::Magic`. | ||
91 | if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) { | ||
92 | if is_node::<ast::Param>(name.syntax()) { | ||
93 | self.is_param = true; | ||
94 | return; | ||
95 | } | ||
96 | } | ||
97 | } | ||
98 | fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) { | ||
99 | let name_range = name_ref.syntax().range(); | ||
100 | let top_node = name_ref | ||
101 | .syntax() | ||
102 | .ancestors() | ||
103 | .take_while(|it| it.range() == name_range) | ||
104 | .last() | ||
105 | .unwrap(); | ||
106 | |||
107 | match top_node.parent().map(|it| it.kind()) { | ||
108 | Some(SOURCE_FILE) | Some(ITEM_LIST) => { | ||
109 | self.is_new_item = true; | ||
110 | return; | ||
111 | } | ||
112 | _ => (), | ||
113 | } | ||
114 | |||
115 | self.use_item_syntax = self.leaf.ancestors().find_map(ast::UseItem::cast); | ||
116 | |||
117 | self.function_syntax = self | ||
118 | .leaf | ||
119 | .ancestors() | ||
120 | .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) | ||
121 | .find_map(ast::FnDef::cast); | ||
122 | match (&self.module, self.function_syntax) { | ||
123 | (Some(module), Some(fn_def)) => { | ||
124 | let function = source_binder::function_from_module(self.db, module, fn_def); | ||
125 | self.function = Some(function); | ||
126 | } | ||
127 | _ => (), | ||
128 | } | ||
129 | |||
130 | let parent = match name_ref.syntax().parent() { | ||
131 | Some(it) => it, | ||
132 | None => return, | ||
133 | }; | ||
134 | if let Some(segment) = ast::PathSegment::cast(parent) { | ||
135 | let path = segment.parent_path(); | ||
136 | if let Some(mut path) = hir::Path::from_ast(path) { | ||
137 | if !path.is_ident() { | ||
138 | path.segments.pop().unwrap(); | ||
139 | self.path_prefix = Some(path); | ||
140 | return; | ||
141 | } | ||
142 | } | ||
143 | if path.qualifier().is_none() { | ||
144 | self.is_trivial_path = true; | ||
145 | |||
146 | // Find either the enclosing expr statement (thing with `;`) or a | ||
147 | // block. If it's a block, check that we are the last expr. | ||
148 | self.can_be_stmt = name_ref | ||
149 | .syntax() | ||
150 | .ancestors() | ||
151 | .find_map(|node| { | ||
152 | if let Some(stmt) = ast::ExprStmt::cast(node) { | ||
153 | return Some(stmt.syntax().range() == name_ref.syntax().range()); | ||
154 | } | ||
155 | if let Some(block) = ast::Block::cast(node) { | ||
156 | return Some( | ||
157 | block.expr().map(|e| e.syntax().range()) | ||
158 | == Some(name_ref.syntax().range()), | ||
159 | ); | ||
160 | } | ||
161 | None | ||
162 | }) | ||
163 | .unwrap_or(false); | ||
164 | |||
165 | if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) { | ||
166 | if let Some(if_expr) = | ||
167 | find_node_at_offset::<ast::IfExpr>(original_file.syntax(), off) | ||
168 | { | ||
169 | if if_expr.syntax().range().end() < name_ref.syntax().range().start() { | ||
170 | self.after_if = true; | ||
171 | } | ||
172 | } | ||
173 | } | ||
174 | } | ||
175 | } | ||
176 | if let Some(field_expr) = ast::FieldExpr::cast(parent) { | ||
177 | // The receiver comes before the point of insertion of the fake | ||
178 | // ident, so it should have the same range in the non-modified file | ||
179 | self.dot_receiver = field_expr | ||
180 | .expr() | ||
181 | .map(|e| e.syntax().range()) | ||
182 | .and_then(|r| find_node_with_range(original_file.syntax(), r)); | ||
183 | } | ||
184 | if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) { | ||
185 | // As above | ||
186 | self.dot_receiver = method_call_expr | ||
187 | .expr() | ||
188 | .map(|e| e.syntax().range()) | ||
189 | .and_then(|r| find_node_with_range(original_file.syntax(), r)); | ||
190 | self.is_method_call = true; | ||
191 | } | ||
192 | } | ||
193 | } | ||
194 | |||
195 | fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> { | ||
196 | let node = find_covering_node(syntax, range); | ||
197 | node.ancestors().find_map(N::cast) | ||
198 | } | ||
199 | |||
200 | fn is_node<N: AstNode>(node: &SyntaxNode) -> bool { | ||
201 | match node.ancestors().filter_map(N::cast).next() { | ||
202 | None => false, | ||
203 | Some(n) => n.syntax().range() == node.range(), | ||
204 | } | ||
205 | } | ||
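Aside (not part of this commit): `CompletionContext` is filled once per request, and the individual completion routines then branch on its flags. A minimal sketch of such a consumer, assuming it lives in a sibling module of the context (the routine name, the import path, and the `Completions` collector wiring are illustrative only; the flag itself is defined above):

    // Hypothetical routine; only `CompletionContext::is_param` comes from the
    // code above, everything else is an assumed harness.
    use crate::completion::{completion_item::Completions, CompletionContext};

    pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) {
        // `fill` sets `is_param` when the cursor sits inside a parameter list
        // (the RFC#1685 special case), so bail out early otherwise.
        if !ctx.is_param {
            return;
        }
        // ... suggest parameter names here and push them into `acc` ...
    }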
diff --git a/crates/ra_ide_api/src/completion/completion_item.rs b/crates/ra_ide_api/src/completion/completion_item.rs new file mode 100644 index 000000000..a25b87bee --- /dev/null +++ b/crates/ra_ide_api/src/completion/completion_item.rs | |||
@@ -0,0 +1,244 @@ | |||
1 | use hir::PerNs; | ||
2 | |||
3 | use crate::completion::CompletionContext; | ||
4 | |||
5 | /// `CompletionItem` describes a single completion variant in the editor pop-up. | ||
6 | /// It is basically a POD with various properties. To construct a | ||
7 | /// `CompletionItem`, use the `new` method and the `Builder` struct. | ||
8 | #[derive(Debug)] | ||
9 | pub struct CompletionItem { | ||
10 | /// Used only internally in tests, to check only a specific kind of | ||
11 | /// completion. | ||
12 | completion_kind: CompletionKind, | ||
13 | label: String, | ||
14 | lookup: Option<String>, | ||
15 | snippet: Option<String>, | ||
16 | kind: Option<CompletionItemKind>, | ||
17 | } | ||
18 | |||
19 | pub enum InsertText { | ||
20 | PlainText { text: String }, | ||
21 | Snippet { text: String }, | ||
22 | } | ||
23 | |||
24 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] | ||
25 | pub enum CompletionItemKind { | ||
26 | Snippet, | ||
27 | Keyword, | ||
28 | Module, | ||
29 | Function, | ||
30 | Struct, | ||
31 | Enum, | ||
32 | EnumVariant, | ||
33 | Binding, | ||
34 | Field, | ||
35 | } | ||
36 | |||
37 | #[derive(Debug, PartialEq, Eq)] | ||
38 | pub(crate) enum CompletionKind { | ||
39 | /// Parser-based keyword completion. | ||
40 | Keyword, | ||
41 | /// Your usual "complete all valid identifiers". | ||
42 | Reference, | ||
43 | /// "Secret sauce" completions. | ||
44 | Magic, | ||
45 | Snippet, | ||
46 | } | ||
47 | |||
48 | impl CompletionItem { | ||
49 | pub(crate) fn new(completion_kind: CompletionKind, label: impl Into<String>) -> Builder { | ||
50 | let label = label.into(); | ||
51 | Builder { | ||
52 | completion_kind, | ||
53 | label, | ||
54 | lookup: None, | ||
55 | snippet: None, | ||
56 | kind: None, | ||
57 | } | ||
58 | } | ||
59 | /// What the user sees in the pop-up in the UI. | ||
60 | pub fn label(&self) -> &str { | ||
61 | &self.label | ||
62 | } | ||
63 | /// What string is used for filtering. | ||
64 | pub fn lookup(&self) -> &str { | ||
65 | self.lookup | ||
66 | .as_ref() | ||
67 | .map(|it| it.as_str()) | ||
68 | .unwrap_or(self.label()) | ||
69 | } | ||
70 | /// What is inserted. | ||
71 | pub fn insert_text(&self) -> InsertText { | ||
72 | match &self.snippet { | ||
73 | None => InsertText::PlainText { | ||
74 | text: self.label.clone(), | ||
75 | }, | ||
76 | Some(it) => InsertText::Snippet { text: it.clone() }, | ||
77 | } | ||
78 | } | ||
79 | |||
80 | pub fn kind(&self) -> Option<CompletionItemKind> { | ||
81 | self.kind | ||
82 | } | ||
83 | } | ||
84 | |||
85 | /// A helper to make `CompletionItem`s. | ||
86 | #[must_use] | ||
87 | pub(crate) struct Builder { | ||
88 | completion_kind: CompletionKind, | ||
89 | label: String, | ||
90 | lookup: Option<String>, | ||
91 | snippet: Option<String>, | ||
92 | kind: Option<CompletionItemKind>, | ||
93 | } | ||
94 | |||
95 | impl Builder { | ||
96 | pub(crate) fn add_to(self, acc: &mut Completions) { | ||
97 | acc.add(self.build()) | ||
98 | } | ||
99 | |||
100 | pub(crate) fn build(self) -> CompletionItem { | ||
101 | CompletionItem { | ||
102 | label: self.label, | ||
103 | lookup: self.lookup, | ||
104 | snippet: self.snippet, | ||
105 | kind: self.kind, | ||
106 | completion_kind: self.completion_kind, | ||
107 | } | ||
108 | } | ||
109 | pub(crate) fn lookup_by(mut self, lookup: impl Into<String>) -> Builder { | ||
110 | self.lookup = Some(lookup.into()); | ||
111 | self | ||
112 | } | ||
113 | pub(crate) fn snippet(mut self, snippet: impl Into<String>) -> Builder { | ||
114 | self.snippet = Some(snippet.into()); | ||
115 | self | ||
116 | } | ||
117 | pub(crate) fn kind(mut self, kind: CompletionItemKind) -> Builder { | ||
118 | self.kind = Some(kind); | ||
119 | self | ||
120 | } | ||
121 | pub(super) fn from_resolution( | ||
122 | mut self, | ||
123 | ctx: &CompletionContext, | ||
124 | resolution: &hir::Resolution, | ||
125 | ) -> Builder { | ||
126 | let resolved = resolution.def_id.and_then(|d| d.resolve(ctx.db).ok()); | ||
127 | let kind = match resolved { | ||
128 | PerNs { | ||
129 | types: Some(hir::Def::Module(..)), | ||
130 | .. | ||
131 | } => CompletionItemKind::Module, | ||
132 | PerNs { | ||
133 | types: Some(hir::Def::Struct(..)), | ||
134 | .. | ||
135 | } => CompletionItemKind::Struct, | ||
136 | PerNs { | ||
137 | types: Some(hir::Def::Enum(..)), | ||
138 | .. | ||
139 | } => CompletionItemKind::Enum, | ||
140 | PerNs { | ||
141 | values: Some(hir::Def::Function(function)), | ||
142 | .. | ||
143 | } => return self.from_function(ctx, function), | ||
144 | _ => return self, | ||
145 | }; | ||
146 | self.kind = Some(kind); | ||
147 | self | ||
148 | } | ||
149 | |||
150 | fn from_function(mut self, ctx: &CompletionContext, function: hir::Function) -> Builder { | ||
151 | // If not an import, add parenthesis automatically. | ||
152 | if ctx.use_item_syntax.is_none() { | ||
153 | if function.signature(ctx.db).args().is_empty() { | ||
154 | self.snippet = Some(format!("{}()$0", self.label)); | ||
155 | } else { | ||
156 | self.snippet = Some(format!("{}($0)", self.label)); | ||
157 | } | ||
158 | } | ||
159 | self.kind = Some(CompletionItemKind::Function); | ||
160 | self | ||
161 | } | ||
162 | } | ||
163 | |||
164 | impl Into<CompletionItem> for Builder { | ||
165 | fn into(self) -> CompletionItem { | ||
166 | self.build() | ||
167 | } | ||
168 | } | ||
169 | |||
170 | /// Represents an in-progress set of completions being built. | ||
171 | #[derive(Debug, Default)] | ||
172 | pub(crate) struct Completions { | ||
173 | buf: Vec<CompletionItem>, | ||
174 | } | ||
175 | |||
176 | impl Completions { | ||
177 | pub(crate) fn add(&mut self, item: impl Into<CompletionItem>) { | ||
178 | self.buf.push(item.into()) | ||
179 | } | ||
180 | pub(crate) fn add_all<I>(&mut self, items: I) | ||
181 | where | ||
182 | I: IntoIterator, | ||
183 | I::Item: Into<CompletionItem>, | ||
184 | { | ||
185 | items.into_iter().for_each(|item| self.add(item.into())) | ||
186 | } | ||
187 | |||
188 | #[cfg(test)] | ||
189 | pub(crate) fn assert_match(&self, expected: &str, kind: CompletionKind) { | ||
190 | let expected = normalize(expected); | ||
191 | let actual = self.debug_render(kind); | ||
192 | test_utils::assert_eq_text!(expected.as_str(), actual.as_str(),); | ||
193 | |||
194 | /// Normalize the textual representation of `Completions`: | ||
195 | /// replace `;` with newlines, normalize whitespace | ||
196 | fn normalize(expected: &str) -> String { | ||
197 | use ra_syntax::{tokenize, TextUnit, TextRange, SyntaxKind::SEMI}; | ||
198 | let mut res = String::new(); | ||
199 | for line in expected.trim().lines() { | ||
200 | let line = line.trim(); | ||
201 | let mut start_offset: TextUnit = 0.into(); | ||
202 | // Yep, we use the Rust tokenizer in completion tests :-) | ||
203 | for token in tokenize(line) { | ||
204 | let range = TextRange::offset_len(start_offset, token.len); | ||
205 | start_offset += token.len; | ||
206 | if token.kind == SEMI { | ||
207 | res.push('\n'); | ||
208 | } else { | ||
209 | res.push_str(&line[range]); | ||
210 | } | ||
211 | } | ||
212 | |||
213 | res.push('\n'); | ||
214 | } | ||
215 | res | ||
216 | } | ||
217 | } | ||
218 | |||
219 | #[cfg(test)] | ||
220 | fn debug_render(&self, kind: CompletionKind) -> String { | ||
221 | let mut res = String::new(); | ||
222 | for c in self.buf.iter() { | ||
223 | if c.completion_kind == kind { | ||
224 | if let Some(lookup) = &c.lookup { | ||
225 | res.push_str(lookup); | ||
226 | res.push_str(&format!(" {:?}", c.label)); | ||
227 | } else { | ||
228 | res.push_str(&c.label); | ||
229 | } | ||
230 | if let Some(snippet) = &c.snippet { | ||
231 | res.push_str(&format!(" {:?}", snippet)); | ||
232 | } | ||
233 | res.push('\n'); | ||
234 | } | ||
235 | } | ||
236 | res | ||
237 | } | ||
238 | } | ||
239 | |||
240 | impl Into<Vec<CompletionItem>> for Completions { | ||
241 | fn into(self) -> Vec<CompletionItem> { | ||
242 | self.buf | ||
243 | } | ||
244 | } | ||
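Aside (not part of this commit): a minimal sketch of how the `Builder` above is meant to be used. Only the builder calls are taken from this file; the free function, the label, the snippet text, and the exact import path are illustrative assumptions:

    use crate::completion::completion_item::{
        CompletionItem, CompletionItemKind, CompletionKind, Completions,
    };

    fn add_foo(acc: &mut Completions) {
        // `new` returns a #[must_use] Builder; `add_to` builds the item and
        // pushes it into the accumulator.
        CompletionItem::new(CompletionKind::Reference, "foo")
            .kind(CompletionItemKind::Function)
            .snippet("foo($0)") // `$0` marks the cursor position inside the snippet
            .add_to(acc);
    }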
diff --git a/crates/ra_ide_api/src/db.rs b/crates/ra_ide_api/src/db.rs new file mode 100644 index 000000000..9d46609ec --- /dev/null +++ b/crates/ra_ide_api/src/db.rs | |||
@@ -0,0 +1,128 @@ | |||
1 | use std::{fmt, sync::Arc}; | ||
2 | |||
3 | use salsa::{self, Database}; | ||
4 | use ra_db::{LocationIntener, BaseDatabase, FileId}; | ||
5 | |||
6 | use crate::{symbol_index, LineIndex}; | ||
7 | |||
8 | #[derive(Debug)] | ||
9 | pub(crate) struct RootDatabase { | ||
10 | runtime: salsa::Runtime<RootDatabase>, | ||
11 | id_maps: Arc<IdMaps>, | ||
12 | } | ||
13 | |||
14 | #[derive(Default)] | ||
15 | struct IdMaps { | ||
16 | defs: LocationIntener<hir::DefLoc, hir::DefId>, | ||
17 | macros: LocationIntener<hir::MacroCallLoc, hir::MacroCallId>, | ||
18 | } | ||
19 | |||
20 | impl fmt::Debug for IdMaps { | ||
21 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
22 | f.debug_struct("IdMaps") | ||
23 | .field("n_defs", &self.defs.len()) | ||
24 | .finish() | ||
25 | } | ||
26 | } | ||
27 | |||
28 | impl salsa::Database for RootDatabase { | ||
29 | fn salsa_runtime(&self) -> &salsa::Runtime<RootDatabase> { | ||
30 | &self.runtime | ||
31 | } | ||
32 | } | ||
33 | |||
34 | impl Default for RootDatabase { | ||
35 | fn default() -> RootDatabase { | ||
36 | let mut db = RootDatabase { | ||
37 | runtime: salsa::Runtime::default(), | ||
38 | id_maps: Default::default(), | ||
39 | }; | ||
40 | db.query_mut(ra_db::CrateGraphQuery) | ||
41 | .set((), Default::default()); | ||
42 | db.query_mut(ra_db::LocalRootsQuery) | ||
43 | .set((), Default::default()); | ||
44 | db.query_mut(ra_db::LibraryRootsQuery) | ||
45 | .set((), Default::default()); | ||
46 | db | ||
47 | } | ||
48 | } | ||
49 | |||
50 | impl salsa::ParallelDatabase for RootDatabase { | ||
51 | fn snapshot(&self) -> salsa::Snapshot<RootDatabase> { | ||
52 | salsa::Snapshot::new(RootDatabase { | ||
53 | runtime: self.runtime.snapshot(self), | ||
54 | id_maps: self.id_maps.clone(), | ||
55 | }) | ||
56 | } | ||
57 | } | ||
58 | |||
59 | impl BaseDatabase for RootDatabase {} | ||
60 | |||
61 | impl AsRef<LocationIntener<hir::DefLoc, hir::DefId>> for RootDatabase { | ||
62 | fn as_ref(&self) -> &LocationIntener<hir::DefLoc, hir::DefId> { | ||
63 | &self.id_maps.defs | ||
64 | } | ||
65 | } | ||
66 | |||
67 | impl AsRef<LocationIntener<hir::MacroCallLoc, hir::MacroCallId>> for RootDatabase { | ||
68 | fn as_ref(&self) -> &LocationIntener<hir::MacroCallLoc, hir::MacroCallId> { | ||
69 | &self.id_maps.macros | ||
70 | } | ||
71 | } | ||
72 | |||
73 | salsa::query_group! { | ||
74 | pub(crate) trait LineIndexDatabase: ra_db::FilesDatabase + BaseDatabase { | ||
75 | fn line_index(file_id: FileId) -> Arc<LineIndex> { | ||
76 | type LineIndexQuery; | ||
77 | } | ||
78 | } | ||
79 | } | ||
80 | |||
81 | fn line_index(db: &impl ra_db::FilesDatabase, file_id: FileId) -> Arc<LineIndex> { | ||
82 | let text = db.file_text(file_id); | ||
83 | Arc::new(LineIndex::new(&*text)) | ||
84 | } | ||
85 | |||
86 | salsa::database_storage! { | ||
87 | pub(crate) struct RootDatabaseStorage for RootDatabase { | ||
88 | impl ra_db::FilesDatabase { | ||
89 | fn file_text() for ra_db::FileTextQuery; | ||
90 | fn file_relative_path() for ra_db::FileRelativePathQuery; | ||
91 | fn file_source_root() for ra_db::FileSourceRootQuery; | ||
92 | fn source_root() for ra_db::SourceRootQuery; | ||
93 | fn local_roots() for ra_db::LocalRootsQuery; | ||
94 | fn library_roots() for ra_db::LibraryRootsQuery; | ||
95 | fn crate_graph() for ra_db::CrateGraphQuery; | ||
96 | } | ||
97 | impl ra_db::SyntaxDatabase { | ||
98 | fn source_file() for ra_db::SourceFileQuery; | ||
99 | } | ||
100 | impl LineIndexDatabase { | ||
101 | fn line_index() for LineIndexQuery; | ||
102 | } | ||
103 | impl symbol_index::SymbolsDatabase { | ||
104 | fn file_symbols() for symbol_index::FileSymbolsQuery; | ||
105 | fn library_symbols() for symbol_index::LibrarySymbolsQuery; | ||
106 | } | ||
107 | impl hir::db::HirDatabase { | ||
108 | fn hir_source_file() for hir::db::HirSourceFileQuery; | ||
109 | fn expand_macro_invocation() for hir::db::ExpandMacroCallQuery; | ||
110 | fn module_tree() for hir::db::ModuleTreeQuery; | ||
111 | fn fn_scopes() for hir::db::FnScopesQuery; | ||
112 | fn file_items() for hir::db::SourceFileItemsQuery; | ||
113 | fn file_item() for hir::db::FileItemQuery; | ||
114 | fn input_module_items() for hir::db::InputModuleItemsQuery; | ||
115 | fn item_map() for hir::db::ItemMapQuery; | ||
116 | fn submodules() for hir::db::SubmodulesQuery; | ||
117 | fn infer() for hir::db::InferQuery; | ||
118 | fn type_for_def() for hir::db::TypeForDefQuery; | ||
119 | fn type_for_field() for hir::db::TypeForFieldQuery; | ||
120 | fn struct_data() for hir::db::StructDataQuery; | ||
121 | fn enum_data() for hir::db::EnumDataQuery; | ||
122 | fn impls_in_module() for hir::db::ImplsInModuleQuery; | ||
123 | fn body_hir() for hir::db::BodyHirQuery; | ||
124 | fn body_syntax_mapping() for hir::db::BodySyntaxMappingQuery; | ||
125 | fn fn_signature() for hir::db::FnSignatureQuery; | ||
126 | } | ||
127 | } | ||
128 | } | ||
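Aside (not part of this commit): the `line_index` query declared above is consumed through the `LineIndexDatabase` trait like any other salsa query, and its result stays memoized until `file_text` for that file changes. A hedged sketch, assuming `LineIndex::line_col` from `ra_ide_api_light` (both `LineIndex` and `LineCol` are re-exported by this crate's `lib.rs`):

    use crate::db::{LineIndexDatabase, RootDatabase};
    use ra_db::FileId;
    use ra_ide_api_light::LineCol;
    use ra_syntax::TextUnit;

    // Translate an absolute offset into a line/column pair via the memoized query.
    fn line_col_of(db: &RootDatabase, file_id: FileId, offset: TextUnit) -> LineCol {
        let index = db.line_index(file_id);
        index.line_col(offset)
    }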
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs new file mode 100644 index 000000000..c3c809c9f --- /dev/null +++ b/crates/ra_ide_api/src/extend_selection.rs | |||
@@ -0,0 +1,56 @@ | |||
1 | use ra_db::SyntaxDatabase; | ||
2 | use ra_syntax::{ | ||
3 | SyntaxNode, AstNode, SourceFile, | ||
4 | ast, algo::find_covering_node, | ||
5 | }; | ||
6 | |||
7 | use crate::{ | ||
8 | TextRange, FileRange, | ||
9 | db::RootDatabase, | ||
10 | }; | ||
11 | |||
12 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { | ||
13 | let source_file = db.source_file(frange.file_id); | ||
14 | if let Some(range) = extend_selection_in_macro(db, &source_file, frange) { | ||
15 | return range; | ||
16 | } | ||
17 | ra_ide_api_light::extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range) | ||
18 | } | ||
19 | |||
20 | fn extend_selection_in_macro( | ||
21 | _db: &RootDatabase, | ||
22 | source_file: &SourceFile, | ||
23 | frange: FileRange, | ||
24 | ) -> Option<TextRange> { | ||
25 | let macro_call = find_macro_call(source_file.syntax(), frange.range)?; | ||
26 | let (off, exp) = hir::MacroDef::ast_expand(macro_call)?; | ||
27 | let dst_range = exp.map_range_forward(frange.range - off)?; | ||
28 | let dst_range = ra_ide_api_light::extend_selection(&exp.syntax(), dst_range)?; | ||
29 | let src_range = exp.map_range_back(dst_range)? + off; | ||
30 | Some(src_range) | ||
31 | } | ||
32 | |||
33 | fn find_macro_call(node: &SyntaxNode, range: TextRange) -> Option<&ast::MacroCall> { | ||
34 | find_covering_node(node, range) | ||
35 | .ancestors() | ||
36 | .find_map(ast::MacroCall::cast) | ||
37 | } | ||
38 | |||
39 | #[cfg(test)] | ||
40 | mod tests { | ||
41 | use crate::mock_analysis::single_file_with_range; | ||
42 | use test_utils::assert_eq_dbg; | ||
43 | |||
44 | #[test] | ||
45 | fn extend_selection_inside_macros() { | ||
46 | let (analysis, frange) = single_file_with_range( | ||
47 | " | ||
48 | fn main() { | ||
49 | ctry!(foo(|x| <|>x<|>)); | ||
50 | } | ||
51 | ", | ||
52 | ); | ||
53 | let r = analysis.extend_selection(frange); | ||
54 | assert_eq_dbg("[51; 56)", &r); | ||
55 | } | ||
56 | } | ||
diff --git a/crates/ra_ide_api/src/goto_defenition.rs b/crates/ra_ide_api/src/goto_defenition.rs new file mode 100644 index 000000000..fcd8d315e --- /dev/null +++ b/crates/ra_ide_api/src/goto_defenition.rs | |||
@@ -0,0 +1,139 @@ | |||
1 | use ra_db::{FileId, Cancelable, SyntaxDatabase}; | ||
2 | use ra_syntax::{ | ||
3 | TextRange, AstNode, ast, SyntaxKind::{NAME, MODULE}, | ||
4 | algo::find_node_at_offset, | ||
5 | }; | ||
6 | |||
7 | use crate::{FilePosition, NavigationTarget, db::RootDatabase}; | ||
8 | |||
9 | pub(crate) fn goto_defenition( | ||
10 | db: &RootDatabase, | ||
11 | position: FilePosition, | ||
12 | ) -> Cancelable<Option<Vec<NavigationTarget>>> { | ||
13 | let file = db.source_file(position.file_id); | ||
14 | let syntax = file.syntax(); | ||
15 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { | ||
16 | return Ok(Some(reference_defenition(db, position.file_id, name_ref)?)); | ||
17 | } | ||
18 | if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) { | ||
19 | return name_defenition(db, position.file_id, name); | ||
20 | } | ||
21 | Ok(None) | ||
22 | } | ||
23 | |||
24 | pub(crate) fn reference_defenition( | ||
25 | db: &RootDatabase, | ||
26 | file_id: FileId, | ||
27 | name_ref: &ast::NameRef, | ||
28 | ) -> Cancelable<Vec<NavigationTarget>> { | ||
29 | if let Some(fn_descr) = | ||
30 | hir::source_binder::function_from_child_node(db, file_id, name_ref.syntax())? | ||
31 | { | ||
32 | let scope = fn_descr.scopes(db)?; | ||
33 | // First try to resolve the symbol locally | ||
34 | if let Some(entry) = scope.resolve_local_name(name_ref) { | ||
35 | let nav = NavigationTarget { | ||
36 | file_id, | ||
37 | name: entry.name().to_string().into(), | ||
38 | range: entry.ptr().range(), | ||
39 | kind: NAME, | ||
40 | ptr: None, | ||
41 | }; | ||
42 | return Ok(vec![nav]); | ||
43 | }; | ||
44 | } | ||
45 | // If that fails, try the index-based approach. | ||
46 | let navs = db | ||
47 | .index_resolve(name_ref)? | ||
48 | .into_iter() | ||
49 | .map(NavigationTarget::from_symbol) | ||
50 | .collect(); | ||
51 | Ok(navs) | ||
52 | } | ||
53 | |||
54 | fn name_defenition( | ||
55 | db: &RootDatabase, | ||
56 | file_id: FileId, | ||
57 | name: &ast::Name, | ||
58 | ) -> Cancelable<Option<Vec<NavigationTarget>>> { | ||
59 | if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { | ||
60 | if module.has_semi() { | ||
61 | if let Some(child_module) = | ||
62 | hir::source_binder::module_from_declaration(db, file_id, module)? | ||
63 | { | ||
64 | let (file_id, _) = child_module.defenition_source(db)?; | ||
65 | let name = match child_module.name(db)? { | ||
66 | Some(name) => name.to_string().into(), | ||
67 | None => "".into(), | ||
68 | }; | ||
69 | let nav = NavigationTarget { | ||
70 | file_id, | ||
71 | name, | ||
72 | range: TextRange::offset_len(0.into(), 0.into()), | ||
73 | kind: MODULE, | ||
74 | ptr: None, | ||
75 | }; | ||
76 | return Ok(Some(vec![nav])); | ||
77 | } | ||
78 | } | ||
79 | } | ||
80 | Ok(None) | ||
81 | } | ||
82 | |||
83 | #[cfg(test)] | ||
84 | mod tests { | ||
85 | use test_utils::assert_eq_dbg; | ||
86 | use crate::mock_analysis::analysis_and_position; | ||
87 | |||
88 | #[test] | ||
89 | fn goto_defenition_works_in_items() { | ||
90 | let (analysis, pos) = analysis_and_position( | ||
91 | " | ||
92 | //- /lib.rs | ||
93 | struct Foo; | ||
94 | enum E { X(Foo<|>) } | ||
95 | ", | ||
96 | ); | ||
97 | |||
98 | let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); | ||
99 | assert_eq_dbg( | ||
100 | r#"[NavigationTarget { file_id: FileId(1), name: "Foo", | ||
101 | kind: STRUCT_DEF, range: [0; 11), | ||
102 | ptr: Some(LocalSyntaxPtr { range: [0; 11), kind: STRUCT_DEF }) }]"#, | ||
103 | &symbols, | ||
104 | ); | ||
105 | } | ||
106 | |||
107 | #[test] | ||
108 | fn goto_defenition_works_for_module_declaration() { | ||
109 | let (analysis, pos) = analysis_and_position( | ||
110 | " | ||
111 | //- /lib.rs | ||
112 | mod <|>foo; | ||
113 | //- /foo.rs | ||
114 | // empty | ||
115 | ", | ||
116 | ); | ||
117 | |||
118 | let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); | ||
119 | assert_eq_dbg( | ||
120 | r#"[NavigationTarget { file_id: FileId(2), name: "foo", kind: MODULE, range: [0; 0), ptr: None }]"#, | ||
121 | &symbols, | ||
122 | ); | ||
123 | |||
124 | let (analysis, pos) = analysis_and_position( | ||
125 | " | ||
126 | //- /lib.rs | ||
127 | mod <|>foo; | ||
128 | //- /foo/mod.rs | ||
129 | // empty | ||
130 | ", | ||
131 | ); | ||
132 | |||
133 | let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); | ||
134 | assert_eq_dbg( | ||
135 | r#"[NavigationTarget { file_id: FileId(2), name: "foo", kind: MODULE, range: [0; 0), ptr: None }]"#, | ||
136 | &symbols, | ||
137 | ); | ||
138 | } | ||
139 | } | ||
diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs new file mode 100644 index 000000000..475524ee1 --- /dev/null +++ b/crates/ra_ide_api/src/hover.rs | |||
@@ -0,0 +1,257 @@ | |||
1 | use ra_db::{Cancelable, SyntaxDatabase}; | ||
2 | use ra_syntax::{ | ||
3 | AstNode, SyntaxNode, TreePtr, | ||
4 | ast::{self, NameOwner}, | ||
5 | algo::{find_covering_node, find_node_at_offset, find_leaf_at_offset, visit::{visitor, Visitor}}, | ||
6 | }; | ||
7 | |||
8 | use crate::{db::RootDatabase, RangeInfo, FilePosition, FileRange, NavigationTarget}; | ||
9 | |||
10 | pub(crate) fn hover( | ||
11 | db: &RootDatabase, | ||
12 | position: FilePosition, | ||
13 | ) -> Cancelable<Option<RangeInfo<String>>> { | ||
14 | let file = db.source_file(position.file_id); | ||
15 | let mut res = Vec::new(); | ||
16 | |||
17 | let mut range = None; | ||
18 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset) { | ||
19 | let navs = crate::goto_defenition::reference_defenition(db, position.file_id, name_ref)?; | ||
20 | for nav in navs { | ||
21 | res.extend(doc_text_for(db, nav)?) | ||
22 | } | ||
23 | if !res.is_empty() { | ||
24 | range = Some(name_ref.syntax().range()) | ||
25 | } | ||
26 | } | ||
27 | if range.is_none() { | ||
28 | let node = find_leaf_at_offset(file.syntax(), position.offset).find_map(|leaf| { | ||
29 | leaf.ancestors() | ||
30 | .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some()) | ||
31 | }); | ||
32 | let node = ctry!(node); | ||
33 | let frange = FileRange { | ||
34 | file_id: position.file_id, | ||
35 | range: node.range(), | ||
36 | }; | ||
37 | res.extend(type_of(db, frange)?); | ||
38 | range = Some(node.range()); | ||
39 | }; | ||
40 | |||
41 | let range = ctry!(range); | ||
42 | if res.is_empty() { | ||
43 | return Ok(None); | ||
44 | } | ||
45 | let res = RangeInfo::new(range, res.join("\n\n---\n")); | ||
46 | Ok(Some(res)) | ||
47 | } | ||
48 | |||
49 | pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Cancelable<Option<String>> { | ||
50 | let file = db.source_file(frange.file_id); | ||
51 | let syntax = file.syntax(); | ||
52 | let leaf_node = find_covering_node(syntax, frange.range); | ||
53 | // if we picked an identifier, expand to a pattern/expression | ||
54 | let node = leaf_node | ||
55 | .ancestors() | ||
56 | .take_while(|it| it.range() == leaf_node.range()) | ||
57 | .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some()) | ||
58 | .unwrap_or(leaf_node); | ||
59 | let parent_fn = ctry!(node.ancestors().find_map(ast::FnDef::cast)); | ||
60 | let function = ctry!(hir::source_binder::function_from_source( | ||
61 | db, | ||
62 | frange.file_id, | ||
63 | parent_fn | ||
64 | )?); | ||
65 | let infer = function.infer(db)?; | ||
66 | let syntax_mapping = function.body_syntax_mapping(db)?; | ||
67 | if let Some(expr) = ast::Expr::cast(node).and_then(|e| syntax_mapping.node_expr(e)) { | ||
68 | Ok(Some(infer[expr].to_string())) | ||
69 | } else if let Some(pat) = ast::Pat::cast(node).and_then(|p| syntax_mapping.node_pat(p)) { | ||
70 | Ok(Some(infer[pat].to_string())) | ||
71 | } else { | ||
72 | Ok(None) | ||
73 | } | ||
74 | } | ||
75 | |||
76 | // FIXME: this should not really use navigation target. Rather, an approximately | ||
77 | // resolved symbol should return a `DefId`. | ||
78 | fn doc_text_for(db: &RootDatabase, nav: NavigationTarget) -> Cancelable<Option<String>> { | ||
79 | let result = match (nav.description(db), nav.docs(db)) { | ||
80 | (Some(desc), Some(docs)) => Some("```rust\n".to_string() + &*desc + "\n```\n\n" + &*docs), | ||
81 | (Some(desc), None) => Some("```rust\n".to_string() + &*desc + "\n```"), | ||
82 | (None, Some(docs)) => Some(docs), | ||
83 | _ => None, | ||
84 | }; | ||
85 | |||
86 | Ok(result) | ||
87 | } | ||
88 | |||
89 | impl NavigationTarget { | ||
90 | fn node(&self, db: &RootDatabase) -> Option<TreePtr<SyntaxNode>> { | ||
91 | let source_file = db.source_file(self.file_id); | ||
92 | let source_file = source_file.syntax(); | ||
93 | let node = source_file | ||
94 | .descendants() | ||
95 | .find(|node| node.kind() == self.kind && node.range() == self.range)? | ||
96 | .to_owned(); | ||
97 | Some(node) | ||
98 | } | ||
99 | |||
100 | fn docs(&self, db: &RootDatabase) -> Option<String> { | ||
101 | let node = self.node(db)?; | ||
102 | fn doc_comments<N: ast::DocCommentsOwner>(node: &N) -> Option<String> { | ||
103 | let comments = node.doc_comment_text(); | ||
104 | if comments.is_empty() { | ||
105 | None | ||
106 | } else { | ||
107 | Some(comments) | ||
108 | } | ||
109 | } | ||
110 | |||
111 | visitor() | ||
112 | .visit(doc_comments::<ast::FnDef>) | ||
113 | .visit(doc_comments::<ast::StructDef>) | ||
114 | .visit(doc_comments::<ast::EnumDef>) | ||
115 | .visit(doc_comments::<ast::TraitDef>) | ||
116 | .visit(doc_comments::<ast::Module>) | ||
117 | .visit(doc_comments::<ast::TypeDef>) | ||
118 | .visit(doc_comments::<ast::ConstDef>) | ||
119 | .visit(doc_comments::<ast::StaticDef>) | ||
120 | .accept(&node)? | ||
121 | } | ||
122 | |||
123 | /// Get a description of this node. | ||
124 | /// | ||
125 | /// e.g. `struct Name`, `enum Name`, `fn Name` | ||
126 | fn description(&self, db: &RootDatabase) -> Option<String> { | ||
127 | // TODO: After type inference is done, add type information to improve the output | ||
128 | let node = self.node(db)?; | ||
129 | // TODO: Refactor to have less repetition | ||
130 | visitor() | ||
131 | .visit(|node: &ast::FnDef| { | ||
132 | let mut string = "fn ".to_string(); | ||
133 | node.name()?.syntax().text().push_to(&mut string); | ||
134 | Some(string) | ||
135 | }) | ||
136 | .visit(|node: &ast::StructDef| { | ||
137 | let mut string = "struct ".to_string(); | ||
138 | node.name()?.syntax().text().push_to(&mut string); | ||
139 | Some(string) | ||
140 | }) | ||
141 | .visit(|node: &ast::EnumDef| { | ||
142 | let mut string = "enum ".to_string(); | ||
143 | node.name()?.syntax().text().push_to(&mut string); | ||
144 | Some(string) | ||
145 | }) | ||
146 | .visit(|node: &ast::TraitDef| { | ||
147 | let mut string = "trait ".to_string(); | ||
148 | node.name()?.syntax().text().push_to(&mut string); | ||
149 | Some(string) | ||
150 | }) | ||
151 | .visit(|node: &ast::Module| { | ||
152 | let mut string = "mod ".to_string(); | ||
153 | node.name()?.syntax().text().push_to(&mut string); | ||
154 | Some(string) | ||
155 | }) | ||
156 | .visit(|node: &ast::TypeDef| { | ||
157 | let mut string = "type ".to_string(); | ||
158 | node.name()?.syntax().text().push_to(&mut string); | ||
159 | Some(string) | ||
160 | }) | ||
161 | .visit(|node: &ast::ConstDef| { | ||
162 | let mut string = "const ".to_string(); | ||
163 | node.name()?.syntax().text().push_to(&mut string); | ||
164 | Some(string) | ||
165 | }) | ||
166 | .visit(|node: &ast::StaticDef| { | ||
167 | let mut string = "static ".to_string(); | ||
168 | node.name()?.syntax().text().push_to(&mut string); | ||
169 | Some(string) | ||
170 | }) | ||
171 | .accept(&node)? | ||
172 | } | ||
173 | } | ||
174 | |||
175 | #[cfg(test)] | ||
176 | mod tests { | ||
177 | use ra_syntax::TextRange; | ||
178 | use crate::mock_analysis::{single_file_with_position, single_file_with_range}; | ||
179 | |||
180 | #[test] | ||
181 | fn hover_shows_type_of_an_expression() { | ||
182 | let (analysis, position) = single_file_with_position( | ||
183 | " | ||
184 | pub fn foo() -> u32 { 1 } | ||
185 | |||
186 | fn main() { | ||
187 | let foo_test = foo()<|>; | ||
188 | } | ||
189 | ", | ||
190 | ); | ||
191 | let hover = analysis.hover(position).unwrap().unwrap(); | ||
192 | assert_eq!(hover.range, TextRange::from_to(95.into(), 100.into())); | ||
193 | assert_eq!(hover.info, "u32"); | ||
194 | } | ||
195 | |||
196 | #[test] | ||
197 | fn hover_for_local_variable() { | ||
198 | let (analysis, position) = single_file_with_position("fn func(foo: i32) { fo<|>o; }"); | ||
199 | let hover = analysis.hover(position).unwrap().unwrap(); | ||
200 | assert_eq!(hover.info, "i32"); | ||
201 | } | ||
202 | |||
203 | #[test] | ||
204 | fn hover_for_local_variable_pat() { | ||
205 | let (analysis, position) = single_file_with_position("fn func(fo<|>o: i32) {}"); | ||
206 | let hover = analysis.hover(position).unwrap().unwrap(); | ||
207 | assert_eq!(hover.info, "i32"); | ||
208 | } | ||
209 | |||
210 | #[test] | ||
211 | fn test_type_of_for_function() { | ||
212 | let (analysis, range) = single_file_with_range( | ||
213 | " | ||
214 | pub fn foo() -> u32 { 1 }; | ||
215 | |||
216 | fn main() { | ||
217 | let foo_test = <|>foo()<|>; | ||
218 | } | ||
219 | ", | ||
220 | ); | ||
221 | |||
222 | let type_name = analysis.type_of(range).unwrap().unwrap(); | ||
223 | assert_eq!("u32", &type_name); | ||
224 | } | ||
225 | |||
226 | // FIXME: improve type_of to make this work | ||
227 | #[test] | ||
228 | fn test_type_of_for_expr_1() { | ||
229 | let (analysis, range) = single_file_with_range( | ||
230 | " | ||
231 | fn main() { | ||
232 | let foo = <|>1 + foo_test<|>; | ||
233 | } | ||
234 | ", | ||
235 | ); | ||
236 | |||
237 | let type_name = analysis.type_of(range).unwrap().unwrap(); | ||
238 | assert_eq!("[unknown]", &type_name); | ||
239 | } | ||
240 | |||
241 | // FIXME: improve type_of to make this work | ||
242 | #[test] | ||
243 | fn test_type_of_for_expr_2() { | ||
244 | let (analysis, range) = single_file_with_range( | ||
245 | " | ||
246 | fn main() { | ||
247 | let foo: usize = 1; | ||
248 | let bar = <|>1 + foo_test<|>; | ||
249 | } | ||
250 | ", | ||
251 | ); | ||
252 | |||
253 | let type_name = analysis.type_of(range).unwrap().unwrap(); | ||
254 | assert_eq!("[unknown]", &type_name); | ||
255 | } | ||
256 | |||
257 | } | ||
diff --git a/crates/ra_ide_api/src/imp.rs b/crates/ra_ide_api/src/imp.rs new file mode 100644 index 000000000..7c60ab7d6 --- /dev/null +++ b/crates/ra_ide_api/src/imp.rs | |||
@@ -0,0 +1,309 @@ | |||
1 | use std::sync::Arc; | ||
2 | |||
3 | use salsa::Database; | ||
4 | |||
5 | use hir::{ | ||
6 | self, Problem, source_binder, | ||
7 | }; | ||
8 | use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase}; | ||
9 | use ra_ide_api_light::{self, assists, LocalEdit, Severity}; | ||
10 | use ra_syntax::{ | ||
11 | TextRange, AstNode, SourceFile, | ||
12 | ast::{self, NameOwner}, | ||
13 | algo::find_node_at_offset, | ||
14 | SyntaxKind::*, | ||
15 | }; | ||
16 | |||
17 | use crate::{ | ||
18 | AnalysisChange, | ||
19 | Cancelable, NavigationTarget, | ||
20 | CrateId, db, Diagnostic, FileId, FilePosition, FileRange, FileSystemEdit, | ||
21 | Query, RootChange, SourceChange, SourceFileEdit, | ||
22 | symbol_index::{LibrarySymbolsQuery, FileSymbol}, | ||
23 | }; | ||
24 | |||
25 | impl db::RootDatabase { | ||
26 | pub(crate) fn apply_change(&mut self, change: AnalysisChange) { | ||
27 | log::info!("apply_change {:?}", change); | ||
28 | // self.gc_syntax_trees(); | ||
29 | if !change.new_roots.is_empty() { | ||
30 | let mut local_roots = Vec::clone(&self.local_roots()); | ||
31 | for (root_id, is_local) in change.new_roots { | ||
32 | self.query_mut(ra_db::SourceRootQuery) | ||
33 | .set(root_id, Default::default()); | ||
34 | if is_local { | ||
35 | local_roots.push(root_id); | ||
36 | } | ||
37 | } | ||
38 | self.query_mut(ra_db::LocalRootsQuery) | ||
39 | .set((), Arc::new(local_roots)); | ||
40 | } | ||
41 | |||
42 | for (root_id, root_change) in change.roots_changed { | ||
43 | self.apply_root_change(root_id, root_change); | ||
44 | } | ||
45 | for (file_id, text) in change.files_changed { | ||
46 | self.query_mut(ra_db::FileTextQuery).set(file_id, text) | ||
47 | } | ||
48 | if !change.libraries_added.is_empty() { | ||
49 | let mut libraries = Vec::clone(&self.library_roots()); | ||
50 | for library in change.libraries_added { | ||
51 | libraries.push(library.root_id); | ||
52 | self.query_mut(ra_db::SourceRootQuery) | ||
53 | .set(library.root_id, Default::default()); | ||
54 | self.query_mut(LibrarySymbolsQuery) | ||
55 | .set_constant(library.root_id, Arc::new(library.symbol_index)); | ||
56 | self.apply_root_change(library.root_id, library.root_change); | ||
57 | } | ||
58 | self.query_mut(ra_db::LibraryRootsQuery) | ||
59 | .set((), Arc::new(libraries)); | ||
60 | } | ||
61 | if let Some(crate_graph) = change.crate_graph { | ||
62 | self.query_mut(ra_db::CrateGraphQuery) | ||
63 | .set((), Arc::new(crate_graph)) | ||
64 | } | ||
65 | } | ||
66 | |||
67 | fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) { | ||
68 | let mut source_root = SourceRoot::clone(&self.source_root(root_id)); | ||
69 | for add_file in root_change.added { | ||
70 | self.query_mut(ra_db::FileTextQuery) | ||
71 | .set(add_file.file_id, add_file.text); | ||
72 | self.query_mut(ra_db::FileRelativePathQuery) | ||
73 | .set(add_file.file_id, add_file.path.clone()); | ||
74 | self.query_mut(ra_db::FileSourceRootQuery) | ||
75 | .set(add_file.file_id, root_id); | ||
76 | source_root.files.insert(add_file.path, add_file.file_id); | ||
77 | } | ||
78 | for remove_file in root_change.removed { | ||
79 | self.query_mut(ra_db::FileTextQuery) | ||
80 | .set(remove_file.file_id, Default::default()); | ||
81 | source_root.files.remove(&remove_file.path); | ||
82 | } | ||
83 | self.query_mut(ra_db::SourceRootQuery) | ||
84 | .set(root_id, Arc::new(source_root)); | ||
85 | } | ||
86 | |||
87 | #[allow(unused)] | ||
88 | /// Ideally, we should call this function from time to time to collect heavy | ||
89 | /// syntax trees. However, if we actually do that, everything is recomputed | ||
90 | /// for some reason. Needs investigation. | ||
91 | fn gc_syntax_trees(&mut self) { | ||
92 | self.query(ra_db::SourceFileQuery) | ||
93 | .sweep(salsa::SweepStrategy::default().discard_values()); | ||
94 | self.query(hir::db::SourceFileItemsQuery) | ||
95 | .sweep(salsa::SweepStrategy::default().discard_values()); | ||
96 | self.query(hir::db::FileItemQuery) | ||
97 | .sweep(salsa::SweepStrategy::default().discard_values()); | ||
98 | } | ||
99 | } | ||
100 | |||
101 | impl db::RootDatabase { | ||
102 | /// This returns `Vec` because a module may be included from several places. We | ||
103 | /// don't handle this case yet though, so the Vec has length at most one. | ||
104 | pub(crate) fn parent_module( | ||
105 | &self, | ||
106 | position: FilePosition, | ||
107 | ) -> Cancelable<Vec<NavigationTarget>> { | ||
108 | let module = match source_binder::module_from_position(self, position)? { | ||
109 | None => return Ok(Vec::new()), | ||
110 | Some(it) => it, | ||
111 | }; | ||
112 | let (file_id, ast_module) = match module.declaration_source(self)? { | ||
113 | None => return Ok(Vec::new()), | ||
114 | Some(it) => it, | ||
115 | }; | ||
116 | let name = ast_module.name().unwrap(); | ||
117 | Ok(vec![NavigationTarget { | ||
118 | file_id, | ||
119 | name: name.text().clone(), | ||
120 | range: name.syntax().range(), | ||
121 | kind: MODULE, | ||
122 | ptr: None, | ||
123 | }]) | ||
124 | } | ||
125 | /// Returns `Vec` for the same reason as `parent_module` | ||
126 | pub(crate) fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { | ||
127 | let module = match source_binder::module_from_file_id(self, file_id)? { | ||
128 | Some(it) => it, | ||
129 | None => return Ok(Vec::new()), | ||
130 | }; | ||
131 | let krate = match module.krate(self)? { | ||
132 | Some(it) => it, | ||
133 | None => return Ok(Vec::new()), | ||
134 | }; | ||
135 | Ok(vec![krate.crate_id()]) | ||
136 | } | ||
137 | pub(crate) fn find_all_refs( | ||
138 | &self, | ||
139 | position: FilePosition, | ||
140 | ) -> Cancelable<Vec<(FileId, TextRange)>> { | ||
141 | let file = self.source_file(position.file_id); | ||
142 | // Find the binding associated with the offset | ||
143 | let (binding, descr) = match find_binding(self, &file, position)? { | ||
144 | None => return Ok(Vec::new()), | ||
145 | Some(it) => it, | ||
146 | }; | ||
147 | |||
148 | let mut ret = binding | ||
149 | .name() | ||
150 | .into_iter() | ||
151 | .map(|name| (position.file_id, name.syntax().range())) | ||
152 | .collect::<Vec<_>>(); | ||
153 | ret.extend( | ||
154 | descr | ||
155 | .scopes(self)? | ||
156 | .find_all_refs(binding) | ||
157 | .into_iter() | ||
158 | .map(|ref_desc| (position.file_id, ref_desc.range)), | ||
159 | ); | ||
160 | |||
161 | return Ok(ret); | ||
162 | |||
163 | fn find_binding<'a>( | ||
164 | db: &db::RootDatabase, | ||
165 | source_file: &'a SourceFile, | ||
166 | position: FilePosition, | ||
167 | ) -> Cancelable<Option<(&'a ast::BindPat, hir::Function)>> { | ||
168 | let syntax = source_file.syntax(); | ||
169 | if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) { | ||
170 | let descr = ctry!(source_binder::function_from_child_node( | ||
171 | db, | ||
172 | position.file_id, | ||
173 | binding.syntax(), | ||
174 | )?); | ||
175 | return Ok(Some((binding, descr))); | ||
176 | }; | ||
177 | let name_ref = ctry!(find_node_at_offset::<ast::NameRef>(syntax, position.offset)); | ||
178 | let descr = ctry!(source_binder::function_from_child_node( | ||
179 | db, | ||
180 | position.file_id, | ||
181 | name_ref.syntax(), | ||
182 | )?); | ||
183 | let scope = descr.scopes(db)?; | ||
184 | let resolved = ctry!(scope.resolve_local_name(name_ref)); | ||
185 | let resolved = resolved.ptr().resolve(source_file); | ||
186 | let binding = ctry!(find_node_at_offset::<ast::BindPat>( | ||
187 | syntax, | ||
188 | resolved.range().end() | ||
189 | )); | ||
190 | Ok(Some((binding, descr))) | ||
191 | } | ||
192 | } | ||
193 | |||
194 | pub(crate) fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> { | ||
195 | let syntax = self.source_file(file_id); | ||
196 | |||
197 | let mut res = ra_ide_api_light::diagnostics(&syntax) | ||
198 | .into_iter() | ||
199 | .map(|d| Diagnostic { | ||
200 | range: d.range, | ||
201 | message: d.msg, | ||
202 | severity: d.severity, | ||
203 | fix: d.fix.map(|fix| SourceChange::from_local_edit(file_id, fix)), | ||
204 | }) | ||
205 | .collect::<Vec<_>>(); | ||
206 | if let Some(m) = source_binder::module_from_file_id(self, file_id)? { | ||
207 | for (name_node, problem) in m.problems(self)? { | ||
208 | let source_root = self.file_source_root(file_id); | ||
209 | let diag = match problem { | ||
210 | Problem::UnresolvedModule { candidate } => { | ||
211 | let create_file = FileSystemEdit::CreateFile { | ||
212 | source_root, | ||
213 | path: candidate.clone(), | ||
214 | }; | ||
215 | let fix = SourceChange { | ||
216 | label: "create module".to_string(), | ||
217 | source_file_edits: Vec::new(), | ||
218 | file_system_edits: vec![create_file], | ||
219 | cursor_position: None, | ||
220 | }; | ||
221 | Diagnostic { | ||
222 | range: name_node.range(), | ||
223 | message: "unresolved module".to_string(), | ||
224 | severity: Severity::Error, | ||
225 | fix: Some(fix), | ||
226 | } | ||
227 | } | ||
228 | Problem::NotDirOwner { move_to, candidate } => { | ||
229 | let move_file = FileSystemEdit::MoveFile { | ||
230 | src: file_id, | ||
231 | dst_source_root: source_root, | ||
232 | dst_path: move_to.clone(), | ||
233 | }; | ||
234 | let create_file = FileSystemEdit::CreateFile { | ||
235 | source_root, | ||
236 | path: move_to.join(candidate), | ||
237 | }; | ||
238 | let fix = SourceChange { | ||
239 | label: "move file and create module".to_string(), | ||
240 | source_file_edits: Vec::new(), | ||
241 | file_system_edits: vec![move_file, create_file], | ||
242 | cursor_position: None, | ||
243 | }; | ||
244 | Diagnostic { | ||
245 | range: name_node.range(), | ||
246 | message: "can't declare module at this location".to_string(), | ||
247 | severity: Severity::Error, | ||
248 | fix: Some(fix), | ||
249 | } | ||
250 | } | ||
251 | }; | ||
252 | res.push(diag) | ||
253 | } | ||
254 | }; | ||
255 | Ok(res) | ||
256 | } | ||
257 | |||
258 | pub(crate) fn assists(&self, frange: FileRange) -> Vec<SourceChange> { | ||
259 | let file = self.source_file(frange.file_id); | ||
260 | assists::assists(&file, frange.range) | ||
261 | .into_iter() | ||
262 | .map(|local_edit| SourceChange::from_local_edit(frange.file_id, local_edit)) | ||
263 | .collect() | ||
264 | } | ||
265 | |||
266 | pub(crate) fn rename( | ||
267 | &self, | ||
268 | position: FilePosition, | ||
269 | new_name: &str, | ||
270 | ) -> Cancelable<Vec<SourceFileEdit>> { | ||
271 | let res = self | ||
272 | .find_all_refs(position)? | ||
273 | .iter() | ||
274 | .map(|(file_id, text_range)| SourceFileEdit { | ||
275 | file_id: *file_id, | ||
276 | edit: { | ||
277 | let mut builder = ra_text_edit::TextEditBuilder::default(); | ||
278 | builder.replace(*text_range, new_name.into()); | ||
279 | builder.finish() | ||
280 | }, | ||
281 | }) | ||
282 | .collect::<Vec<_>>(); | ||
283 | Ok(res) | ||
284 | } | ||
285 | pub(crate) fn index_resolve(&self, name_ref: &ast::NameRef) -> Cancelable<Vec<FileSymbol>> { | ||
286 | let name = name_ref.text(); | ||
287 | let mut query = Query::new(name.to_string()); | ||
288 | query.exact(); | ||
289 | query.limit(4); | ||
290 | crate::symbol_index::world_symbols(self, query) | ||
291 | } | ||
292 | } | ||
293 | |||
294 | impl SourceChange { | ||
295 | pub(crate) fn from_local_edit(file_id: FileId, edit: LocalEdit) -> SourceChange { | ||
296 | let file_edit = SourceFileEdit { | ||
297 | file_id, | ||
298 | edit: edit.edit, | ||
299 | }; | ||
300 | SourceChange { | ||
301 | label: edit.label, | ||
302 | source_file_edits: vec![file_edit], | ||
303 | file_system_edits: vec![], | ||
304 | cursor_position: edit | ||
305 | .cursor_position | ||
306 | .map(|offset| FilePosition { offset, file_id }), | ||
307 | } | ||
308 | } | ||
309 | } | ||
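Aside (not part of this commit): `find_all_refs` above backs both the public `Analysis::find_all_refs` and the `rename` edit builder. A hedged usage sketch through the public API, assuming `single_file_with_position` is exported from `mock_analysis` the same way the hover tests use it:

    use ra_ide_api::mock_analysis::single_file_with_position;

    fn show_refs() {
        // `<|>` marks the cursor, as in the other test fixtures in this commit.
        let (analysis, position) = single_file_with_position("fn f(fo<|>o: i32) { foo; foo; }");
        // One (FileId, TextRange) pair per occurrence: the binding itself plus
        // every usage found in the enclosing function's scopes.
        for (file_id, range) in analysis.find_all_refs(position).unwrap() {
            println!("{:?}: {:?}", file_id, range);
        }
    }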
diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs new file mode 100644 index 000000000..183e36706 --- /dev/null +++ b/crates/ra_ide_api/src/lib.rs | |||
@@ -0,0 +1,509 @@ | |||
1 | //! The `ra_ide_api` crate provides "ide-centric" APIs for the rust-analyzer. What | ||
2 | //! powers this API are the `RootDatabase` struct, which defines a `salsa` | ||
3 | //! database, and the `ra_hir` crate, where the majority of the analysis happens. | ||
4 | //! However, IDE-specific bits of the analysis (most notably completion) happen | ||
5 | //! in this crate. | ||
6 | macro_rules! ctry { | ||
7 | ($expr:expr) => { | ||
8 | match $expr { | ||
9 | None => return Ok(None), | ||
10 | Some(it) => it, | ||
11 | } | ||
12 | }; | ||
13 | } | ||
14 | |||
15 | mod completion; | ||
16 | mod db; | ||
17 | mod goto_defenition; | ||
18 | mod imp; | ||
19 | pub mod mock_analysis; | ||
20 | mod runnables; | ||
21 | mod symbol_index; | ||
22 | |||
23 | mod extend_selection; | ||
24 | mod hover; | ||
25 | mod call_info; | ||
26 | mod syntax_highlighting; | ||
27 | |||
28 | use std::{fmt, sync::Arc}; | ||
29 | |||
30 | use ra_syntax::{SmolStr, SourceFile, TreePtr, SyntaxKind, TextRange, TextUnit}; | ||
31 | use ra_text_edit::TextEdit; | ||
32 | use ra_db::{SyntaxDatabase, FilesDatabase, LocalSyntaxPtr}; | ||
33 | use rayon::prelude::*; | ||
34 | use relative_path::RelativePathBuf; | ||
35 | use rustc_hash::FxHashMap; | ||
36 | use salsa::ParallelDatabase; | ||
37 | |||
38 | use crate::{ | ||
39 | symbol_index::{FileSymbol, SymbolIndex}, | ||
40 | db::LineIndexDatabase, | ||
41 | }; | ||
42 | |||
43 | pub use crate::{ | ||
44 | completion::{CompletionItem, CompletionItemKind, InsertText}, | ||
45 | runnables::{Runnable, RunnableKind}, | ||
46 | }; | ||
47 | pub use ra_ide_api_light::{ | ||
48 | Fold, FoldKind, HighlightedRange, Severity, StructureNode, | ||
49 | LineIndex, LineCol, translate_offset_with_edit, | ||
50 | }; | ||
51 | pub use ra_db::{ | ||
52 | Cancelable, Canceled, CrateGraph, CrateId, FileId, FilePosition, FileRange, SourceRootId | ||
53 | }; | ||
54 | |||
55 | #[derive(Default)] | ||
56 | pub struct AnalysisChange { | ||
57 | new_roots: Vec<(SourceRootId, bool)>, | ||
58 | roots_changed: FxHashMap<SourceRootId, RootChange>, | ||
59 | files_changed: Vec<(FileId, Arc<String>)>, | ||
60 | libraries_added: Vec<LibraryData>, | ||
61 | crate_graph: Option<CrateGraph>, | ||
62 | } | ||
63 | |||
64 | #[derive(Default)] | ||
65 | struct RootChange { | ||
66 | added: Vec<AddFile>, | ||
67 | removed: Vec<RemoveFile>, | ||
68 | } | ||
69 | |||
70 | #[derive(Debug)] | ||
71 | struct AddFile { | ||
72 | file_id: FileId, | ||
73 | path: RelativePathBuf, | ||
74 | text: Arc<String>, | ||
75 | } | ||
76 | |||
77 | #[derive(Debug)] | ||
78 | struct RemoveFile { | ||
79 | file_id: FileId, | ||
80 | path: RelativePathBuf, | ||
81 | } | ||
82 | |||
83 | impl fmt::Debug for AnalysisChange { | ||
84 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
85 | let mut d = fmt.debug_struct("AnalysisChange"); | ||
86 | if !self.new_roots.is_empty() { | ||
87 | d.field("new_roots", &self.new_roots); | ||
88 | } | ||
89 | if !self.roots_changed.is_empty() { | ||
90 | d.field("roots_changed", &self.roots_changed); | ||
91 | } | ||
92 | if !self.files_changed.is_empty() { | ||
93 | d.field("files_changed", &self.files_changed.len()); | ||
94 | } | ||
95 | if !self.libraries_added.is_empty() { | ||
96 | d.field("libraries_added", &self.libraries_added.len()); | ||
97 | } | ||
98 | if self.crate_graph.is_some() { | ||
99 | d.field("crate_graph", &self.crate_graph); | ||
100 | } | ||
101 | d.finish() | ||
102 | } | ||
103 | } | ||
104 | |||
105 | impl fmt::Debug for RootChange { | ||
106 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
107 | fmt.debug_struct("AnalysisChange") | ||
108 | .field("added", &self.added.len()) | ||
109 | .field("removed", &self.removed.len()) | ||
110 | .finish() | ||
111 | } | ||
112 | } | ||
113 | |||
114 | impl AnalysisChange { | ||
115 | pub fn new() -> AnalysisChange { | ||
116 | AnalysisChange::default() | ||
117 | } | ||
118 | pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) { | ||
119 | self.new_roots.push((root_id, is_local)); | ||
120 | } | ||
121 | pub fn add_file( | ||
122 | &mut self, | ||
123 | root_id: SourceRootId, | ||
124 | file_id: FileId, | ||
125 | path: RelativePathBuf, | ||
126 | text: Arc<String>, | ||
127 | ) { | ||
128 | let file = AddFile { | ||
129 | file_id, | ||
130 | path, | ||
131 | text, | ||
132 | }; | ||
133 | self.roots_changed | ||
134 | .entry(root_id) | ||
135 | .or_default() | ||
136 | .added | ||
137 | .push(file); | ||
138 | } | ||
139 | pub fn change_file(&mut self, file_id: FileId, new_text: Arc<String>) { | ||
140 | self.files_changed.push((file_id, new_text)) | ||
141 | } | ||
142 | pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) { | ||
143 | let file = RemoveFile { file_id, path }; | ||
144 | self.roots_changed | ||
145 | .entry(root_id) | ||
146 | .or_default() | ||
147 | .removed | ||
148 | .push(file); | ||
149 | } | ||
150 | pub fn add_library(&mut self, data: LibraryData) { | ||
151 | self.libraries_added.push(data) | ||
152 | } | ||
153 | pub fn set_crate_graph(&mut self, graph: CrateGraph) { | ||
154 | self.crate_graph = Some(graph); | ||
155 | } | ||
156 | } | ||
157 | |||
158 | #[derive(Debug)] | ||
159 | pub struct SourceChange { | ||
160 | pub label: String, | ||
161 | pub source_file_edits: Vec<SourceFileEdit>, | ||
162 | pub file_system_edits: Vec<FileSystemEdit>, | ||
163 | pub cursor_position: Option<FilePosition>, | ||
164 | } | ||
165 | |||
166 | #[derive(Debug)] | ||
167 | pub struct SourceFileEdit { | ||
168 | pub file_id: FileId, | ||
169 | pub edit: TextEdit, | ||
170 | } | ||
171 | |||
172 | #[derive(Debug)] | ||
173 | pub enum FileSystemEdit { | ||
174 | CreateFile { | ||
175 | source_root: SourceRootId, | ||
176 | path: RelativePathBuf, | ||
177 | }, | ||
178 | MoveFile { | ||
179 | src: FileId, | ||
180 | dst_source_root: SourceRootId, | ||
181 | dst_path: RelativePathBuf, | ||
182 | }, | ||
183 | } | ||
184 | |||
185 | #[derive(Debug)] | ||
186 | pub struct Diagnostic { | ||
187 | pub message: String, | ||
188 | pub range: TextRange, | ||
189 | pub fix: Option<SourceChange>, | ||
190 | pub severity: Severity, | ||
191 | } | ||
192 | |||
193 | #[derive(Debug)] | ||
194 | pub struct Query { | ||
195 | query: String, | ||
196 | lowercased: String, | ||
197 | only_types: bool, | ||
198 | libs: bool, | ||
199 | exact: bool, | ||
200 | limit: usize, | ||
201 | } | ||
202 | |||
203 | impl Query { | ||
204 | pub fn new(query: String) -> Query { | ||
205 | let lowercased = query.to_lowercase(); | ||
206 | Query { | ||
207 | query, | ||
208 | lowercased, | ||
209 | only_types: false, | ||
210 | libs: false, | ||
211 | exact: false, | ||
212 | limit: usize::max_value(), | ||
213 | } | ||
214 | } | ||
215 | pub fn only_types(&mut self) { | ||
216 | self.only_types = true; | ||
217 | } | ||
218 | pub fn libs(&mut self) { | ||
219 | self.libs = true; | ||
220 | } | ||
221 | pub fn exact(&mut self) { | ||
222 | self.exact = true; | ||
223 | } | ||
224 | pub fn limit(&mut self, limit: usize) { | ||
225 | self.limit = limit | ||
226 | } | ||
227 | } | ||
228 | |||
229 | /// `NavigationTarget` represents an element in the editor's UI which you can | ||
230 | /// click on to navigate to a particular piece of code. | ||
231 | /// | ||
232 | /// Typically, a `NavigationTarget` corresponds to some element in the source | ||
233 | /// code, like a function or a struct, but this is not strictly required. | ||
234 | #[derive(Debug, Clone)] | ||
235 | pub struct NavigationTarget { | ||
236 | file_id: FileId, | ||
237 | name: SmolStr, | ||
238 | kind: SyntaxKind, | ||
239 | range: TextRange, | ||
240 | // Should be DefId ideally | ||
241 | ptr: Option<LocalSyntaxPtr>, | ||
242 | } | ||
243 | |||
244 | impl NavigationTarget { | ||
245 | fn from_symbol(symbol: FileSymbol) -> NavigationTarget { | ||
246 | NavigationTarget { | ||
247 | file_id: symbol.file_id, | ||
248 | name: symbol.name.clone(), | ||
249 | kind: symbol.ptr.kind(), | ||
250 | range: symbol.ptr.range(), | ||
251 | ptr: Some(symbol.ptr.clone()), | ||
252 | } | ||
253 | } | ||
254 | pub fn name(&self) -> &SmolStr { | ||
255 | &self.name | ||
256 | } | ||
257 | pub fn kind(&self) -> SyntaxKind { | ||
258 | self.kind | ||
259 | } | ||
260 | pub fn file_id(&self) -> FileId { | ||
261 | self.file_id | ||
262 | } | ||
263 | pub fn range(&self) -> TextRange { | ||
264 | self.range | ||
265 | } | ||
266 | } | ||
267 | |||
268 | #[derive(Debug)] | ||
269 | pub struct RangeInfo<T> { | ||
270 | pub range: TextRange, | ||
271 | pub info: T, | ||
272 | } | ||
273 | |||
274 | impl<T> RangeInfo<T> { | ||
275 | fn new(range: TextRange, info: T) -> RangeInfo<T> { | ||
276 | RangeInfo { range, info } | ||
277 | } | ||
278 | } | ||
279 | |||
280 | #[derive(Debug)] | ||
281 | pub struct CallInfo { | ||
282 | pub label: String, | ||
283 | pub doc: Option<String>, | ||
284 | pub parameters: Vec<String>, | ||
285 | pub active_parameter: Option<usize>, | ||
286 | } | ||
287 | |||
288 | /// `AnalysisHost` stores the current state of the world. | ||
289 | #[derive(Debug, Default)] | ||
290 | pub struct AnalysisHost { | ||
291 | db: db::RootDatabase, | ||
292 | } | ||
293 | |||
294 | impl AnalysisHost { | ||
295 | /// Returns a snapshot of the current state, which you can query for | ||
296 | /// semantic information. | ||
297 | pub fn analysis(&self) -> Analysis { | ||
298 | Analysis { | ||
299 | db: self.db.snapshot(), | ||
300 | } | ||
301 | } | ||
302 | /// Applies changes to the current state of the world. If there are | ||
303 | /// outstanding snapshots, they will be canceled. | ||
304 | pub fn apply_change(&mut self, change: AnalysisChange) { | ||
305 | self.db.apply_change(change) | ||
306 | } | ||
307 | } | ||
308 | |||
309 | /// Analysis is a snapshot of a world state at a moment in time. It is the main | ||
310 | /// entry point for asking semantic information about the world. When the world | ||
311 | /// state is advanced using the `AnalysisHost::apply_change` method, all existing | ||
312 | /// `Analysis` snapshots are canceled (most methods return `Err(Canceled)`). | ||
313 | #[derive(Debug)] | ||
314 | pub struct Analysis { | ||
315 | db: salsa::Snapshot<db::RootDatabase>, | ||
316 | } | ||
317 | |||
318 | impl Analysis { | ||
319 | /// Gets the text of the source file. | ||
320 | pub fn file_text(&self, file_id: FileId) -> Arc<String> { | ||
321 | self.db.file_text(file_id) | ||
322 | } | ||
323 | /// Gets the syntax tree of the file. | ||
324 | pub fn file_syntax(&self, file_id: FileId) -> TreePtr<SourceFile> { | ||
325 | self.db.source_file(file_id).clone() | ||
326 | } | ||
327 | /// Gets the file's `LineIndex`: the data structure used to convert between absolute | ||
328 | /// offsets and line/column representation. | ||
329 | pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> { | ||
330 | self.db.line_index(file_id) | ||
331 | } | ||
332 | /// Selects the next syntactic node encompassing the range. | ||
333 | pub fn extend_selection(&self, frange: FileRange) -> TextRange { | ||
334 | extend_selection::extend_selection(&self.db, frange) | ||
335 | } | ||
336 | /// Returns the position of the matching brace (all types of braces are | ||
337 | /// supported). | ||
338 | pub fn matching_brace(&self, file: &SourceFile, offset: TextUnit) -> Option<TextUnit> { | ||
339 | ra_ide_api_light::matching_brace(file, offset) | ||
340 | } | ||
341 | /// Returns a syntax tree represented as `String`, for debug purposes. | ||
342 | // FIXME: use a better name here. | ||
343 | pub fn syntax_tree(&self, file_id: FileId) -> String { | ||
344 | let file = self.db.source_file(file_id); | ||
345 | ra_ide_api_light::syntax_tree(&file) | ||
346 | } | ||
347 | /// Returns an edit to remove all newlines in the range, cleaning up minor | ||
348 | /// stuff like trailing commas. | ||
349 | pub fn join_lines(&self, frange: FileRange) -> SourceChange { | ||
350 | let file = self.db.source_file(frange.file_id); | ||
351 | SourceChange::from_local_edit( | ||
352 | frange.file_id, | ||
353 | ra_ide_api_light::join_lines(&file, frange.range), | ||
354 | ) | ||
355 | } | ||
356 | /// Returns an edit which should be applied when opening a new line, fixing | ||
357 | /// up minor stuff like continuing the comment. | ||
358 | pub fn on_enter(&self, position: FilePosition) -> Option<SourceChange> { | ||
359 | let file = self.db.source_file(position.file_id); | ||
360 | let edit = ra_ide_api_light::on_enter(&file, position.offset)?; | ||
361 | Some(SourceChange::from_local_edit(position.file_id, edit)) | ||
362 | } | ||
363 | /// Returns an edit which should be applied after `=` was typed. Primarily, | ||
364 | /// this works when adding `let =`. | ||
365 | // FIXME: use a snippet completion instead of this hack here. | ||
366 | pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> { | ||
367 | let file = self.db.source_file(position.file_id); | ||
368 | let edit = ra_ide_api_light::on_eq_typed(&file, position.offset)?; | ||
369 | Some(SourceChange::from_local_edit(position.file_id, edit)) | ||
370 | } | ||
371 | /// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. | ||
372 | pub fn on_dot_typed(&self, position: FilePosition) -> Option<SourceChange> { | ||
373 | let file = self.db.source_file(position.file_id); | ||
374 | let edit = ra_ide_api_light::on_dot_typed(&file, position.offset)?; | ||
375 | Some(SourceChange::from_local_edit(position.file_id, edit)) | ||
376 | } | ||
377 | /// Returns a tree representation of symbols in the file. Useful to draw a | ||
378 | /// file outline. | ||
379 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { | ||
380 | let file = self.db.source_file(file_id); | ||
381 | ra_ide_api_light::file_structure(&file) | ||
382 | } | ||
383 | /// Returns the set of folding ranges. | ||
384 | pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> { | ||
385 | let file = self.db.source_file(file_id); | ||
386 | ra_ide_api_light::folding_ranges(&file) | ||
387 | } | ||
388 | /// Fuzzy searches for a symbol. | ||
389 | pub fn symbol_search(&self, query: Query) -> Cancelable<Vec<NavigationTarget>> { | ||
390 | let res = symbol_index::world_symbols(&*self.db, query)? | ||
391 | .into_iter() | ||
392 | .map(NavigationTarget::from_symbol) | ||
393 | .collect(); | ||
394 | Ok(res) | ||
395 | } | ||
396 | pub fn goto_defenition( | ||
397 | &self, | ||
398 | position: FilePosition, | ||
399 | ) -> Cancelable<Option<Vec<NavigationTarget>>> { | ||
400 | goto_defenition::goto_defenition(&*self.db, position) | ||
401 | } | ||
402 | /// Finds all usages of the reference at point. | ||
403 | pub fn find_all_refs(&self, position: FilePosition) -> Cancelable<Vec<(FileId, TextRange)>> { | ||
404 | self.db.find_all_refs(position) | ||
405 | } | ||
406 | /// Returns a short text describing the element at the position. | ||
407 | pub fn hover(&self, position: FilePosition) -> Cancelable<Option<RangeInfo<String>>> { | ||
408 | hover::hover(&*self.db, position) | ||
409 | } | ||
410 | /// Computes parameter information for the given call expression. | ||
411 | pub fn call_info(&self, position: FilePosition) -> Cancelable<Option<CallInfo>> { | ||
412 | call_info::call_info(&*self.db, position) | ||
413 | } | ||
414 | /// Returns a `mod name;` declaration which created the current module. | ||
415 | pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<NavigationTarget>> { | ||
416 | self.db.parent_module(position) | ||
417 | } | ||
418 | /// Returns the crates this file belongs to. | ||
419 | pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { | ||
420 | self.db.crate_for(file_id) | ||
421 | } | ||
422 | /// Returns the root file of the given crate. | ||
423 | pub fn crate_root(&self, crate_id: CrateId) -> Cancelable<FileId> { | ||
424 | Ok(self.db.crate_graph().crate_root(crate_id)) | ||
425 | } | ||
426 | /// Returns the set of possible targets to run for the current file. | ||
427 | pub fn runnables(&self, file_id: FileId) -> Cancelable<Vec<Runnable>> { | ||
428 | runnables::runnables(&*self.db, file_id) | ||
429 | } | ||
430 | /// Computes syntax highlighting for the given file. | ||
431 | pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> { | ||
432 | syntax_highlighting::highlight(&*self.db, file_id) | ||
433 | } | ||
434 | /// Computes completions at the given position. | ||
435 | pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> { | ||
436 | let completions = completion::completions(&self.db, position)?; | ||
437 | Ok(completions.map(|it| it.into())) | ||
438 | } | ||
439 | /// Computes assists (aka code actions, aka intentions) for the given | ||
440 | /// position. | ||
441 | pub fn assists(&self, frange: FileRange) -> Cancelable<Vec<SourceChange>> { | ||
442 | Ok(self.db.assists(frange)) | ||
443 | } | ||
444 | /// Computes the set of diagnostics for the given file. | ||
445 | pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> { | ||
446 | self.db.diagnostics(file_id) | ||
447 | } | ||
448 | /// Computes the type of the expression at the given position. | ||
449 | pub fn type_of(&self, frange: FileRange) -> Cancelable<Option<String>> { | ||
450 | hover::type_of(&*self.db, frange) | ||
451 | } | ||
452 | /// Returns the edit required to rename the reference at the position to the new | ||
453 | /// name. | ||
454 | pub fn rename( | ||
455 | &self, | ||
456 | position: FilePosition, | ||
457 | new_name: &str, | ||
458 | ) -> Cancelable<Vec<SourceFileEdit>> { | ||
459 | self.db.rename(position, new_name) | ||
460 | } | ||
461 | } | ||
462 | |||
463 | pub struct LibraryData { | ||
464 | root_id: SourceRootId, | ||
465 | root_change: RootChange, | ||
466 | symbol_index: SymbolIndex, | ||
467 | } | ||
468 | |||
469 | impl fmt::Debug for LibraryData { | ||
470 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
471 | f.debug_struct("LibraryData") | ||
472 | .field("root_id", &self.root_id) | ||
473 | .field("root_change", &self.root_change) | ||
474 | .field("n_symbols", &self.symbol_index.len()) | ||
475 | .finish() | ||
476 | } | ||
477 | } | ||
478 | |||
479 | impl LibraryData { | ||
480 | pub fn prepare( | ||
481 | root_id: SourceRootId, | ||
482 | files: Vec<(FileId, RelativePathBuf, Arc<String>)>, | ||
483 | ) -> LibraryData { | ||
484 | let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| { | ||
485 | let file = SourceFile::parse(text); | ||
486 | (*file_id, file) | ||
487 | })); | ||
488 | let mut root_change = RootChange::default(); | ||
489 | root_change.added = files | ||
490 | .into_iter() | ||
491 | .map(|(file_id, path, text)| AddFile { | ||
492 | file_id, | ||
493 | path, | ||
494 | text, | ||
495 | }) | ||
496 | .collect(); | ||
497 | LibraryData { | ||
498 | root_id, | ||
499 | root_change, | ||
500 | symbol_index, | ||
501 | } | ||
502 | } | ||
503 | } | ||
504 | |||
505 | #[test] | ||
506 | fn analysis_is_send() { | ||
507 | fn is_send<T: Send>() {} | ||
508 | is_send::<Analysis>(); | ||
509 | } | ||
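The `AnalysisHost`/`Analysis` pair above is the public face of the renamed crate: the host owns the mutable world state, and every snapshot answers queries until the next `apply_change` cancels it. A minimal sketch of driving that API from a client, assuming the same `AnalysisChange`, `FileId` and `SourceRootId` constructors used by `mock_analysis.rs` further down:

```rust
use std::sync::Arc;

use relative_path::RelativePathBuf;
use ra_ide_api::{AnalysisChange, AnalysisHost, FileId, SourceRootId};

fn main() {
    let mut host = AnalysisHost::default();

    // Describe the world: one local source root containing a single file.
    let root = SourceRootId(0);
    let file = FileId(1);
    let mut change = AnalysisChange::new();
    change.add_root(root, true); // `true` marks the root as a local (workspace) root
    change.add_file(
        root,
        file,
        RelativePathBuf::from_path("main.rs").unwrap(),
        Arc::new("fn main() { let x = 92; }".to_string()),
    );
    host.apply_change(change);

    // Snapshots are read-only; a later apply_change cancels them.
    let analysis = host.analysis();
    let outline = analysis.file_structure(file);
    println!("{} symbols in the outline", outline.len());
}
```

Queries that cross file boundaries (name resolution, completion, runnable test modules) additionally need a crate graph, which `mock_analysis.rs` below wires up via `CrateGraph::default()` and `add_crate_root`.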
diff --git a/crates/ra_ide_api/src/mock_analysis.rs b/crates/ra_ide_api/src/mock_analysis.rs new file mode 100644 index 000000000..846c76cfe --- /dev/null +++ b/crates/ra_ide_api/src/mock_analysis.rs | |||
@@ -0,0 +1,135 @@ | |||
1 | use std::sync::Arc; | ||
2 | |||
3 | use relative_path::RelativePathBuf; | ||
4 | use test_utils::{extract_offset, extract_range, parse_fixture, CURSOR_MARKER}; | ||
5 | use ra_db::mock::FileMap; | ||
6 | |||
7 | use crate::{Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, FilePosition, FileRange, SourceRootId}; | ||
8 | |||
9 | /// Mock analysis is used in tests to bootstrap an AnalysisHost/Analysis | ||
10 | /// from a set of in-memory files. | ||
11 | #[derive(Debug, Default)] | ||
12 | pub struct MockAnalysis { | ||
13 | files: Vec<(String, String)>, | ||
14 | } | ||
15 | |||
16 | impl MockAnalysis { | ||
17 | pub fn new() -> MockAnalysis { | ||
18 | MockAnalysis::default() | ||
19 | } | ||
20 | /// Creates `MockAnalysis` from fixture data in the following format: | ||
21 | /// | ||
22 | /// ```notrust | ||
23 | /// //- /main.rs | ||
24 | /// mod foo; | ||
25 | /// fn main() {} | ||
26 | /// | ||
27 | /// //- /foo.rs | ||
28 | /// struct Baz; | ||
29 | /// ``` | ||
30 | pub fn with_files(fixture: &str) -> MockAnalysis { | ||
31 | let mut res = MockAnalysis::new(); | ||
32 | for entry in parse_fixture(fixture) { | ||
33 | res.add_file(&entry.meta, &entry.text); | ||
34 | } | ||
35 | res | ||
36 | } | ||
37 | |||
38 | /// Same as `with_files`, but requires that a single file contains a `<|>` marker, | ||
39 | /// whose position is also returned. | ||
40 | pub fn with_files_and_position(fixture: &str) -> (MockAnalysis, FilePosition) { | ||
41 | let mut position = None; | ||
42 | let mut res = MockAnalysis::new(); | ||
43 | for entry in parse_fixture(fixture) { | ||
44 | if entry.text.contains(CURSOR_MARKER) { | ||
45 | assert!( | ||
46 | position.is_none(), | ||
47 | "only one marker (<|>) per fixture is allowed" | ||
48 | ); | ||
49 | position = Some(res.add_file_with_position(&entry.meta, &entry.text)); | ||
50 | } else { | ||
51 | res.add_file(&entry.meta, &entry.text); | ||
52 | } | ||
53 | } | ||
54 | let position = position.expect("expected a marker (<|>)"); | ||
55 | (res, position) | ||
56 | } | ||
57 | |||
58 | pub fn add_file(&mut self, path: &str, text: &str) -> FileId { | ||
59 | let file_id = FileId((self.files.len() + 1) as u32); | ||
60 | self.files.push((path.to_string(), text.to_string())); | ||
61 | file_id | ||
62 | } | ||
63 | pub fn add_file_with_position(&mut self, path: &str, text: &str) -> FilePosition { | ||
64 | let (offset, text) = extract_offset(text); | ||
65 | let file_id = FileId((self.files.len() + 1) as u32); | ||
66 | self.files.push((path.to_string(), text.to_string())); | ||
67 | FilePosition { file_id, offset } | ||
68 | } | ||
69 | pub fn add_file_with_range(&mut self, path: &str, text: &str) -> FileRange { | ||
70 | let (range, text) = extract_range(text); | ||
71 | let file_id = FileId((self.files.len() + 1) as u32); | ||
72 | self.files.push((path.to_string(), text.to_string())); | ||
73 | FileRange { file_id, range } | ||
74 | } | ||
75 | pub fn id_of(&self, path: &str) -> FileId { | ||
76 | let (idx, _) = self | ||
77 | .files | ||
78 | .iter() | ||
79 | .enumerate() | ||
80 | .find(|(_, (p, _text))| path == p) | ||
81 | .expect("no file in this mock"); | ||
82 | FileId(idx as u32 + 1) | ||
83 | } | ||
84 | pub fn analysis_host(self) -> AnalysisHost { | ||
85 | let mut host = AnalysisHost::default(); | ||
86 | let mut file_map = FileMap::default(); | ||
87 | let source_root = SourceRootId(0); | ||
88 | let mut change = AnalysisChange::new(); | ||
89 | change.add_root(source_root, true); | ||
90 | let mut crate_graph = CrateGraph::default(); | ||
91 | for (path, contents) in self.files.into_iter() { | ||
92 | assert!(path.starts_with('/')); | ||
93 | let path = RelativePathBuf::from_path(&path[1..]).unwrap(); | ||
94 | let file_id = file_map.add(path.clone()); | ||
95 | if path == "/lib.rs" || path == "/main.rs" { | ||
96 | crate_graph.add_crate_root(file_id); | ||
97 | } | ||
98 | change.add_file(source_root, file_id, path, Arc::new(contents)); | ||
99 | } | ||
100 | change.set_crate_graph(crate_graph); | ||
101 | // change.set_file_resolver(Arc::new(file_map)); | ||
102 | host.apply_change(change); | ||
103 | host | ||
104 | } | ||
105 | pub fn analysis(self) -> Analysis { | ||
106 | self.analysis_host().analysis() | ||
107 | } | ||
108 | } | ||
109 | |||
110 | /// Creates analysis from a multi-file fixture and returns the position marked with <|>. | ||
111 | pub fn analysis_and_position(fixture: &str) -> (Analysis, FilePosition) { | ||
112 | let (mock, position) = MockAnalysis::with_files_and_position(fixture); | ||
113 | (mock.analysis(), position) | ||
114 | } | ||
115 | |||
116 | /// Creates analysis for a single file. | ||
117 | pub fn single_file(code: &str) -> (Analysis, FileId) { | ||
118 | let mut mock = MockAnalysis::new(); | ||
119 | let file_id = mock.add_file("/main.rs", code); | ||
120 | (mock.analysis(), file_id) | ||
121 | } | ||
122 | |||
123 | /// Creates analysis for a single file, returns position marked with <|>. | ||
124 | pub fn single_file_with_position(code: &str) -> (Analysis, FilePosition) { | ||
125 | let mut mock = MockAnalysis::new(); | ||
126 | let pos = mock.add_file_with_position("/main.rs", code); | ||
127 | (mock.analysis(), pos) | ||
128 | } | ||
129 | |||
130 | /// Creates analysis for a single file, returns range marked with a pair of <|>. | ||
131 | pub fn single_file_with_range(code: &str) -> (Analysis, FileRange) { | ||
132 | let mut mock = MockAnalysis::new(); | ||
133 | let pos = mock.add_file_with_range("/main.rs", code); | ||
134 | (mock.analysis(), pos) | ||
135 | } | ||
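As a usage illustration (hypothetical, but mirroring the helpers above), `single_file_with_range` pairs naturally with `Analysis::extend_selection`, since both work on a `FileRange` delimited by a pair of `<|>` markers:

```rust
use ra_ide_api::mock_analysis::single_file_with_range;

fn main() {
    // The two `<|>` markers delimit the initial selection in the fixture;
    // extend_selection then widens it to the enclosing syntax node.
    let (analysis, frange) =
        single_file_with_range("fn main() { let x = <|>1 + 2<|>; }");
    let extended = analysis.extend_selection(frange);
    println!("selection extended to {:?}", extended);
}
```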
diff --git a/crates/ra_ide_api/src/runnables.rs b/crates/ra_ide_api/src/runnables.rs new file mode 100644 index 000000000..98b1d2d55 --- /dev/null +++ b/crates/ra_ide_api/src/runnables.rs | |||
@@ -0,0 +1,89 @@ | |||
1 | use itertools::Itertools; | ||
2 | use ra_syntax::{ | ||
3 | TextRange, SyntaxNode, | ||
4 | ast::{self, AstNode, NameOwner, ModuleItemOwner}, | ||
5 | }; | ||
6 | use ra_db::{Cancelable, SyntaxDatabase}; | ||
7 | |||
8 | use crate::{db::RootDatabase, FileId}; | ||
9 | |||
10 | #[derive(Debug)] | ||
11 | pub struct Runnable { | ||
12 | pub range: TextRange, | ||
13 | pub kind: RunnableKind, | ||
14 | } | ||
15 | |||
16 | #[derive(Debug)] | ||
17 | pub enum RunnableKind { | ||
18 | Test { name: String }, | ||
19 | TestMod { path: String }, | ||
20 | Bin, | ||
21 | } | ||
22 | |||
23 | pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Cancelable<Vec<Runnable>> { | ||
24 | let source_file = db.source_file(file_id); | ||
25 | let res = source_file | ||
26 | .syntax() | ||
27 | .descendants() | ||
28 | .filter_map(|i| runnable(db, file_id, i)) | ||
29 | .collect(); | ||
30 | Ok(res) | ||
31 | } | ||
32 | |||
33 | fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Runnable> { | ||
34 | if let Some(fn_def) = ast::FnDef::cast(item) { | ||
35 | runnable_fn(fn_def) | ||
36 | } else if let Some(m) = ast::Module::cast(item) { | ||
37 | runnable_mod(db, file_id, m) | ||
38 | } else { | ||
39 | None | ||
40 | } | ||
41 | } | ||
42 | |||
43 | fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> { | ||
44 | let name = fn_def.name()?.text(); | ||
45 | let kind = if name == "main" { | ||
46 | RunnableKind::Bin | ||
47 | } else if fn_def.has_atom_attr("test") { | ||
48 | RunnableKind::Test { | ||
49 | name: name.to_string(), | ||
50 | } | ||
51 | } else { | ||
52 | return None; | ||
53 | }; | ||
54 | Some(Runnable { | ||
55 | range: fn_def.syntax().range(), | ||
56 | kind, | ||
57 | }) | ||
58 | } | ||
59 | |||
60 | fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option<Runnable> { | ||
61 | let has_test_function = module | ||
62 | .item_list()? | ||
63 | .items() | ||
64 | .filter_map(|it| match it.kind() { | ||
65 | ast::ModuleItemKind::FnDef(it) => Some(it), | ||
66 | _ => None, | ||
67 | }) | ||
68 | .any(|f| f.has_atom_attr("test")); | ||
69 | if !has_test_function { | ||
70 | return None; | ||
71 | } | ||
72 | let range = module.syntax().range(); | ||
73 | let module = | ||
74 | hir::source_binder::module_from_child_node(db, file_id, module.syntax()).ok()??; | ||
75 | |||
76 | // FIXME: thread cancellation instead of `.ok`ing | ||
77 | let path = module | ||
78 | .path_to_root(db) | ||
79 | .ok()? | ||
80 | .into_iter() | ||
81 | .rev() | ||
82 | .filter_map(|it| it.name(db).ok()) | ||
83 | .filter_map(|it| it) | ||
84 | .join("::"); | ||
85 | Some(Runnable { | ||
86 | range, | ||
87 | kind: RunnableKind::TestMod { path }, | ||
88 | }) | ||
89 | } | ||
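`Runnable` and `RunnableKind` above carry only a range plus a test name or module path; turning that into a command is left to the client. A rough sketch of such a mapping, assuming a cargo-style test filter (the real translation lives in `ra_lsp_server`'s handlers and may differ in detail):

```rust
use ra_ide_api::RunnableKind;

// Hypothetical helper: map a RunnableKind to `cargo` arguments.
fn cargo_args(kind: &RunnableKind) -> Vec<String> {
    match kind {
        RunnableKind::Bin => vec!["run".into()],
        RunnableKind::Test { name } => {
            vec!["test".into(), "--".into(), name.clone(), "--nocapture".into()]
        }
        RunnableKind::TestMod { path } => {
            vec!["test".into(), "--".into(), path.clone(), "--nocapture".into()]
        }
    }
}
```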
diff --git a/crates/ra_ide_api/src/symbol_index.rs b/crates/ra_ide_api/src/symbol_index.rs new file mode 100644 index 000000000..8dd15b40e --- /dev/null +++ b/crates/ra_ide_api/src/symbol_index.rs | |||
@@ -0,0 +1,222 @@ | |||
1 | //! This module handles fuzzy-searching of functions, structs and other symbols | ||
2 | //! by name across the whole workspace and dependencies. | ||
3 | //! | ||
4 | //! It works by building an incrementally-updated text-search index of all | ||
5 | //! symbols. The backbone of the index is the **awesome** `fst` crate by | ||
6 | //! @BurntSushi. | ||
7 | //! | ||
8 | //! In a nutshell, you give a set of strings to the `fst`, and it builds a | ||
9 | //! finite state machine describing this set of strings. The strings which | ||
10 | //! could fuzzy-match a pattern can also be described by a finite state machine. | ||
11 | //! What is freakingly cool is that you can now traverse both state machines in | ||
12 | //! lock-step to enumerate the strings which are both in the input set and | ||
13 | //! fuzzy-match the query. Or, more formally, given two languages described by | ||
14 | //! fsts, one can build a product fst which describes the intersection of the | ||
15 | //! languages. | ||
16 | //! | ||
17 | //! `fst` does not support cheap updating of the index, but it supports unioning | ||
18 | //! of state machines. So, to account for changing source code, we build an fst | ||
19 | //! for each library (which is assumed to never change) and an fst for each rust | ||
20 | //! file in the current workspace, and run a query against the union of all | ||
21 | //! those fsts. | ||
22 | use std::{ | ||
23 | cmp::Ordering, | ||
24 | hash::{Hash, Hasher}, | ||
25 | sync::Arc, | ||
26 | }; | ||
27 | |||
28 | use fst::{self, Streamer}; | ||
29 | use ra_syntax::{ | ||
30 | SyntaxNode, SourceFile, SmolStr, TreePtr, AstNode, | ||
31 | algo::{visit::{visitor, Visitor}, find_covering_node}, | ||
32 | SyntaxKind::{self, *}, | ||
33 | ast::{self, NameOwner}, | ||
34 | }; | ||
35 | use ra_db::{SourceRootId, FilesDatabase, LocalSyntaxPtr}; | ||
36 | use salsa::ParallelDatabase; | ||
37 | use rayon::prelude::*; | ||
38 | |||
39 | use crate::{ | ||
40 | Cancelable, FileId, Query, | ||
41 | db::RootDatabase, | ||
42 | }; | ||
43 | |||
44 | salsa::query_group! { | ||
45 | pub(crate) trait SymbolsDatabase: hir::db::HirDatabase { | ||
46 | fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> { | ||
47 | type FileSymbolsQuery; | ||
48 | } | ||
49 | fn library_symbols(id: SourceRootId) -> Arc<SymbolIndex> { | ||
50 | type LibrarySymbolsQuery; | ||
51 | storage input; | ||
52 | } | ||
53 | } | ||
54 | } | ||
55 | |||
56 | fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> { | ||
57 | db.check_canceled()?; | ||
58 | let source_file = db.source_file(file_id); | ||
59 | let mut symbols = source_file | ||
60 | .syntax() | ||
61 | .descendants() | ||
62 | .filter_map(to_symbol) | ||
63 | .map(move |(name, ptr)| FileSymbol { name, ptr, file_id }) | ||
64 | .collect::<Vec<_>>(); | ||
65 | |||
66 | for (name, text_range) in hir::source_binder::macro_symbols(db, file_id)? { | ||
67 | let node = find_covering_node(source_file.syntax(), text_range); | ||
68 | let ptr = LocalSyntaxPtr::new(node); | ||
69 | symbols.push(FileSymbol { file_id, name, ptr }) | ||
70 | } | ||
71 | |||
72 | Ok(Arc::new(SymbolIndex::new(symbols))) | ||
73 | } | ||
74 | |||
75 | pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Cancelable<Vec<FileSymbol>> { | ||
76 | /// Need to wrap Snapshot to provide `Clone` impl for `map_with` | ||
77 | struct Snap(salsa::Snapshot<RootDatabase>); | ||
78 | impl Clone for Snap { | ||
79 | fn clone(&self) -> Snap { | ||
80 | Snap(self.0.snapshot()) | ||
81 | } | ||
82 | } | ||
83 | |||
84 | let buf: Vec<Arc<SymbolIndex>> = if query.libs { | ||
85 | let snap = Snap(db.snapshot()); | ||
86 | db.library_roots() | ||
87 | .par_iter() | ||
88 | .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id)) | ||
89 | .collect() | ||
90 | } else { | ||
91 | let mut files = Vec::new(); | ||
92 | for &root in db.local_roots().iter() { | ||
93 | let sr = db.source_root(root); | ||
94 | files.extend(sr.files.values().map(|&it| it)) | ||
95 | } | ||
96 | |||
97 | let snap = Snap(db.snapshot()); | ||
98 | files | ||
99 | .par_iter() | ||
100 | .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) | ||
101 | .filter_map(|it| it.ok()) | ||
102 | .collect() | ||
103 | }; | ||
104 | Ok(query.search(&buf)) | ||
105 | } | ||
106 | |||
107 | #[derive(Default, Debug)] | ||
108 | pub(crate) struct SymbolIndex { | ||
109 | symbols: Vec<FileSymbol>, | ||
110 | map: fst::Map, | ||
111 | } | ||
112 | |||
113 | impl PartialEq for SymbolIndex { | ||
114 | fn eq(&self, other: &SymbolIndex) -> bool { | ||
115 | self.symbols == other.symbols | ||
116 | } | ||
117 | } | ||
118 | |||
119 | impl Eq for SymbolIndex {} | ||
120 | |||
121 | impl Hash for SymbolIndex { | ||
122 | fn hash<H: Hasher>(&self, hasher: &mut H) { | ||
123 | self.symbols.hash(hasher) | ||
124 | } | ||
125 | } | ||
126 | |||
127 | impl SymbolIndex { | ||
128 | fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex { | ||
129 | fn cmp(s1: &FileSymbol, s2: &FileSymbol) -> Ordering { | ||
130 | unicase::Ascii::new(s1.name.as_str()).cmp(&unicase::Ascii::new(s2.name.as_str())) | ||
131 | } | ||
132 | symbols.par_sort_by(cmp); | ||
133 | symbols.dedup_by(|s1, s2| cmp(s1, s2) == Ordering::Equal); | ||
134 | let names = symbols.iter().map(|it| it.name.as_str().to_lowercase()); | ||
135 | let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap(); | ||
136 | SymbolIndex { symbols, map } | ||
137 | } | ||
138 | |||
139 | pub(crate) fn len(&self) -> usize { | ||
140 | self.symbols.len() | ||
141 | } | ||
142 | |||
143 | pub(crate) fn for_files( | ||
144 | files: impl ParallelIterator<Item = (FileId, TreePtr<SourceFile>)>, | ||
145 | ) -> SymbolIndex { | ||
146 | let symbols = files | ||
147 | .flat_map(|(file_id, file)| { | ||
148 | file.syntax() | ||
149 | .descendants() | ||
150 | .filter_map(to_symbol) | ||
151 | .map(move |(name, ptr)| FileSymbol { name, ptr, file_id }) | ||
152 | .collect::<Vec<_>>() | ||
153 | }) | ||
154 | .collect::<Vec<_>>(); | ||
155 | SymbolIndex::new(symbols) | ||
156 | } | ||
157 | } | ||
158 | |||
159 | impl Query { | ||
160 | pub(crate) fn search(self, indices: &[Arc<SymbolIndex>]) -> Vec<FileSymbol> { | ||
161 | let mut op = fst::map::OpBuilder::new(); | ||
162 | for file_symbols in indices.iter() { | ||
163 | let automaton = fst::automaton::Subsequence::new(&self.lowercased); | ||
164 | op = op.add(file_symbols.map.search(automaton)) | ||
165 | } | ||
166 | let mut stream = op.union(); | ||
167 | let mut res = Vec::new(); | ||
168 | while let Some((_, indexed_values)) = stream.next() { | ||
169 | if res.len() >= self.limit { | ||
170 | break; | ||
171 | } | ||
172 | for indexed_value in indexed_values { | ||
173 | let file_symbols = &indices[indexed_value.index]; | ||
174 | let idx = indexed_value.value as usize; | ||
175 | |||
176 | let symbol = &file_symbols.symbols[idx]; | ||
177 | if self.only_types && !is_type(symbol.ptr.kind()) { | ||
178 | continue; | ||
179 | } | ||
180 | if self.exact && symbol.name != self.query { | ||
181 | continue; | ||
182 | } | ||
183 | res.push(symbol.clone()); | ||
184 | } | ||
185 | } | ||
186 | res | ||
187 | } | ||
188 | } | ||
189 | |||
190 | fn is_type(kind: SyntaxKind) -> bool { | ||
191 | match kind { | ||
192 | STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_DEF => true, | ||
193 | _ => false, | ||
194 | } | ||
195 | } | ||
196 | |||
197 | /// The actual data that is stored in the index. It should be as compact as | ||
198 | /// possible. | ||
199 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
200 | pub(crate) struct FileSymbol { | ||
201 | pub(crate) file_id: FileId, | ||
202 | pub(crate) name: SmolStr, | ||
203 | pub(crate) ptr: LocalSyntaxPtr, | ||
204 | } | ||
205 | |||
206 | fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, LocalSyntaxPtr)> { | ||
207 | fn decl<N: NameOwner>(node: &N) -> Option<(SmolStr, LocalSyntaxPtr)> { | ||
208 | let name = node.name()?.text().clone(); | ||
209 | let ptr = LocalSyntaxPtr::new(node.syntax()); | ||
210 | Some((name, ptr)) | ||
211 | } | ||
212 | visitor() | ||
213 | .visit(decl::<ast::FnDef>) | ||
214 | .visit(decl::<ast::StructDef>) | ||
215 | .visit(decl::<ast::EnumDef>) | ||
216 | .visit(decl::<ast::TraitDef>) | ||
217 | .visit(decl::<ast::Module>) | ||
218 | .visit(decl::<ast::TypeDef>) | ||
219 | .visit(decl::<ast::ConstDef>) | ||
220 | .visit(decl::<ast::StaticDef>) | ||
221 | .accept(node)? | ||
222 | } | ||
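The module-level comment above is the key idea behind `symbol_index`. As a standalone illustration of the `fst` API it relies on (the same `Map` plus `Subsequence` automaton used by `Query::search`), a small sketch with hand-picked keys:

```rust
use fst::{automaton::Subsequence, IntoStreamer, Map, Streamer};

fn main() -> Result<(), fst::Error> {
    // Map::from_iter needs keys in lexicographic order; SymbolIndex::new
    // guarantees this by sorting symbol names case-insensitively first.
    let map = Map::from_iter(vec![
        ("analysishost", 0u64),
        ("symbolindex", 1),
        ("syntaxnode", 2),
    ])?;

    // A Subsequence automaton accepts every key containing the query's
    // characters in order -- the "fuzzy" matching described above.
    let mut stream = map.search(Subsequence::new("sn")).into_stream();
    while let Some((key, value)) = stream.next() {
        println!("{} -> {}", String::from_utf8_lossy(key), value);
    }
    Ok(())
}
```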
diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs new file mode 100644 index 000000000..cb19e9515 --- /dev/null +++ b/crates/ra_ide_api/src/syntax_highlighting.rs | |||
@@ -0,0 +1,92 @@ | |||
1 | use ra_syntax::{ast, AstNode,}; | ||
2 | use ra_db::SyntaxDatabase; | ||
3 | |||
4 | use crate::{ | ||
5 | FileId, Cancelable, HighlightedRange, | ||
6 | db::RootDatabase, | ||
7 | }; | ||
8 | |||
9 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> { | ||
10 | let source_file = db.source_file(file_id); | ||
11 | let mut res = ra_ide_api_light::highlight(source_file.syntax()); | ||
12 | for macro_call in source_file | ||
13 | .syntax() | ||
14 | .descendants() | ||
15 | .filter_map(ast::MacroCall::cast) | ||
16 | { | ||
17 | if let Some((off, exp)) = hir::MacroDef::ast_expand(macro_call) { | ||
18 | let mapped_ranges = ra_ide_api_light::highlight(&exp.syntax()) | ||
19 | .into_iter() | ||
20 | .filter_map(|r| { | ||
21 | let mapped_range = exp.map_range_back(r.range)?; | ||
22 | let res = HighlightedRange { | ||
23 | range: mapped_range + off, | ||
24 | tag: r.tag, | ||
25 | }; | ||
26 | Some(res) | ||
27 | }); | ||
28 | res.extend(mapped_ranges); | ||
29 | } | ||
30 | } | ||
31 | Ok(res) | ||
32 | } | ||
33 | |||
34 | #[cfg(test)] | ||
35 | mod tests { | ||
36 | use crate::mock_analysis::single_file; | ||
37 | use test_utils::assert_eq_dbg; | ||
38 | |||
39 | #[test] | ||
40 | fn highlights_code_inside_macros() { | ||
41 | let (analysis, file_id) = single_file( | ||
42 | " | ||
43 | fn main() { | ||
44 | ctry!({ let x = 92; x}); | ||
45 | vec![{ let x = 92; x}]; | ||
46 | } | ||
47 | ", | ||
48 | ); | ||
49 | let highlights = analysis.highlight(file_id).unwrap(); | ||
50 | assert_eq_dbg( | ||
51 | r#"[HighlightedRange { range: [13; 15), tag: "keyword" }, | ||
52 | HighlightedRange { range: [16; 20), tag: "function" }, | ||
53 | HighlightedRange { range: [41; 46), tag: "macro" }, | ||
54 | HighlightedRange { range: [49; 52), tag: "keyword" }, | ||
55 | HighlightedRange { range: [57; 59), tag: "literal" }, | ||
56 | HighlightedRange { range: [82; 86), tag: "macro" }, | ||
57 | HighlightedRange { range: [89; 92), tag: "keyword" }, | ||
58 | HighlightedRange { range: [97; 99), tag: "literal" }, | ||
59 | HighlightedRange { range: [49; 52), tag: "keyword" }, | ||
60 | HighlightedRange { range: [53; 54), tag: "function" }, | ||
61 | HighlightedRange { range: [57; 59), tag: "literal" }, | ||
62 | HighlightedRange { range: [61; 62), tag: "text" }, | ||
63 | HighlightedRange { range: [89; 92), tag: "keyword" }, | ||
64 | HighlightedRange { range: [93; 94), tag: "function" }, | ||
65 | HighlightedRange { range: [97; 99), tag: "literal" }, | ||
66 | HighlightedRange { range: [101; 102), tag: "text" }]"#, | ||
67 | &highlights, | ||
68 | ) | ||
69 | } | ||
70 | |||
71 | // FIXME: this test is not really necessary: artifact of the initial hacky | ||
72 | // macros implementation. | ||
73 | #[test] | ||
74 | fn highlight_query_group_macro() { | ||
75 | let (analysis, file_id) = single_file( | ||
76 | " | ||
77 | salsa::query_group! { | ||
78 | pub trait HirDatabase: SyntaxDatabase {} | ||
79 | } | ||
80 | ", | ||
81 | ); | ||
82 | let highlights = analysis.highlight(file_id).unwrap(); | ||
83 | assert_eq_dbg( | ||
84 | r#"[HighlightedRange { range: [20; 32), tag: "macro" }, | ||
85 | HighlightedRange { range: [13; 18), tag: "text" }, | ||
86 | HighlightedRange { range: [51; 54), tag: "keyword" }, | ||
87 | HighlightedRange { range: [55; 60), tag: "keyword" }, | ||
88 | HighlightedRange { range: [61; 72), tag: "function" }]"#, | ||
89 | &highlights, | ||
90 | ) | ||
91 | } | ||
92 | } | ||
diff --git a/crates/ra_ide_api/tests/test/main.rs b/crates/ra_ide_api/tests/test/main.rs new file mode 100644 index 000000000..d1dc07e5b --- /dev/null +++ b/crates/ra_ide_api/tests/test/main.rs | |||
@@ -0,0 +1,249 @@ | |||
1 | mod runnables; | ||
2 | |||
3 | use ra_syntax::TextRange; | ||
4 | use test_utils::{assert_eq_dbg, assert_eq_text}; | ||
5 | |||
6 | use ra_ide_api::{ | ||
7 | mock_analysis::{analysis_and_position, single_file, single_file_with_position, MockAnalysis}, | ||
8 | AnalysisChange, CrateGraph, FileId, Query | ||
9 | }; | ||
10 | |||
11 | #[test] | ||
12 | fn test_unresolved_module_diagnostic() { | ||
13 | let (analysis, file_id) = single_file("mod foo;"); | ||
14 | let diagnostics = analysis.diagnostics(file_id).unwrap(); | ||
15 | assert_eq_dbg( | ||
16 | r#"[Diagnostic { | ||
17 | message: "unresolved module", | ||
18 | range: [4; 7), | ||
19 | fix: Some(SourceChange { | ||
20 | label: "create module", | ||
21 | source_file_edits: [], | ||
22 | file_system_edits: [CreateFile { source_root: SourceRootId(0), path: "foo.rs" }], | ||
23 | cursor_position: None }), | ||
24 | severity: Error }]"#, | ||
25 | &diagnostics, | ||
26 | ); | ||
27 | } | ||
28 | |||
29 | // FIXME: move this test to hir | ||
30 | #[test] | ||
31 | fn test_unresolved_module_diagnostic_no_diag_for_inline_mode() { | ||
32 | let (analysis, file_id) = single_file("mod foo {}"); | ||
33 | let diagnostics = analysis.diagnostics(file_id).unwrap(); | ||
34 | assert_eq_dbg(r#"[]"#, &diagnostics); | ||
35 | } | ||
36 | |||
37 | #[test] | ||
38 | fn test_resolve_parent_module() { | ||
39 | let (analysis, pos) = analysis_and_position( | ||
40 | " | ||
41 | //- /lib.rs | ||
42 | mod foo; | ||
43 | //- /foo.rs | ||
44 | <|>// empty | ||
45 | ", | ||
46 | ); | ||
47 | let symbols = analysis.parent_module(pos).unwrap(); | ||
48 | assert_eq_dbg( | ||
49 | r#"[NavigationTarget { file_id: FileId(1), name: "foo", kind: MODULE, range: [4; 7), ptr: None }]"#, | ||
50 | &symbols, | ||
51 | ); | ||
52 | } | ||
53 | |||
54 | #[test] | ||
55 | fn test_resolve_parent_module_for_inline() { | ||
56 | let (analysis, pos) = analysis_and_position( | ||
57 | " | ||
58 | //- /lib.rs | ||
59 | mod foo { | ||
60 | mod bar { | ||
61 | mod baz { <|> } | ||
62 | } | ||
63 | } | ||
64 | ", | ||
65 | ); | ||
66 | let symbols = analysis.parent_module(pos).unwrap(); | ||
67 | assert_eq_dbg( | ||
68 | r#"[NavigationTarget { file_id: FileId(1), name: "baz", kind: MODULE, range: [36; 39), ptr: None }]"#, | ||
69 | &symbols, | ||
70 | ); | ||
71 | } | ||
72 | |||
73 | #[test] | ||
74 | fn test_resolve_crate_root() { | ||
75 | let mock = MockAnalysis::with_files( | ||
76 | " | ||
77 | //- /bar.rs | ||
78 | mod foo; | ||
79 | //- /bar/foo.rs | ||
80 | // empty <|> | ||
81 | ", | ||
82 | ); | ||
83 | let root_file = mock.id_of("/bar.rs"); | ||
84 | let mod_file = mock.id_of("/bar/foo.rs"); | ||
85 | let mut host = mock.analysis_host(); | ||
86 | assert!(host.analysis().crate_for(mod_file).unwrap().is_empty()); | ||
87 | |||
88 | let mut crate_graph = CrateGraph::default(); | ||
89 | let crate_id = crate_graph.add_crate_root(root_file); | ||
90 | let mut change = AnalysisChange::new(); | ||
91 | change.set_crate_graph(crate_graph); | ||
92 | host.apply_change(change); | ||
93 | |||
94 | assert_eq!(host.analysis().crate_for(mod_file).unwrap(), vec![crate_id]); | ||
95 | } | ||
96 | |||
97 | fn get_all_refs(text: &str) -> Vec<(FileId, TextRange)> { | ||
98 | let (analysis, position) = single_file_with_position(text); | ||
99 | analysis.find_all_refs(position).unwrap() | ||
100 | } | ||
101 | |||
102 | #[test] | ||
103 | fn test_find_all_refs_for_local() { | ||
104 | let code = r#" | ||
105 | fn main() { | ||
106 | let mut i = 1; | ||
107 | let j = 1; | ||
108 | i = i<|> + j; | ||
109 | |||
110 | { | ||
111 | i = 0; | ||
112 | } | ||
113 | |||
114 | i = 5; | ||
115 | }"#; | ||
116 | |||
117 | let refs = get_all_refs(code); | ||
118 | assert_eq!(refs.len(), 5); | ||
119 | } | ||
120 | |||
121 | #[test] | ||
122 | fn test_find_all_refs_for_param_inside() { | ||
123 | let code = r#" | ||
124 | fn foo(i : u32) -> u32 { | ||
125 | i<|> | ||
126 | }"#; | ||
127 | |||
128 | let refs = get_all_refs(code); | ||
129 | assert_eq!(refs.len(), 2); | ||
130 | } | ||
131 | |||
132 | #[test] | ||
133 | fn test_find_all_refs_for_fn_param() { | ||
134 | let code = r#" | ||
135 | fn foo(i<|> : u32) -> u32 { | ||
136 | i | ||
137 | }"#; | ||
138 | |||
139 | let refs = get_all_refs(code); | ||
140 | assert_eq!(refs.len(), 2); | ||
141 | } | ||
142 | #[test] | ||
143 | fn test_rename_for_local() { | ||
144 | test_rename( | ||
145 | r#" | ||
146 | fn main() { | ||
147 | let mut i = 1; | ||
148 | let j = 1; | ||
149 | i = i<|> + j; | ||
150 | |||
151 | { | ||
152 | i = 0; | ||
153 | } | ||
154 | |||
155 | i = 5; | ||
156 | }"#, | ||
157 | "k", | ||
158 | r#" | ||
159 | fn main() { | ||
160 | let mut k = 1; | ||
161 | let j = 1; | ||
162 | k = k + j; | ||
163 | |||
164 | { | ||
165 | k = 0; | ||
166 | } | ||
167 | |||
168 | k = 5; | ||
169 | }"#, | ||
170 | ); | ||
171 | } | ||
172 | |||
173 | #[test] | ||
174 | fn test_rename_for_param_inside() { | ||
175 | test_rename( | ||
176 | r#" | ||
177 | fn foo(i : u32) -> u32 { | ||
178 | i<|> | ||
179 | }"#, | ||
180 | "j", | ||
181 | r#" | ||
182 | fn foo(j : u32) -> u32 { | ||
183 | j | ||
184 | }"#, | ||
185 | ); | ||
186 | } | ||
187 | |||
188 | #[test] | ||
189 | fn test_rename_refs_for_fn_param() { | ||
190 | test_rename( | ||
191 | r#" | ||
192 | fn foo(i<|> : u32) -> u32 { | ||
193 | i | ||
194 | }"#, | ||
195 | "new_name", | ||
196 | r#" | ||
197 | fn foo(new_name : u32) -> u32 { | ||
198 | new_name | ||
199 | }"#, | ||
200 | ); | ||
201 | } | ||
202 | |||
203 | #[test] | ||
204 | fn test_rename_for_mut_param() { | ||
205 | test_rename( | ||
206 | r#" | ||
207 | fn foo(mut i<|> : u32) -> u32 { | ||
208 | i | ||
209 | }"#, | ||
210 | "new_name", | ||
211 | r#" | ||
212 | fn foo(mut new_name : u32) -> u32 { | ||
213 | new_name | ||
214 | }"#, | ||
215 | ); | ||
216 | } | ||
217 | |||
218 | fn test_rename(text: &str, new_name: &str, expected: &str) { | ||
219 | let (analysis, position) = single_file_with_position(text); | ||
220 | let edits = analysis.rename(position, new_name).unwrap(); | ||
221 | let mut text_edit_builder = ra_text_edit::TextEditBuilder::default(); | ||
222 | let mut file_id: Option<FileId> = None; | ||
223 | for edit in edits { | ||
224 | file_id = Some(edit.file_id); | ||
225 | for atom in edit.edit.as_atoms() { | ||
226 | text_edit_builder.replace(atom.delete, atom.insert.clone()); | ||
227 | } | ||
228 | } | ||
229 | let result = text_edit_builder | ||
230 | .finish() | ||
231 | .apply(&*analysis.file_text(file_id.unwrap())); | ||
232 | assert_eq_text!(expected, &*result); | ||
233 | } | ||
234 | |||
235 | #[test] | ||
236 | fn world_symbols_include_stuff_from_macros() { | ||
237 | let (analysis, _) = single_file( | ||
238 | " | ||
239 | salsa::query_group! { | ||
240 | pub trait HirDatabase: SyntaxDatabase {} | ||
241 | } | ||
242 | ", | ||
243 | ); | ||
244 | |||
245 | let mut symbols = analysis.symbol_search(Query::new("Hir".into())).unwrap(); | ||
246 | let s = symbols.pop().unwrap(); | ||
247 | assert_eq!(s.name(), "HirDatabase"); | ||
248 | assert_eq!(s.range(), TextRange::from_to(33.into(), 44.into())); | ||
249 | } | ||
diff --git a/crates/ra_ide_api/tests/test/runnables.rs b/crates/ra_ide_api/tests/test/runnables.rs new file mode 100644 index 000000000..da8d5e0d5 --- /dev/null +++ b/crates/ra_ide_api/tests/test/runnables.rs | |||
@@ -0,0 +1,109 @@ | |||
1 | use test_utils::assert_eq_dbg; | ||
2 | |||
3 | use ra_ide_api::mock_analysis::analysis_and_position; | ||
4 | |||
5 | #[test] | ||
6 | fn test_runnables() { | ||
7 | let (analysis, pos) = analysis_and_position( | ||
8 | r#" | ||
9 | //- /lib.rs | ||
10 | <|> //empty | ||
11 | fn main() {} | ||
12 | |||
13 | #[test] | ||
14 | fn test_foo() {} | ||
15 | |||
16 | #[test] | ||
17 | #[ignore] | ||
18 | fn test_foo() {} | ||
19 | "#, | ||
20 | ); | ||
21 | let runnables = analysis.runnables(pos.file_id).unwrap(); | ||
22 | assert_eq_dbg( | ||
23 | r#"[Runnable { range: [1; 21), kind: Bin }, | ||
24 | Runnable { range: [22; 46), kind: Test { name: "test_foo" } }, | ||
25 | Runnable { range: [47; 81), kind: Test { name: "test_foo" } }]"#, | ||
26 | &runnables, | ||
27 | ) | ||
28 | } | ||
29 | |||
30 | #[test] | ||
31 | fn test_runnables_module() { | ||
32 | let (analysis, pos) = analysis_and_position( | ||
33 | r#" | ||
34 | //- /lib.rs | ||
35 | <|> //empty | ||
36 | mod test_mod { | ||
37 | #[test] | ||
38 | fn test_foo1() {} | ||
39 | } | ||
40 | "#, | ||
41 | ); | ||
42 | let runnables = analysis.runnables(pos.file_id).unwrap(); | ||
43 | assert_eq_dbg( | ||
44 | r#"[Runnable { range: [1; 59), kind: TestMod { path: "test_mod" } }, | ||
45 | Runnable { range: [28; 57), kind: Test { name: "test_foo1" } }]"#, | ||
46 | &runnables, | ||
47 | ) | ||
48 | } | ||
49 | |||
50 | #[test] | ||
51 | fn test_runnables_one_depth_layer_module() { | ||
52 | let (analysis, pos) = analysis_and_position( | ||
53 | r#" | ||
54 | //- /lib.rs | ||
55 | <|> //empty | ||
56 | mod foo { | ||
57 | mod test_mod { | ||
58 | #[test] | ||
59 | fn test_foo1() {} | ||
60 | } | ||
61 | } | ||
62 | "#, | ||
63 | ); | ||
64 | let runnables = analysis.runnables(pos.file_id).unwrap(); | ||
65 | assert_eq_dbg( | ||
66 | r#"[Runnable { range: [23; 85), kind: TestMod { path: "foo::test_mod" } }, | ||
67 | Runnable { range: [46; 79), kind: Test { name: "test_foo1" } }]"#, | ||
68 | &runnables, | ||
69 | ) | ||
70 | } | ||
71 | |||
72 | #[test] | ||
73 | fn test_runnables_multiple_depth_module() { | ||
74 | let (analysis, pos) = analysis_and_position( | ||
75 | r#" | ||
76 | //- /lib.rs | ||
77 | <|> //empty | ||
78 | mod foo { | ||
79 | mod bar { | ||
80 | mod test_mod { | ||
81 | #[test] | ||
82 | fn test_foo1() {} | ||
83 | } | ||
84 | } | ||
85 | } | ||
86 | "#, | ||
87 | ); | ||
88 | let runnables = analysis.runnables(pos.file_id).unwrap(); | ||
89 | assert_eq_dbg( | ||
90 | r#"[Runnable { range: [41; 115), kind: TestMod { path: "foo::bar::test_mod" } }, | ||
91 | Runnable { range: [68; 105), kind: Test { name: "test_foo1" } }]"#, | ||
92 | &runnables, | ||
93 | ) | ||
94 | } | ||
95 | |||
96 | #[test] | ||
97 | fn test_runnables_no_test_function_in_module() { | ||
98 | let (analysis, pos) = analysis_and_position( | ||
99 | r#" | ||
100 | //- /lib.rs | ||
101 | <|> //empty | ||
102 | mod test_mod { | ||
103 | fn foo1() {} | ||
104 | } | ||
105 | "#, | ||
106 | ); | ||
107 | let runnables = analysis.runnables(pos.file_id).unwrap(); | ||
108 | assert_eq_dbg(r#"[]"#, &runnables) | ||
109 | } | ||
diff --git a/crates/ra_lsp_server/Cargo.toml b/crates/ra_lsp_server/Cargo.toml index b9fd61105..296fae34f 100644 --- a/crates/ra_lsp_server/Cargo.toml +++ b/crates/ra_lsp_server/Cargo.toml | |||
@@ -29,7 +29,7 @@ parking_lot = "0.7.0" | |||
29 | thread_worker = { path = "../thread_worker" } | 29 | thread_worker = { path = "../thread_worker" } |
30 | ra_syntax = { path = "../ra_syntax" } | 30 | ra_syntax = { path = "../ra_syntax" } |
31 | ra_text_edit = { path = "../ra_text_edit" } | 31 | ra_text_edit = { path = "../ra_text_edit" } |
32 | ra_analysis = { path = "../ra_analysis" } | 32 | ra_ide_api = { path = "../ra_ide_api" } |
33 | gen_lsp_server = { path = "../gen_lsp_server" } | 33 | gen_lsp_server = { path = "../gen_lsp_server" } |
34 | ra_vfs = { path = "../ra_vfs" } | 34 | ra_vfs = { path = "../ra_vfs" } |
35 | 35 | ||
diff --git a/crates/ra_lsp_server/src/conv.rs b/crates/ra_lsp_server/src/conv.rs index b3f8c83cc..5c8b3c194 100644 --- a/crates/ra_lsp_server/src/conv.rs +++ b/crates/ra_lsp_server/src/conv.rs | |||
@@ -4,7 +4,7 @@ use languageserver_types::{ | |||
4 | TextDocumentItem, TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier, | 4 | TextDocumentItem, TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier, |
5 | WorkspaceEdit, | 5 | WorkspaceEdit, |
6 | }; | 6 | }; |
7 | use ra_analysis::{ | 7 | use ra_ide_api::{ |
8 | CompletionItem, CompletionItemKind, FileId, FilePosition, FileRange, FileSystemEdit, | 8 | CompletionItem, CompletionItemKind, FileId, FilePosition, FileRange, FileSystemEdit, |
9 | InsertText, NavigationTarget, SourceChange, SourceFileEdit, | 9 | InsertText, NavigationTarget, SourceChange, SourceFileEdit, |
10 | LineCol, LineIndex, translate_offset_with_edit | 10 | LineCol, LineIndex, translate_offset_with_edit |
diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs index 2dc1be26a..96923fac7 100644 --- a/crates/ra_lsp_server/src/main_loop.rs +++ b/crates/ra_lsp_server/src/main_loop.rs | |||
@@ -10,7 +10,7 @@ use gen_lsp_server::{ | |||
10 | handle_shutdown, ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse, | 10 | handle_shutdown, ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse, |
11 | }; | 11 | }; |
12 | use languageserver_types::NumberOrString; | 12 | use languageserver_types::NumberOrString; |
13 | use ra_analysis::{Canceled, FileId, LibraryData}; | 13 | use ra_ide_api::{Canceled, FileId, LibraryData}; |
14 | use ra_vfs::VfsTask; | 14 | use ra_vfs::VfsTask; |
15 | use rayon; | 15 | use rayon; |
16 | use rustc_hash::FxHashSet; | 16 | use rustc_hash::FxHashSet; |
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs index b7777bfc3..a653c5ada 100644 --- a/crates/ra_lsp_server/src/main_loop/handlers.rs +++ b/crates/ra_lsp_server/src/main_loop/handlers.rs | |||
@@ -8,7 +8,7 @@ use languageserver_types::{ | |||
8 | ParameterInformation, ParameterLabel, Position, PrepareRenameResponse, Range, RenameParams, | 8 | ParameterInformation, ParameterLabel, Position, PrepareRenameResponse, Range, RenameParams, |
9 | SignatureInformation, SymbolInformation, TextDocumentIdentifier, TextEdit, WorkspaceEdit, | 9 | SignatureInformation, SymbolInformation, TextDocumentIdentifier, TextEdit, WorkspaceEdit, |
10 | }; | 10 | }; |
11 | use ra_analysis::{ | 11 | use ra_ide_api::{ |
12 | FileId, FilePosition, FileRange, FoldKind, Query, RunnableKind, Severity, SourceChange, | 12 | FileId, FilePosition, FileRange, FoldKind, Query, RunnableKind, Severity, SourceChange, |
13 | }; | 13 | }; |
14 | use ra_syntax::{TextUnit, AstNode}; | 14 | use ra_syntax::{TextUnit, AstNode}; |
@@ -736,7 +736,7 @@ fn highlight(world: &ServerWorld, file_id: FileId) -> Result<Vec<Decoration>> { | |||
736 | } | 736 | } |
737 | 737 | ||
738 | fn to_diagnostic_severity(severity: Severity) -> DiagnosticSeverity { | 738 | fn to_diagnostic_severity(severity: Severity) -> DiagnosticSeverity { |
739 | use ra_analysis::Severity::*; | 739 | use ra_ide_api::Severity::*; |
740 | 740 | ||
741 | match severity { | 741 | match severity { |
742 | Error => DiagnosticSeverity::Error, | 742 | Error => DiagnosticSeverity::Error, |
diff --git a/crates/ra_lsp_server/src/main_loop/subscriptions.rs b/crates/ra_lsp_server/src/main_loop/subscriptions.rs index 03f41e870..a83e01557 100644 --- a/crates/ra_lsp_server/src/main_loop/subscriptions.rs +++ b/crates/ra_lsp_server/src/main_loop/subscriptions.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | use ra_analysis::FileId; | 1 | use ra_ide_api::FileId; |
2 | use rustc_hash::FxHashSet; | 2 | use rustc_hash::FxHashSet; |
3 | 3 | ||
4 | pub struct Subscriptions { | 4 | pub struct Subscriptions { |
diff --git a/crates/ra_lsp_server/src/server_world.rs b/crates/ra_lsp_server/src/server_world.rs index ebf2b15cc..76c76766d 100644 --- a/crates/ra_lsp_server/src/server_world.rs +++ b/crates/ra_lsp_server/src/server_world.rs | |||
@@ -1,10 +1,10 @@ | |||
1 | use std::{ | 1 | use std::{ |
2 | path::{PathBuf}, | 2 | path::PathBuf, |
3 | sync::Arc, | 3 | sync::Arc, |
4 | }; | 4 | }; |
5 | 5 | ||
6 | use languageserver_types::Url; | 6 | use languageserver_types::Url; |
7 | use ra_analysis::{ | 7 | use ra_ide_api::{ |
8 | Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, LibraryData, | 8 | Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, LibraryData, |
9 | SourceRootId | 9 | SourceRootId |
10 | }; | 10 | }; |
@@ -12,7 +12,7 @@ use ra_vfs::{Vfs, VfsChange, VfsFile, VfsRoot}; | |||
12 | use rustc_hash::FxHashMap; | 12 | use rustc_hash::FxHashMap; |
13 | use relative_path::RelativePathBuf; | 13 | use relative_path::RelativePathBuf; |
14 | use parking_lot::RwLock; | 14 | use parking_lot::RwLock; |
15 | use failure::{format_err}; | 15 | use failure::format_err; |
16 | 16 | ||
17 | use crate::{ | 17 | use crate::{ |
18 | project_model::{CargoWorkspace, TargetKind}, | 18 | project_model::{CargoWorkspace, TargetKind}, |