author | bors[bot] <bors[bot]@users.noreply.github.com> | 2019-01-08 19:48:48 +0000 |
---|---|---|
committer | bors[bot] <bors[bot]@users.noreply.github.com> | 2019-01-08 19:48:48 +0000 |
commit | 46f74e33ca53a7897e9020d3de75cc76a6b89d79 (patch) | |
tree | 2bc001c8ecf58b49ac9a0da1f20d5644ce29fb3a /crates/ra_ide_api/src | |
parent | 4f4f7933b1b7ff34f8633b1686b18b2d1b994c47 (diff) | |
parent | 0c62b1bb7a49bf527780ce1f8cade5eb4fbfdb2d (diff) |
Merge #471
471: rename crates to match reality r=matklad a=matklad
Co-authored-by: Aleksey Kladov <[email protected]>
Diffstat (limited to 'crates/ra_ide_api/src')
20 files changed, 3874 insertions, 0 deletions
diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs new file mode 100644 index 000000000..27b760780 --- /dev/null +++ b/crates/ra_ide_api/src/call_info.rs | |||
@@ -0,0 +1,451 @@ | |||
1 | use std::cmp::{max, min}; | ||
2 | |||
3 | use ra_db::{SyntaxDatabase, Cancelable}; | ||
4 | use ra_syntax::{ | ||
5 | AstNode, SyntaxNode, TextUnit, TextRange, | ||
6 | SyntaxKind::FN_DEF, | ||
7 | ast::{self, ArgListOwner, DocCommentsOwner}, | ||
8 | algo::find_node_at_offset, | ||
9 | }; | ||
10 | |||
11 | use crate::{FilePosition, CallInfo, db::RootDatabase}; | ||
12 | |||
13 | /// Computes parameter information for the given call expression. | ||
14 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Cancelable<Option<CallInfo>> { | ||
15 | let file = db.source_file(position.file_id); | ||
16 | let syntax = file.syntax(); | ||
17 | |||
18 | // Find the calling expression and its NameRef | ||
19 | let calling_node = ctry!(FnCallNode::with_node(syntax, position.offset)); | ||
20 | let name_ref = ctry!(calling_node.name_ref()); | ||
21 | |||
22 | // Resolve the function's NameRef (NOTE: this isn't entirely accurate). | ||
23 | let file_symbols = db.index_resolve(name_ref)?; | ||
24 | let symbol = ctry!(file_symbols.into_iter().find(|it| it.ptr.kind() == FN_DEF)); | ||
25 | let fn_file = db.source_file(symbol.file_id); | ||
26 | let fn_def = symbol.ptr.resolve(&fn_file); | ||
27 | let fn_def = ast::FnDef::cast(&fn_def).unwrap(); | ||
28 | let mut call_info = ctry!(CallInfo::new(fn_def)); | ||
29 | // If we have a calling expression, let's find which argument we are on | ||
30 | let num_params = call_info.parameters.len(); | ||
31 | let has_self = fn_def.param_list().and_then(|l| l.self_param()).is_some(); | ||
32 | |||
33 | if num_params == 1 { | ||
34 | if !has_self { | ||
35 | call_info.active_parameter = Some(0); | ||
36 | } | ||
37 | } else if num_params > 1 { | ||
38 | // Count how many parameters into the call we are. | ||
39 | // TODO: This is best effort for now and should be fixed at some point. | ||
40 | // It may be better to see where we are in the arg_list and then check | ||
41 | // where offset is in that list (or beyond). | ||
42 | // Revisit this after we get documentation comments in. | ||
43 | if let Some(ref arg_list) = calling_node.arg_list() { | ||
44 | let start = arg_list.syntax().range().start(); | ||
45 | |||
46 | let range_search = TextRange::from_to(start, position.offset); | ||
47 | let mut commas: usize = arg_list | ||
48 | .syntax() | ||
49 | .text() | ||
50 | .slice(range_search) | ||
51 | .to_string() | ||
52 | .matches(',') | ||
53 | .count(); | ||
54 | |||
55 | // If we have a method call, eat the first param since it's just self. | ||
56 | if has_self { | ||
57 | commas += 1; | ||
58 | } | ||
59 | |||
60 | call_info.active_parameter = Some(commas); | ||
61 | } | ||
62 | } | ||
63 | |||
64 | Ok(Some(call_info)) | ||
65 | } | ||
66 | |||
67 | enum FnCallNode<'a> { | ||
68 | CallExpr(&'a ast::CallExpr), | ||
69 | MethodCallExpr(&'a ast::MethodCallExpr), | ||
70 | } | ||
71 | |||
72 | impl<'a> FnCallNode<'a> { | ||
73 | pub fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> { | ||
74 | if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) { | ||
75 | return Some(FnCallNode::CallExpr(expr)); | ||
76 | } | ||
77 | if let Some(expr) = find_node_at_offset::<ast::MethodCallExpr>(syntax, offset) { | ||
78 | return Some(FnCallNode::MethodCallExpr(expr)); | ||
79 | } | ||
80 | None | ||
81 | } | ||
82 | |||
83 | pub fn name_ref(&self) -> Option<&'a ast::NameRef> { | ||
84 | match *self { | ||
85 | FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() { | ||
86 | ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, | ||
87 | _ => return None, | ||
88 | }), | ||
89 | |||
90 | FnCallNode::MethodCallExpr(call_expr) => call_expr | ||
91 | .syntax() | ||
92 | .children() | ||
93 | .filter_map(ast::NameRef::cast) | ||
94 | .nth(0), | ||
95 | } | ||
96 | } | ||
97 | |||
98 | pub fn arg_list(&self) -> Option<&'a ast::ArgList> { | ||
99 | match *self { | ||
100 | FnCallNode::CallExpr(expr) => expr.arg_list(), | ||
101 | FnCallNode::MethodCallExpr(expr) => expr.arg_list(), | ||
102 | } | ||
103 | } | ||
104 | } | ||
105 | |||
106 | impl CallInfo { | ||
107 | fn new(node: &ast::FnDef) -> Option<Self> { | ||
108 | let mut doc = None; | ||
109 | |||
110 | // Strip the body out for the label. | ||
111 | let mut label: String = if let Some(body) = node.body() { | ||
112 | let body_range = body.syntax().range(); | ||
113 | let label: String = node | ||
114 | .syntax() | ||
115 | .children() | ||
116 | .filter(|child| !child.range().is_subrange(&body_range)) | ||
117 | .map(|node| node.text().to_string()) | ||
118 | .collect(); | ||
119 | label | ||
120 | } else { | ||
121 | node.syntax().text().to_string() | ||
122 | }; | ||
123 | |||
124 | if let Some((comment_range, docs)) = extract_doc_comments(node) { | ||
125 | let comment_range = comment_range | ||
126 | .checked_sub(node.syntax().range().start()) | ||
127 | .unwrap(); | ||
128 | let start = comment_range.start().to_usize(); | ||
129 | let end = comment_range.end().to_usize(); | ||
130 | |||
131 | // Remove the comment from the label | ||
132 | label.replace_range(start..end, ""); | ||
133 | |||
134 | // Massage markdown | ||
135 | let mut processed_lines = Vec::new(); | ||
136 | let mut in_code_block = false; | ||
137 | for line in docs.lines() { | ||
138 | if line.starts_with("```") { | ||
139 | in_code_block = !in_code_block; | ||
140 | } | ||
141 | |||
142 | let line = if in_code_block && line.starts_with("```") && !line.contains("rust") { | ||
143 | "```rust".into() | ||
144 | } else { | ||
145 | line.to_string() | ||
146 | }; | ||
147 | |||
148 | processed_lines.push(line); | ||
149 | } | ||
150 | |||
151 | if !processed_lines.is_empty() { | ||
152 | doc = Some(processed_lines.join("\n")); | ||
153 | } | ||
154 | } | ||
155 | |||
156 | Some(CallInfo { | ||
157 | parameters: param_list(node), | ||
158 | label: label.trim().to_owned(), | ||
159 | doc, | ||
160 | active_parameter: None, | ||
161 | }) | ||
162 | } | ||
163 | } | ||
164 | |||
165 | fn extract_doc_comments(node: &ast::FnDef) -> Option<(TextRange, String)> { | ||
166 | if node.doc_comments().count() == 0 { | ||
167 | return None; | ||
168 | } | ||
169 | |||
170 | let comment_text = node.doc_comment_text(); | ||
171 | |||
172 | let (begin, end) = node | ||
173 | .doc_comments() | ||
174 | .map(|comment| comment.syntax().range()) | ||
175 | .map(|range| (range.start().to_usize(), range.end().to_usize())) | ||
176 | .fold((std::usize::MAX, std::usize::MIN), |acc, range| { | ||
177 | (min(acc.0, range.0), max(acc.1, range.1)) | ||
178 | }); | ||
179 | |||
180 | let range = TextRange::from_to(TextUnit::from_usize(begin), TextUnit::from_usize(end)); | ||
181 | |||
182 | Some((range, comment_text)) | ||
183 | } | ||
184 | |||
185 | fn param_list(node: &ast::FnDef) -> Vec<String> { | ||
186 | let mut res = vec![]; | ||
187 | if let Some(param_list) = node.param_list() { | ||
188 | if let Some(self_param) = param_list.self_param() { | ||
189 | res.push(self_param.syntax().text().to_string()) | ||
190 | } | ||
191 | |||
192 | // Maybe use param.pat here? See if we can just extract the name? | ||
193 | //res.extend(param_list.params().map(|p| p.syntax().text().to_string())); | ||
194 | res.extend( | ||
195 | param_list | ||
196 | .params() | ||
197 | .filter_map(|p| p.pat()) | ||
198 | .map(|pat| pat.syntax().text().to_string()), | ||
199 | ); | ||
200 | } | ||
201 | res | ||
202 | } | ||
203 | |||
204 | #[cfg(test)] | ||
205 | mod tests { | ||
206 | use super::*; | ||
207 | |||
208 | use crate::mock_analysis::single_file_with_position; | ||
209 | |||
210 | fn call_info(text: &str) -> CallInfo { | ||
211 | let (analysis, position) = single_file_with_position(text); | ||
212 | analysis.call_info(position).unwrap().unwrap() | ||
213 | } | ||
214 | |||
215 | #[test] | ||
216 | fn test_fn_signature_two_args_first() { | ||
217 | let info = call_info( | ||
218 | r#"fn foo(x: u32, y: u32) -> u32 {x + y} | ||
219 | fn bar() { foo(<|>3, ); }"#, | ||
220 | ); | ||
221 | |||
222 | assert_eq!(info.parameters, vec!("x".to_string(), "y".to_string())); | ||
223 | assert_eq!(info.active_parameter, Some(0)); | ||
224 | } | ||
225 | |||
226 | #[test] | ||
227 | fn test_fn_signature_two_args_second() { | ||
228 | let info = call_info( | ||
229 | r#"fn foo(x: u32, y: u32) -> u32 {x + y} | ||
230 | fn bar() { foo(3, <|>); }"#, | ||
231 | ); | ||
232 | |||
233 | assert_eq!(info.parameters, vec!("x".to_string(), "y".to_string())); | ||
234 | assert_eq!(info.active_parameter, Some(1)); | ||
235 | } | ||
236 | |||
237 | #[test] | ||
238 | fn test_fn_signature_for_impl() { | ||
239 | let info = call_info( | ||
240 | r#"struct F; impl F { pub fn new() { F{}} } | ||
241 | fn bar() {let _ : F = F::new(<|>);}"#, | ||
242 | ); | ||
243 | |||
244 | assert_eq!(info.parameters, Vec::<String>::new()); | ||
245 | assert_eq!(info.active_parameter, None); | ||
246 | } | ||
247 | |||
248 | #[test] | ||
249 | fn test_fn_signature_for_method_self() { | ||
250 | let info = call_info( | ||
251 | r#"struct F; | ||
252 | impl F { | ||
253 | pub fn new() -> F{ | ||
254 | F{} | ||
255 | } | ||
256 | |||
257 | pub fn do_it(&self) {} | ||
258 | } | ||
259 | |||
260 | fn bar() { | ||
261 | let f : F = F::new(); | ||
262 | f.do_it(<|>); | ||
263 | }"#, | ||
264 | ); | ||
265 | |||
266 | assert_eq!(info.parameters, vec!["&self".to_string()]); | ||
267 | assert_eq!(info.active_parameter, None); | ||
268 | } | ||
269 | |||
270 | #[test] | ||
271 | fn test_fn_signature_for_method_with_arg() { | ||
272 | let info = call_info( | ||
273 | r#"struct F; | ||
274 | impl F { | ||
275 | pub fn new() -> F{ | ||
276 | F{} | ||
277 | } | ||
278 | |||
279 | pub fn do_it(&self, x: i32) {} | ||
280 | } | ||
281 | |||
282 | fn bar() { | ||
283 | let f : F = F::new(); | ||
284 | f.do_it(<|>); | ||
285 | }"#, | ||
286 | ); | ||
287 | |||
288 | assert_eq!(info.parameters, vec!["&self".to_string(), "x".to_string()]); | ||
289 | assert_eq!(info.active_parameter, Some(1)); | ||
290 | } | ||
291 | |||
292 | #[test] | ||
293 | fn test_fn_signature_with_docs_simple() { | ||
294 | let info = call_info( | ||
295 | r#" | ||
296 | /// test | ||
297 | // non-doc-comment | ||
298 | fn foo(j: u32) -> u32 { | ||
299 | j | ||
300 | } | ||
301 | |||
302 | fn bar() { | ||
303 | let _ = foo(<|>); | ||
304 | } | ||
305 | "#, | ||
306 | ); | ||
307 | |||
308 | assert_eq!(info.parameters, vec!["j".to_string()]); | ||
309 | assert_eq!(info.active_parameter, Some(0)); | ||
310 | assert_eq!(info.label, "fn foo(j: u32) -> u32".to_string()); | ||
311 | assert_eq!(info.doc, Some("test".into())); | ||
312 | } | ||
313 | |||
314 | #[test] | ||
315 | fn test_fn_signature_with_docs() { | ||
316 | let info = call_info( | ||
317 | r#" | ||
318 | /// Adds one to the number given. | ||
319 | /// | ||
320 | /// # Examples | ||
321 | /// | ||
322 | /// ``` | ||
323 | /// let five = 5; | ||
324 | /// | ||
325 | /// assert_eq!(6, my_crate::add_one(5)); | ||
326 | /// ``` | ||
327 | pub fn add_one(x: i32) -> i32 { | ||
328 | x + 1 | ||
329 | } | ||
330 | |||
331 | pub fn do() { | ||
332 | add_one(<|> | ||
333 | }"#, | ||
334 | ); | ||
335 | |||
336 | assert_eq!(info.parameters, vec!["x".to_string()]); | ||
337 | assert_eq!(info.active_parameter, Some(0)); | ||
338 | assert_eq!(info.label, "pub fn add_one(x: i32) -> i32".to_string()); | ||
339 | assert_eq!( | ||
340 | info.doc, | ||
341 | Some( | ||
342 | r#"Adds one to the number given. | ||
343 | |||
344 | # Examples | ||
345 | |||
346 | ```rust | ||
347 | let five = 5; | ||
348 | |||
349 | assert_eq!(6, my_crate::add_one(5)); | ||
350 | ```"# | ||
351 | .into() | ||
352 | ) | ||
353 | ); | ||
354 | } | ||
355 | |||
356 | #[test] | ||
357 | fn test_fn_signature_with_docs_impl() { | ||
358 | let info = call_info( | ||
359 | r#" | ||
360 | struct addr; | ||
361 | impl addr { | ||
362 | /// Adds one to the number given. | ||
363 | /// | ||
364 | /// # Examples | ||
365 | /// | ||
366 | /// ``` | ||
367 | /// let five = 5; | ||
368 | /// | ||
369 | /// assert_eq!(6, my_crate::add_one(5)); | ||
370 | /// ``` | ||
371 | pub fn add_one(x: i32) -> i32 { | ||
372 | x + 1 | ||
373 | } | ||
374 | } | ||
375 | |||
376 | pub fn do_it() { | ||
377 | addr {}; | ||
378 | addr::add_one(<|>); | ||
379 | }"#, | ||
380 | ); | ||
381 | |||
382 | assert_eq!(info.parameters, vec!["x".to_string()]); | ||
383 | assert_eq!(info.active_parameter, Some(0)); | ||
384 | assert_eq!(info.label, "pub fn add_one(x: i32) -> i32".to_string()); | ||
385 | assert_eq!( | ||
386 | info.doc, | ||
387 | Some( | ||
388 | r#"Adds one to the number given. | ||
389 | |||
390 | # Examples | ||
391 | |||
392 | ```rust | ||
393 | let five = 5; | ||
394 | |||
395 | assert_eq!(6, my_crate::add_one(5)); | ||
396 | ```"# | ||
397 | .into() | ||
398 | ) | ||
399 | ); | ||
400 | } | ||
401 | |||
402 | #[test] | ||
403 | fn test_fn_signature_with_docs_from_actix() { | ||
404 | let info = call_info( | ||
405 | r#" | ||
406 | pub trait WriteHandler<E> | ||
407 | where | ||
408 | Self: Actor, | ||
409 | Self::Context: ActorContext, | ||
410 | { | ||
411 | /// Method is called when writer emits error. | ||
412 | /// | ||
413 | /// If this method returns `ErrorAction::Continue` writer processing | ||
414 | /// continues otherwise stream processing stops. | ||
415 | fn error(&mut self, err: E, ctx: &mut Self::Context) -> Running { | ||
416 | Running::Stop | ||
417 | } | ||
418 | |||
419 | /// Method is called when writer finishes. | ||
420 | /// | ||
421 | /// By default this method stops actor's `Context`. | ||
422 | fn finished(&mut self, ctx: &mut Self::Context) { | ||
423 | ctx.stop() | ||
424 | } | ||
425 | } | ||
426 | |||
427 | pub fn foo() { | ||
428 | WriteHandler r; | ||
429 | r.finished(<|>); | ||
430 | } | ||
431 | |||
432 | "#, | ||
433 | ); | ||
434 | |||
435 | assert_eq!( | ||
436 | info.parameters, | ||
437 | vec!["&mut self".to_string(), "ctx".to_string()] | ||
438 | ); | ||
439 | assert_eq!(info.active_parameter, Some(1)); | ||
440 | assert_eq!( | ||
441 | info.doc, | ||
442 | Some( | ||
443 | r#"Method is called when writer finishes. | ||
444 | |||
445 | By default this method stops actor's `Context`."# | ||
446 | .into() | ||
447 | ) | ||
448 | ); | ||
449 | } | ||
450 | |||
451 | } | ||
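The active-parameter logic above reduces to counting commas between the start of the argument list and the cursor, then bumping the index by one for method calls so that `self` is accounted for. A minimal, self-contained sketch of that heuristic, with plain `&str` offsets standing in for rust-analyzer's `TextUnit`/`TextRange` (the function and `main` driver below are illustrative only, not part of the diff):

```rust
/// Estimate which parameter the cursor is on inside an argument list.
/// `arg_list_text` is the text of the call's argument list (including the
/// parentheses) and `cursor` is an offset relative to its start.
fn active_parameter(arg_list_text: &str, cursor: usize, has_self: bool) -> usize {
    // Count the commas that appear before the cursor; each one moves us
    // one parameter further into the call.
    let before_cursor = &arg_list_text[..cursor.min(arg_list_text.len())];
    let commas = before_cursor.matches(',').count();
    // For method calls the first parameter is `self`, which never appears
    // in the argument list, so shift the index by one.
    if has_self { commas + 1 } else { commas }
}

fn main() {
    // `foo(3, <|>)`: one comma before the cursor -> second parameter.
    assert_eq!(active_parameter("(3, )", 4, false), 1);
    // `f.do_it(<|>)`: no commas, but `self` occupies slot 0 -> index 1.
    assert_eq!(active_parameter("()", 1, true), 1);
}
```

As the TODO in the code notes, this is a best-effort count and ignores commas that belong to nested calls or tuples inside the arguments.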
diff --git a/crates/ra_ide_api/src/completion.rs b/crates/ra_ide_api/src/completion.rs new file mode 100644 index 000000000..ce777a771 --- /dev/null +++ b/crates/ra_ide_api/src/completion.rs | |||
@@ -0,0 +1,77 @@ | |||
1 | mod completion_item; | ||
2 | mod completion_context; | ||
3 | |||
4 | mod complete_dot; | ||
5 | mod complete_fn_param; | ||
6 | mod complete_keyword; | ||
7 | mod complete_snippet; | ||
8 | mod complete_path; | ||
9 | mod complete_scope; | ||
10 | |||
11 | use ra_db::SyntaxDatabase; | ||
12 | |||
13 | use crate::{ | ||
14 | db, | ||
15 | Cancelable, FilePosition, | ||
16 | completion::{ | ||
17 | completion_item::{Completions, CompletionKind}, | ||
18 | completion_context::CompletionContext, | ||
19 | }, | ||
20 | }; | ||
21 | |||
22 | pub use crate::completion::completion_item::{CompletionItem, InsertText, CompletionItemKind}; | ||
23 | |||
24 | /// Main entry point for completion. We run completion as a two-phase process. | ||
25 | /// | ||
26 | /// First, we look at the position and collect a so-called `CompletionContext`. | ||
27 | /// This is a somewhat messy process because, during completion, the syntax tree is | ||
28 | /// incomplete and can look really weird. | ||
29 | /// | ||
30 | /// Once the context is collected, we run a series of completion routines which | ||
31 | /// look at the context and produce completion items. One subtlety about this | ||
32 | /// phase is that the completion engine should not filter by the substring that is | ||
33 | /// already present, it should give all possible variants for the identifier at | ||
34 | /// the caret. In other words, for | ||
35 | /// | ||
36 | /// ```no-run | ||
37 | /// fn f() { | ||
38 | /// let foo = 92; | ||
39 | /// let _ = bar<|> | ||
40 | /// } | ||
41 | /// ``` | ||
42 | /// | ||
43 | /// `foo` *should* be present among the completion variants. Filtering by | ||
44 | /// identifier prefix/fuzzy match should be done higher in the stack, together | ||
45 | /// with ordering of completions (currently this is done by the client). | ||
46 | pub(crate) fn completions( | ||
47 | db: &db::RootDatabase, | ||
48 | position: FilePosition, | ||
49 | ) -> Cancelable<Option<Completions>> { | ||
50 | let original_file = db.source_file(position.file_id); | ||
51 | let ctx = ctry!(CompletionContext::new(db, &original_file, position)?); | ||
52 | |||
53 | let mut acc = Completions::default(); | ||
54 | |||
55 | complete_fn_param::complete_fn_param(&mut acc, &ctx); | ||
56 | complete_keyword::complete_expr_keyword(&mut acc, &ctx); | ||
57 | complete_keyword::complete_use_tree_keyword(&mut acc, &ctx); | ||
58 | complete_snippet::complete_expr_snippet(&mut acc, &ctx); | ||
59 | complete_snippet::complete_item_snippet(&mut acc, &ctx); | ||
60 | complete_path::complete_path(&mut acc, &ctx)?; | ||
61 | complete_scope::complete_scope(&mut acc, &ctx)?; | ||
62 | complete_dot::complete_dot(&mut acc, &ctx)?; | ||
63 | |||
64 | Ok(Some(acc)) | ||
65 | } | ||
66 | |||
67 | #[cfg(test)] | ||
68 | fn check_completion(code: &str, expected_completions: &str, kind: CompletionKind) { | ||
69 | use crate::mock_analysis::{single_file_with_position, analysis_and_position}; | ||
70 | let (analysis, position) = if code.contains("//-") { | ||
71 | analysis_and_position(code) | ||
72 | } else { | ||
73 | single_file_with_position(code) | ||
74 | }; | ||
75 | let completions = completions(&analysis.db, position).unwrap().unwrap(); | ||
76 | completions.assert_match(expected_completions, kind); | ||
77 | } | ||
diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs new file mode 100644 index 000000000..5d4e60dc5 --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_dot.rs | |||
@@ -0,0 +1,121 @@ | |||
1 | use hir::{Ty, Def}; | ||
2 | |||
3 | use crate::Cancelable; | ||
4 | use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem, CompletionItemKind}; | ||
5 | |||
6 | /// Complete dot accesses, i.e. fields or methods (currently only fields). | ||
7 | pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { | ||
8 | let (function, receiver) = match (&ctx.function, ctx.dot_receiver) { | ||
9 | (Some(function), Some(receiver)) => (function, receiver), | ||
10 | _ => return Ok(()), | ||
11 | }; | ||
12 | let infer_result = function.infer(ctx.db)?; | ||
13 | let syntax_mapping = function.body_syntax_mapping(ctx.db)?; | ||
14 | let expr = match syntax_mapping.node_expr(receiver) { | ||
15 | Some(expr) => expr, | ||
16 | None => return Ok(()), | ||
17 | }; | ||
18 | let receiver_ty = infer_result[expr].clone(); | ||
19 | if !ctx.is_method_call { | ||
20 | complete_fields(acc, ctx, receiver_ty)?; | ||
21 | } | ||
22 | Ok(()) | ||
23 | } | ||
24 | |||
25 | fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) -> Cancelable<()> { | ||
26 | for receiver in receiver.autoderef(ctx.db) { | ||
27 | match receiver { | ||
28 | Ty::Adt { def_id, .. } => { | ||
29 | match def_id.resolve(ctx.db)? { | ||
30 | Def::Struct(s) => { | ||
31 | let variant_data = s.variant_data(ctx.db)?; | ||
32 | for field in variant_data.fields() { | ||
33 | CompletionItem::new( | ||
34 | CompletionKind::Reference, | ||
35 | field.name().to_string(), | ||
36 | ) | ||
37 | .kind(CompletionItemKind::Field) | ||
38 | .add_to(acc); | ||
39 | } | ||
40 | } | ||
41 | // TODO unions | ||
42 | _ => {} | ||
43 | } | ||
44 | } | ||
45 | Ty::Tuple(fields) => { | ||
46 | for (i, _ty) in fields.iter().enumerate() { | ||
47 | CompletionItem::new(CompletionKind::Reference, i.to_string()) | ||
48 | .kind(CompletionItemKind::Field) | ||
49 | .add_to(acc); | ||
50 | } | ||
51 | } | ||
52 | _ => {} | ||
53 | }; | ||
54 | } | ||
55 | Ok(()) | ||
56 | } | ||
57 | |||
58 | #[cfg(test)] | ||
59 | mod tests { | ||
60 | use crate::completion::*; | ||
61 | |||
62 | fn check_ref_completion(code: &str, expected_completions: &str) { | ||
63 | check_completion(code, expected_completions, CompletionKind::Reference); | ||
64 | } | ||
65 | |||
66 | #[test] | ||
67 | fn test_struct_field_completion() { | ||
68 | check_ref_completion( | ||
69 | r" | ||
70 | struct A { the_field: u32 } | ||
71 | fn foo(a: A) { | ||
72 | a.<|> | ||
73 | } | ||
74 | ", | ||
75 | r#"the_field"#, | ||
76 | ); | ||
77 | } | ||
78 | |||
79 | #[test] | ||
80 | fn test_struct_field_completion_self() { | ||
81 | check_ref_completion( | ||
82 | r" | ||
83 | struct A { the_field: u32 } | ||
84 | impl A { | ||
85 | fn foo(self) { | ||
86 | self.<|> | ||
87 | } | ||
88 | } | ||
89 | ", | ||
90 | r#"the_field"#, | ||
91 | ); | ||
92 | } | ||
93 | |||
94 | #[test] | ||
95 | fn test_struct_field_completion_autoderef() { | ||
96 | check_ref_completion( | ||
97 | r" | ||
98 | struct A { the_field: u32 } | ||
99 | impl A { | ||
100 | fn foo(&self) { | ||
101 | self.<|> | ||
102 | } | ||
103 | } | ||
104 | ", | ||
105 | r#"the_field"#, | ||
106 | ); | ||
107 | } | ||
108 | |||
109 | #[test] | ||
110 | fn test_no_struct_field_completion_for_method_call() { | ||
111 | check_ref_completion( | ||
112 | r" | ||
113 | struct A { the_field: u32 } | ||
114 | fn foo(a: A) { | ||
115 | a.<|>() | ||
116 | } | ||
117 | ", | ||
118 | r#""#, | ||
119 | ); | ||
120 | } | ||
121 | } | ||
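Conceptually, `complete_fields` walks the autoderef chain of the receiver type and collects named fields for structs and positional indices for tuples. A toy sketch under that reading, with a hand-rolled `Ty` enum standing in for hir's type representation and a single `&T` step standing in for full autoderef:

```rust
/// Illustrative stand-in for hir's `Ty`.
enum Ty {
    Struct { fields: Vec<String> },
    Tuple { arity: usize },
    Ref(Box<Ty>), // `&T`, which dereferences to `T`
    Other,
}

/// Collect field completions for a receiver, following references the way
/// autoderef would.
fn complete_fields(ty: &Ty, acc: &mut Vec<String>) {
    match ty {
        Ty::Struct { fields } => acc.extend(fields.iter().cloned()),
        Ty::Tuple { arity } => acc.extend((0..*arity).map(|i| i.to_string())),
        Ty::Ref(inner) => complete_fields(inner, acc), // one deref step
        Ty::Other => {}
    }
}

fn main() {
    // Receiver of type `&A` where `struct A { the_field: u32 }`.
    let receiver = Ty::Ref(Box::new(Ty::Struct {
        fields: vec!["the_field".to_string()],
    }));
    let mut acc = Vec::new();
    complete_fields(&receiver, &mut acc);
    assert_eq!(acc, vec!["the_field".to_string()]);
}
```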
diff --git a/crates/ra_ide_api/src/completion/complete_fn_param.rs b/crates/ra_ide_api/src/completion/complete_fn_param.rs new file mode 100644 index 000000000..c1739e47e --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_fn_param.rs | |||
@@ -0,0 +1,102 @@ | |||
1 | use ra_syntax::{ | ||
2 | algo::visit::{visitor_ctx, VisitorCtx}, | ||
3 | ast, | ||
4 | AstNode, | ||
5 | }; | ||
6 | use rustc_hash::FxHashMap; | ||
7 | |||
8 | use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem}; | ||
9 | |||
10 | /// Complete repeated parameters, both name and type. For example, if all | ||
11 | /// functions in a file have a `spam: &mut Spam` parameter, a completion with | ||
12 | /// `spam: &mut Spam` insert text/label and `spam` lookup string will be | ||
13 | /// suggested. | ||
14 | pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) { | ||
15 | if !ctx.is_param { | ||
16 | return; | ||
17 | } | ||
18 | |||
19 | let mut params = FxHashMap::default(); | ||
20 | for node in ctx.leaf.ancestors() { | ||
21 | let _ = visitor_ctx(&mut params) | ||
22 | .visit::<ast::SourceFile, _>(process) | ||
23 | .visit::<ast::ItemList, _>(process) | ||
24 | .accept(node); | ||
25 | } | ||
26 | params | ||
27 | .into_iter() | ||
28 | .filter_map(|(label, (count, param))| { | ||
29 | let lookup = param.pat()?.syntax().text().to_string(); | ||
30 | if count < 2 { | ||
31 | None | ||
32 | } else { | ||
33 | Some((label, lookup)) | ||
34 | } | ||
35 | }) | ||
36 | .for_each(|(label, lookup)| { | ||
37 | CompletionItem::new(CompletionKind::Magic, label) | ||
38 | .lookup_by(lookup) | ||
39 | .add_to(acc) | ||
40 | }); | ||
41 | |||
42 | fn process<'a, N: ast::FnDefOwner>( | ||
43 | node: &'a N, | ||
44 | params: &mut FxHashMap<String, (u32, &'a ast::Param)>, | ||
45 | ) { | ||
46 | node.functions() | ||
47 | .filter_map(|it| it.param_list()) | ||
48 | .flat_map(|it| it.params()) | ||
49 | .for_each(|param| { | ||
50 | let text = param.syntax().text().to_string(); | ||
51 | params.entry(text).or_insert((0, param)).0 += 1; | ||
52 | }) | ||
53 | } | ||
54 | } | ||
55 | |||
56 | #[cfg(test)] | ||
57 | mod tests { | ||
58 | use crate::completion::*; | ||
59 | |||
60 | fn check_magic_completion(code: &str, expected_completions: &str) { | ||
61 | check_completion(code, expected_completions, CompletionKind::Magic); | ||
62 | } | ||
63 | |||
64 | #[test] | ||
65 | fn test_param_completion_last_param() { | ||
66 | check_magic_completion( | ||
67 | r" | ||
68 | fn foo(file_id: FileId) {} | ||
69 | fn bar(file_id: FileId) {} | ||
70 | fn baz(file<|>) {} | ||
71 | ", | ||
72 | r#"file_id "file_id: FileId""#, | ||
73 | ); | ||
74 | } | ||
75 | |||
76 | #[test] | ||
77 | fn test_param_completion_nth_param() { | ||
78 | check_magic_completion( | ||
79 | r" | ||
80 | fn foo(file_id: FileId) {} | ||
81 | fn bar(file_id: FileId) {} | ||
82 | fn baz(file<|>, x: i32) {} | ||
83 | ", | ||
84 | r#"file_id "file_id: FileId""#, | ||
85 | ); | ||
86 | } | ||
87 | |||
88 | #[test] | ||
89 | fn test_param_completion_trait_param() { | ||
90 | check_magic_completion( | ||
91 | r" | ||
92 | pub(crate) trait SourceRoot { | ||
93 | pub fn contains(&self, file_id: FileId) -> bool; | ||
94 | pub fn module_map(&self) -> &ModuleMap; | ||
95 | pub fn lines(&self, file_id: FileId) -> &LineIndex; | ||
96 | pub fn syntax(&self, file<|>) | ||
97 | } | ||
98 | ", | ||
99 | r#"file_id "file_id: FileId""#, | ||
100 | ); | ||
101 | } | ||
102 | } | ||
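The heuristic above is a frequency count: gather the text of every parameter in the surrounding items and suggest those that appear at least twice. A small sketch of just that counting step, using a std `HashMap` where the real code uses `FxHashMap` and plain strings where it keeps `ast::Param` nodes:

```rust
use std::collections::HashMap;

/// Return parameter texts that occur two or more times, i.e. the ones worth
/// suggesting while a new function signature is being written.
fn repeated_params(params: &[&str]) -> Vec<String> {
    let mut counts: HashMap<&str, u32> = HashMap::new();
    for &p in params {
        *counts.entry(p).or_insert(0) += 1;
    }
    let mut res: Vec<String> = counts
        .into_iter()
        .filter(|&(_, count)| count >= 2)
        .map(|(text, _)| text.to_string())
        .collect();
    res.sort(); // deterministic order for the assertion below
    res
}

fn main() {
    let params = ["file_id: FileId", "file_id: FileId", "x: i32"];
    assert_eq!(repeated_params(&params), vec!["file_id: FileId".to_string()]);
}
```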
diff --git a/crates/ra_ide_api/src/completion/complete_keyword.rs b/crates/ra_ide_api/src/completion/complete_keyword.rs new file mode 100644 index 000000000..d350f06ce --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_keyword.rs | |||
@@ -0,0 +1,339 @@ | |||
1 | use ra_syntax::{ | ||
2 | algo::visit::{visitor, Visitor}, | ||
3 | AstNode, | ||
4 | ast::{self, LoopBodyOwner}, | ||
5 | SyntaxKind::*, SyntaxNode, | ||
6 | }; | ||
7 | |||
8 | use crate::completion::{CompletionContext, CompletionItem, Completions, CompletionKind, CompletionItemKind}; | ||
9 | |||
10 | pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) { | ||
11 | // Complete the keywords `crate`, `self` and `super` in use statements | ||
12 | match (ctx.use_item_syntax.as_ref(), ctx.path_prefix.as_ref()) { | ||
13 | (Some(_), None) => { | ||
14 | CompletionItem::new(CompletionKind::Keyword, "crate") | ||
15 | .kind(CompletionItemKind::Keyword) | ||
16 | .lookup_by("crate") | ||
17 | .snippet("crate::") | ||
18 | .add_to(acc); | ||
19 | CompletionItem::new(CompletionKind::Keyword, "self") | ||
20 | .kind(CompletionItemKind::Keyword) | ||
21 | .lookup_by("self") | ||
22 | .add_to(acc); | ||
23 | CompletionItem::new(CompletionKind::Keyword, "super") | ||
24 | .kind(CompletionItemKind::Keyword) | ||
25 | .lookup_by("super") | ||
26 | .add_to(acc); | ||
27 | } | ||
28 | (Some(_), Some(_)) => { | ||
29 | CompletionItem::new(CompletionKind::Keyword, "self") | ||
30 | .kind(CompletionItemKind::Keyword) | ||
31 | .lookup_by("self") | ||
32 | .add_to(acc); | ||
33 | CompletionItem::new(CompletionKind::Keyword, "super") | ||
34 | .kind(CompletionItemKind::Keyword) | ||
35 | .lookup_by("super") | ||
36 | .add_to(acc); | ||
37 | } | ||
38 | _ => {} | ||
39 | } | ||
40 | } | ||
41 | |||
42 | fn keyword(kw: &str, snippet: &str) -> CompletionItem { | ||
43 | CompletionItem::new(CompletionKind::Keyword, kw) | ||
44 | .kind(CompletionItemKind::Keyword) | ||
45 | .snippet(snippet) | ||
46 | .build() | ||
47 | } | ||
48 | |||
49 | pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) { | ||
50 | if !ctx.is_trivial_path { | ||
51 | return; | ||
52 | } | ||
53 | |||
54 | let fn_def = match ctx.function_syntax { | ||
55 | Some(it) => it, | ||
56 | None => return, | ||
57 | }; | ||
58 | acc.add(keyword("if", "if $0 {}")); | ||
59 | acc.add(keyword("match", "match $0 {}")); | ||
60 | acc.add(keyword("while", "while $0 {}")); | ||
61 | acc.add(keyword("loop", "loop {$0}")); | ||
62 | |||
63 | if ctx.after_if { | ||
64 | acc.add(keyword("else", "else {$0}")); | ||
65 | acc.add(keyword("else if", "else if $0 {}")); | ||
66 | } | ||
67 | if is_in_loop_body(ctx.leaf) { | ||
68 | if ctx.can_be_stmt { | ||
69 | acc.add(keyword("continue", "continue;")); | ||
70 | acc.add(keyword("break", "break;")); | ||
71 | } else { | ||
72 | acc.add(keyword("continue", "continue")); | ||
73 | acc.add(keyword("break", "break")); | ||
74 | } | ||
75 | } | ||
76 | acc.add_all(complete_return(fn_def, ctx.can_be_stmt)); | ||
77 | } | ||
78 | |||
79 | fn is_in_loop_body(leaf: &SyntaxNode) -> bool { | ||
80 | for node in leaf.ancestors() { | ||
81 | if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { | ||
82 | break; | ||
83 | } | ||
84 | let loop_body = visitor() | ||
85 | .visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body) | ||
86 | .visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body) | ||
87 | .visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body) | ||
88 | .accept(node); | ||
89 | if let Some(Some(body)) = loop_body { | ||
90 | if leaf.range().is_subrange(&body.syntax().range()) { | ||
91 | return true; | ||
92 | } | ||
93 | } | ||
94 | } | ||
95 | false | ||
96 | } | ||
97 | |||
98 | fn complete_return(fn_def: &ast::FnDef, can_be_stmt: bool) -> Option<CompletionItem> { | ||
99 | let snip = match (can_be_stmt, fn_def.ret_type().is_some()) { | ||
100 | (true, true) => "return $0;", | ||
101 | (true, false) => "return;", | ||
102 | (false, true) => "return $0", | ||
103 | (false, false) => "return", | ||
104 | }; | ||
105 | Some(keyword("return", snip)) | ||
106 | } | ||
107 | |||
108 | #[cfg(test)] | ||
109 | mod tests { | ||
110 | use crate::completion::{CompletionKind, check_completion}; | ||
111 | fn check_keyword_completion(code: &str, expected_completions: &str) { | ||
112 | check_completion(code, expected_completions, CompletionKind::Keyword); | ||
113 | } | ||
114 | |||
115 | #[test] | ||
116 | fn completes_keywords_in_use_stmt() { | ||
117 | check_keyword_completion( | ||
118 | r" | ||
119 | use <|> | ||
120 | ", | ||
121 | r#" | ||
122 | crate "crate" "crate::" | ||
123 | self "self" | ||
124 | super "super" | ||
125 | "#, | ||
126 | ); | ||
127 | |||
128 | check_keyword_completion( | ||
129 | r" | ||
130 | use a::<|> | ||
131 | ", | ||
132 | r#" | ||
133 | self "self" | ||
134 | super "super" | ||
135 | "#, | ||
136 | ); | ||
137 | |||
138 | check_keyword_completion( | ||
139 | r" | ||
140 | use a::{b, <|>} | ||
141 | ", | ||
142 | r#" | ||
143 | self "self" | ||
144 | super "super" | ||
145 | "#, | ||
146 | ); | ||
147 | } | ||
148 | |||
149 | #[test] | ||
150 | fn completes_various_keywords_in_function() { | ||
151 | check_keyword_completion( | ||
152 | r" | ||
153 | fn quux() { | ||
154 | <|> | ||
155 | } | ||
156 | ", | ||
157 | r#" | ||
158 | if "if $0 {}" | ||
159 | match "match $0 {}" | ||
160 | while "while $0 {}" | ||
161 | loop "loop {$0}" | ||
162 | return "return;" | ||
163 | "#, | ||
164 | ); | ||
165 | } | ||
166 | |||
167 | #[test] | ||
168 | fn completes_else_after_if() { | ||
169 | check_keyword_completion( | ||
170 | r" | ||
171 | fn quux() { | ||
172 | if true { | ||
173 | () | ||
174 | } <|> | ||
175 | } | ||
176 | ", | ||
177 | r#" | ||
178 | if "if $0 {}" | ||
179 | match "match $0 {}" | ||
180 | while "while $0 {}" | ||
181 | loop "loop {$0}" | ||
182 | else "else {$0}" | ||
183 | else if "else if $0 {}" | ||
184 | return "return;" | ||
185 | "#, | ||
186 | ); | ||
187 | } | ||
188 | |||
189 | #[test] | ||
190 | fn test_completion_return_value() { | ||
191 | check_keyword_completion( | ||
192 | r" | ||
193 | fn quux() -> i32 { | ||
194 | <|> | ||
195 | 92 | ||
196 | } | ||
197 | ", | ||
198 | r#" | ||
199 | if "if $0 {}" | ||
200 | match "match $0 {}" | ||
201 | while "while $0 {}" | ||
202 | loop "loop {$0}" | ||
203 | return "return $0;" | ||
204 | "#, | ||
205 | ); | ||
206 | check_keyword_completion( | ||
207 | r" | ||
208 | fn quux() { | ||
209 | <|> | ||
210 | 92 | ||
211 | } | ||
212 | ", | ||
213 | r#" | ||
214 | if "if $0 {}" | ||
215 | match "match $0 {}" | ||
216 | while "while $0 {}" | ||
217 | loop "loop {$0}" | ||
218 | return "return;" | ||
219 | "#, | ||
220 | ); | ||
221 | } | ||
222 | |||
223 | #[test] | ||
224 | fn dont_add_semi_after_return_if_not_a_statement() { | ||
225 | check_keyword_completion( | ||
226 | r" | ||
227 | fn quux() -> i32 { | ||
228 | match () { | ||
229 | () => <|> | ||
230 | } | ||
231 | } | ||
232 | ", | ||
233 | r#" | ||
234 | if "if $0 {}" | ||
235 | match "match $0 {}" | ||
236 | while "while $0 {}" | ||
237 | loop "loop {$0}" | ||
238 | return "return $0" | ||
239 | "#, | ||
240 | ); | ||
241 | } | ||
242 | |||
243 | #[test] | ||
244 | fn last_return_in_block_has_semi() { | ||
245 | check_keyword_completion( | ||
246 | r" | ||
247 | fn quux() -> i32 { | ||
248 | if condition { | ||
249 | <|> | ||
250 | } | ||
251 | } | ||
252 | ", | ||
253 | r#" | ||
254 | if "if $0 {}" | ||
255 | match "match $0 {}" | ||
256 | while "while $0 {}" | ||
257 | loop "loop {$0}" | ||
258 | return "return $0;" | ||
259 | "#, | ||
260 | ); | ||
261 | check_keyword_completion( | ||
262 | r" | ||
263 | fn quux() -> i32 { | ||
264 | if condition { | ||
265 | <|> | ||
266 | } | ||
267 | let x = 92; | ||
268 | x | ||
269 | } | ||
270 | ", | ||
271 | r#" | ||
272 | if "if $0 {}" | ||
273 | match "match $0 {}" | ||
274 | while "while $0 {}" | ||
275 | loop "loop {$0}" | ||
276 | return "return $0;" | ||
277 | "#, | ||
278 | ); | ||
279 | } | ||
280 | |||
281 | #[test] | ||
282 | fn completes_break_and_continue_in_loops() { | ||
283 | check_keyword_completion( | ||
284 | r" | ||
285 | fn quux() -> i32 { | ||
286 | loop { <|> } | ||
287 | } | ||
288 | ", | ||
289 | r#" | ||
290 | if "if $0 {}" | ||
291 | match "match $0 {}" | ||
292 | while "while $0 {}" | ||
293 | loop "loop {$0}" | ||
294 | continue "continue;" | ||
295 | break "break;" | ||
296 | return "return $0;" | ||
297 | "#, | ||
298 | ); | ||
299 | // No break/continue completion: the lambda isolates control flow | ||
300 | check_keyword_completion( | ||
301 | r" | ||
302 | fn quux() -> i32 { | ||
303 | loop { || { <|> } } | ||
304 | } | ||
305 | ", | ||
306 | r#" | ||
307 | if "if $0 {}" | ||
308 | match "match $0 {}" | ||
309 | while "while $0 {}" | ||
310 | loop "loop {$0}" | ||
311 | return "return $0;" | ||
312 | "#, | ||
313 | ); | ||
314 | } | ||
315 | |||
316 | #[test] | ||
317 | fn no_semi_after_break_continue_in_expr() { | ||
318 | check_keyword_completion( | ||
319 | r" | ||
320 | fn f() { | ||
321 | loop { | ||
322 | match () { | ||
323 | () => br<|> | ||
324 | } | ||
325 | } | ||
326 | } | ||
327 | ", | ||
328 | r#" | ||
329 | if "if $0 {}" | ||
330 | match "match $0 {}" | ||
331 | while "while $0 {}" | ||
332 | loop "loop {$0}" | ||
333 | continue "continue" | ||
334 | break "break" | ||
335 | return "return" | ||
336 | "#, | ||
337 | ) | ||
338 | } | ||
339 | } | ||
diff --git a/crates/ra_ide_api/src/completion/complete_path.rs b/crates/ra_ide_api/src/completion/complete_path.rs new file mode 100644 index 000000000..4723a65a6 --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_path.rs | |||
@@ -0,0 +1,128 @@ | |||
1 | use crate::{ | ||
2 | Cancelable, | ||
3 | completion::{CompletionItem, CompletionItemKind, Completions, CompletionKind, CompletionContext}, | ||
4 | }; | ||
5 | |||
6 | pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { | ||
7 | let (path, module) = match (&ctx.path_prefix, &ctx.module) { | ||
8 | (Some(path), Some(module)) => (path.clone(), module), | ||
9 | _ => return Ok(()), | ||
10 | }; | ||
11 | let def_id = match module.resolve_path(ctx.db, &path)?.take_types() { | ||
12 | Some(it) => it, | ||
13 | None => return Ok(()), | ||
14 | }; | ||
15 | match def_id.resolve(ctx.db)? { | ||
16 | hir::Def::Module(module) => { | ||
17 | let module_scope = module.scope(ctx.db)?; | ||
18 | module_scope.entries().for_each(|(name, res)| { | ||
19 | CompletionItem::new(CompletionKind::Reference, name.to_string()) | ||
20 | .from_resolution(ctx, res) | ||
21 | .add_to(acc) | ||
22 | }); | ||
23 | } | ||
24 | hir::Def::Enum(e) => e | ||
25 | .variants(ctx.db)? | ||
26 | .into_iter() | ||
27 | .for_each(|(name, _variant)| { | ||
28 | CompletionItem::new(CompletionKind::Reference, name.to_string()) | ||
29 | .kind(CompletionItemKind::EnumVariant) | ||
30 | .add_to(acc) | ||
31 | }), | ||
32 | _ => return Ok(()), | ||
33 | }; | ||
34 | Ok(()) | ||
35 | } | ||
36 | |||
37 | #[cfg(test)] | ||
38 | mod tests { | ||
39 | use crate::completion::{CompletionKind, check_completion}; | ||
40 | |||
41 | fn check_reference_completion(code: &str, expected_completions: &str) { | ||
42 | check_completion(code, expected_completions, CompletionKind::Reference); | ||
43 | } | ||
44 | |||
45 | #[test] | ||
46 | fn completes_use_item_starting_with_self() { | ||
47 | check_reference_completion( | ||
48 | r" | ||
49 | use self::m::<|>; | ||
50 | |||
51 | mod m { | ||
52 | struct Bar; | ||
53 | } | ||
54 | ", | ||
55 | "Bar", | ||
56 | ); | ||
57 | } | ||
58 | |||
59 | #[test] | ||
60 | fn completes_use_item_starting_with_crate() { | ||
61 | check_reference_completion( | ||
62 | " | ||
63 | //- /lib.rs | ||
64 | mod foo; | ||
65 | struct Spam; | ||
66 | //- /foo.rs | ||
67 | use crate::Sp<|> | ||
68 | ", | ||
69 | "Spam;foo", | ||
70 | ); | ||
71 | } | ||
72 | |||
73 | #[test] | ||
74 | fn completes_nested_use_tree() { | ||
75 | check_reference_completion( | ||
76 | " | ||
77 | //- /lib.rs | ||
78 | mod foo; | ||
79 | struct Spam; | ||
80 | //- /foo.rs | ||
81 | use crate::{Sp<|>}; | ||
82 | ", | ||
83 | "Spam;foo", | ||
84 | ); | ||
85 | } | ||
86 | |||
87 | #[test] | ||
88 | fn completes_deeply_nested_use_tree() { | ||
89 | check_reference_completion( | ||
90 | " | ||
91 | //- /lib.rs | ||
92 | mod foo; | ||
93 | pub mod bar { | ||
94 | pub mod baz { | ||
95 | pub struct Spam; | ||
96 | } | ||
97 | } | ||
98 | //- /foo.rs | ||
99 | use crate::{bar::{baz::Sp<|>}}; | ||
100 | ", | ||
101 | "Spam", | ||
102 | ); | ||
103 | } | ||
104 | |||
105 | #[test] | ||
106 | fn completes_enum_variant() { | ||
107 | check_reference_completion( | ||
108 | " | ||
109 | //- /lib.rs | ||
110 | enum E { Foo, Bar(i32) } | ||
111 | fn foo() { let _ = E::<|> } | ||
112 | ", | ||
113 | "Foo;Bar", | ||
114 | ); | ||
115 | } | ||
116 | |||
117 | #[test] | ||
118 | fn dont_render_function_parens_in_use_item() { | ||
119 | check_reference_completion( | ||
120 | " | ||
121 | //- /lib.rs | ||
122 | mod m { pub fn foo() {} } | ||
123 | use crate::m::f<|>; | ||
124 | ", | ||
125 | "foo", | ||
126 | ) | ||
127 | } | ||
128 | } | ||
diff --git a/crates/ra_ide_api/src/completion/complete_scope.rs b/crates/ra_ide_api/src/completion/complete_scope.rs new file mode 100644 index 000000000..ee9052d3d --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_scope.rs | |||
@@ -0,0 +1,192 @@ | |||
1 | use rustc_hash::FxHashSet; | ||
2 | use ra_syntax::TextUnit; | ||
3 | |||
4 | use crate::{ | ||
5 | Cancelable, | ||
6 | completion::{CompletionItem, CompletionItemKind, Completions, CompletionKind, CompletionContext}, | ||
7 | }; | ||
8 | |||
9 | pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { | ||
10 | if !ctx.is_trivial_path { | ||
11 | return Ok(()); | ||
12 | } | ||
13 | let module = match &ctx.module { | ||
14 | Some(it) => it, | ||
15 | None => return Ok(()), | ||
16 | }; | ||
17 | if let Some(function) = &ctx.function { | ||
18 | let scopes = function.scopes(ctx.db)?; | ||
19 | complete_fn(acc, &scopes, ctx.offset); | ||
20 | } | ||
21 | |||
22 | let module_scope = module.scope(ctx.db)?; | ||
23 | let (file_id, _) = module.defenition_source(ctx.db)?; | ||
24 | module_scope | ||
25 | .entries() | ||
26 | .filter(|(_name, res)| { | ||
27 | // Don't suggest the import the cursor is currently inside | ||
28 | // FIXME: this penetrates through all kinds of abstractions, | ||
29 | // we need to figure out a less ugly way to do it. | ||
30 | match res.import { | ||
31 | None => true, | ||
32 | Some(import) => { | ||
33 | let range = import.range(ctx.db, file_id); | ||
34 | !range.is_subrange(&ctx.leaf.range()) | ||
35 | } | ||
36 | } | ||
37 | }) | ||
38 | .for_each(|(name, res)| { | ||
39 | CompletionItem::new(CompletionKind::Reference, name.to_string()) | ||
40 | .from_resolution(ctx, res) | ||
41 | .add_to(acc) | ||
42 | }); | ||
43 | Ok(()) | ||
44 | } | ||
45 | |||
46 | fn complete_fn(acc: &mut Completions, scopes: &hir::ScopesWithSyntaxMapping, offset: TextUnit) { | ||
47 | let mut shadowed = FxHashSet::default(); | ||
48 | scopes | ||
49 | .scope_chain_for_offset(offset) | ||
50 | .flat_map(|scope| scopes.scopes.entries(scope).iter()) | ||
51 | .filter(|entry| shadowed.insert(entry.name())) | ||
52 | .for_each(|entry| { | ||
53 | CompletionItem::new(CompletionKind::Reference, entry.name().to_string()) | ||
54 | .kind(CompletionItemKind::Binding) | ||
55 | .add_to(acc) | ||
56 | }); | ||
57 | } | ||
58 | |||
59 | #[cfg(test)] | ||
60 | mod tests { | ||
61 | use crate::completion::{CompletionKind, check_completion}; | ||
62 | |||
63 | fn check_reference_completion(code: &str, expected_completions: &str) { | ||
64 | check_completion(code, expected_completions, CompletionKind::Reference); | ||
65 | } | ||
66 | |||
67 | #[test] | ||
68 | fn completes_bindings_from_let() { | ||
69 | check_reference_completion( | ||
70 | r" | ||
71 | fn quux(x: i32) { | ||
72 | let y = 92; | ||
73 | 1 + <|>; | ||
74 | let z = (); | ||
75 | } | ||
76 | ", | ||
77 | r#"y;x;quux "quux($0)""#, | ||
78 | ); | ||
79 | } | ||
80 | |||
81 | #[test] | ||
82 | fn completes_bindings_from_if_let() { | ||
83 | check_reference_completion( | ||
84 | r" | ||
85 | fn quux() { | ||
86 | if let Some(x) = foo() { | ||
87 | let y = 92; | ||
88 | }; | ||
89 | if let Some(a) = bar() { | ||
90 | let b = 62; | ||
91 | 1 + <|> | ||
92 | } | ||
93 | } | ||
94 | ", | ||
95 | r#"b;a;quux "quux()$0""#, | ||
96 | ); | ||
97 | } | ||
98 | |||
99 | #[test] | ||
100 | fn completes_bindings_from_for() { | ||
101 | check_reference_completion( | ||
102 | r" | ||
103 | fn quux() { | ||
104 | for x in &[1, 2, 3] { | ||
105 | <|> | ||
106 | } | ||
107 | } | ||
108 | ", | ||
109 | r#"x;quux "quux()$0""#, | ||
110 | ); | ||
111 | } | ||
112 | |||
113 | #[test] | ||
114 | fn completes_module_items() { | ||
115 | check_reference_completion( | ||
116 | r" | ||
117 | struct Foo; | ||
118 | enum Baz {} | ||
119 | fn quux() { | ||
120 | <|> | ||
121 | } | ||
122 | ", | ||
123 | r#"quux "quux()$0";Foo;Baz"#, | ||
124 | ); | ||
125 | } | ||
126 | |||
127 | #[test] | ||
128 | fn completes_module_items_in_nested_modules() { | ||
129 | check_reference_completion( | ||
130 | r" | ||
131 | struct Foo; | ||
132 | mod m { | ||
133 | struct Bar; | ||
134 | fn quux() { <|> } | ||
135 | } | ||
136 | ", | ||
137 | r#"quux "quux()$0";Bar"#, | ||
138 | ); | ||
139 | } | ||
140 | |||
141 | #[test] | ||
142 | fn completes_return_type() { | ||
143 | check_reference_completion( | ||
144 | r" | ||
145 | struct Foo; | ||
146 | fn x() -> <|> | ||
147 | ", | ||
148 | r#"Foo;x "x()$0""#, | ||
149 | ) | ||
150 | } | ||
151 | |||
152 | #[test] | ||
153 | fn dont_show_both_completions_for_shadowing() { | ||
154 | check_reference_completion( | ||
155 | r" | ||
156 | fn foo() -> { | ||
157 | let bar = 92; | ||
158 | { | ||
159 | let bar = 62; | ||
160 | <|> | ||
161 | } | ||
162 | } | ||
163 | ", | ||
164 | r#"bar;foo "foo()$0""#, | ||
165 | ) | ||
166 | } | ||
167 | |||
168 | #[test] | ||
169 | fn completes_self_in_methods() { | ||
170 | check_reference_completion(r"impl S { fn foo(&self) { <|> } }", "self") | ||
171 | } | ||
172 | |||
173 | #[test] | ||
174 | fn inserts_parens_for_function_calls() { | ||
175 | check_reference_completion( | ||
176 | r" | ||
177 | fn no_args() {} | ||
178 | fn main() { no_<|> } | ||
179 | ", | ||
180 | r#"no_args "no_args()$0" | ||
181 | main "main()$0""#, | ||
182 | ); | ||
183 | check_reference_completion( | ||
184 | r" | ||
185 | fn with_args(x: i32, y: String) {} | ||
186 | fn main() { with_<|> } | ||
187 | ", | ||
188 | r#"main "main()$0" | ||
189 | with_args "with_args($0)""#, | ||
190 | ); | ||
191 | } | ||
192 | } | ||
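One detail worth calling out from `complete_fn` above: scopes are traversed from the innermost outward, and `shadowed.insert(entry.name())` doubles as the filter, because `HashSet::insert` returns `false` for names already seen. That is what makes inner bindings shadow outer ones in the completion list. A standalone sketch of the same idiom over plain strings:

```rust
use std::collections::HashSet;

/// Flatten a scope chain (innermost scope first) into the names visible at
/// the cursor, letting inner bindings shadow outer ones.
fn visible_names(scope_chain: &[Vec<&str>]) -> Vec<String> {
    let mut seen = HashSet::new();
    scope_chain
        .iter()
        .flat_map(|scope| scope.iter().copied())
        // `insert` returns false for duplicates, so only the first
        // (innermost) occurrence of each name survives.
        .filter(|name| seen.insert(*name))
        .map(|name| name.to_string())
        .collect()
}

fn main() {
    // Mirrors `dont_show_both_completions_for_shadowing`: the inner `bar`
    // wins over the outer one, and each name is offered only once.
    let scopes = vec![vec!["bar"], vec!["bar", "foo"]];
    assert_eq!(visible_names(&scopes), vec!["bar", "foo"]);
}
```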
diff --git a/crates/ra_ide_api/src/completion/complete_snippet.rs b/crates/ra_ide_api/src/completion/complete_snippet.rs new file mode 100644 index 000000000..a495751dd --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_snippet.rs | |||
@@ -0,0 +1,73 @@ | |||
1 | use crate::completion::{CompletionItem, Completions, CompletionKind, CompletionItemKind, CompletionContext, completion_item::Builder}; | ||
2 | |||
3 | fn snippet(label: &str, snippet: &str) -> Builder { | ||
4 | CompletionItem::new(CompletionKind::Snippet, label) | ||
5 | .snippet(snippet) | ||
6 | .kind(CompletionItemKind::Snippet) | ||
7 | } | ||
8 | |||
9 | pub(super) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) { | ||
10 | if !(ctx.is_trivial_path && ctx.function_syntax.is_some()) { | ||
11 | return; | ||
12 | } | ||
13 | snippet("pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc); | ||
14 | snippet("ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc); | ||
15 | } | ||
16 | |||
17 | pub(super) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) { | ||
18 | if !ctx.is_new_item { | ||
19 | return; | ||
20 | } | ||
21 | snippet( | ||
22 | "Test function", | ||
23 | "\ | ||
24 | #[test] | ||
25 | fn ${1:feature}() { | ||
26 | $0 | ||
27 | }", | ||
28 | ) | ||
29 | .lookup_by("tfn") | ||
30 | .add_to(acc); | ||
31 | |||
32 | snippet("pub(crate)", "pub(crate) $0").add_to(acc); | ||
33 | } | ||
34 | |||
35 | #[cfg(test)] | ||
36 | mod tests { | ||
37 | use crate::completion::{CompletionKind, check_completion}; | ||
38 | fn check_snippet_completion(code: &str, expected_completions: &str) { | ||
39 | check_completion(code, expected_completions, CompletionKind::Snippet); | ||
40 | } | ||
41 | |||
42 | #[test] | ||
43 | fn completes_snippets_in_expressions() { | ||
44 | check_snippet_completion( | ||
45 | r"fn foo(x: i32) { <|> }", | ||
46 | r##" | ||
47 | pd "eprintln!(\"$0 = {:?}\", $0);" | ||
48 | ppd "eprintln!(\"$0 = {:#?}\", $0);" | ||
49 | "##, | ||
50 | ); | ||
51 | } | ||
52 | |||
53 | #[test] | ||
54 | fn completes_snippets_in_items() { | ||
55 | // check_snippet_completion(r" | ||
56 | // <|> | ||
57 | // ", | ||
58 | // r##"[CompletionItem { label: "Test function", lookup: None, snippet: Some("#[test]\nfn test_${1:feature}() {\n$0\n}"##, | ||
59 | // ); | ||
60 | check_snippet_completion( | ||
61 | r" | ||
62 | #[cfg(test)] | ||
63 | mod tests { | ||
64 | <|> | ||
65 | } | ||
66 | ", | ||
67 | r##" | ||
68 | tfn "Test function" "#[test]\nfn ${1:feature}() {\n $0\n}" | ||
69 | pub(crate) "pub(crate) $0" | ||
70 | "##, | ||
71 | ); | ||
72 | } | ||
73 | } | ||
diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs new file mode 100644 index 000000000..01786bb69 --- /dev/null +++ b/crates/ra_ide_api/src/completion/completion_context.rs | |||
@@ -0,0 +1,205 @@ | |||
1 | use ra_text_edit::AtomTextEdit; | ||
2 | use ra_syntax::{ | ||
3 | AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, | ||
4 | ast, | ||
5 | algo::{find_leaf_at_offset, find_covering_node, find_node_at_offset}, | ||
6 | SyntaxKind::*, | ||
7 | }; | ||
8 | use hir::source_binder; | ||
9 | |||
10 | use crate::{db, FilePosition, Cancelable}; | ||
11 | |||
12 | /// `CompletionContext` is created early during completion to figure out where | ||
13 | /// exactly the cursor is, syntax-wise. | ||
14 | #[derive(Debug)] | ||
15 | pub(super) struct CompletionContext<'a> { | ||
16 | pub(super) db: &'a db::RootDatabase, | ||
17 | pub(super) offset: TextUnit, | ||
18 | pub(super) leaf: &'a SyntaxNode, | ||
19 | pub(super) module: Option<hir::Module>, | ||
20 | pub(super) function: Option<hir::Function>, | ||
21 | pub(super) function_syntax: Option<&'a ast::FnDef>, | ||
22 | pub(super) use_item_syntax: Option<&'a ast::UseItem>, | ||
23 | pub(super) is_param: bool, | ||
24 | /// A single-identifier path, like `foo`. | ||
25 | pub(super) is_trivial_path: bool, | ||
26 | /// If not a trivial path, the prefix (qualifier). | ||
27 | pub(super) path_prefix: Option<hir::Path>, | ||
28 | pub(super) after_if: bool, | ||
29 | /// `true` if we are a statement or the last expr in the block. | ||
30 | pub(super) can_be_stmt: bool, | ||
31 | /// Something is typed at the "top" level, in a module or an impl/trait. | ||
32 | pub(super) is_new_item: bool, | ||
33 | /// The receiver if this is a field or method access, i.e. writing something.<|> | ||
34 | pub(super) dot_receiver: Option<&'a ast::Expr>, | ||
35 | /// If this is a method call in particular, i.e. the () are already there. | ||
36 | pub(super) is_method_call: bool, | ||
37 | } | ||
38 | |||
39 | impl<'a> CompletionContext<'a> { | ||
40 | pub(super) fn new( | ||
41 | db: &'a db::RootDatabase, | ||
42 | original_file: &'a SourceFile, | ||
43 | position: FilePosition, | ||
44 | ) -> Cancelable<Option<CompletionContext<'a>>> { | ||
45 | let module = source_binder::module_from_position(db, position)?; | ||
46 | let leaf = | ||
47 | ctry!(find_leaf_at_offset(original_file.syntax(), position.offset).left_biased()); | ||
48 | let mut ctx = CompletionContext { | ||
49 | db, | ||
50 | leaf, | ||
51 | offset: position.offset, | ||
52 | module, | ||
53 | function: None, | ||
54 | function_syntax: None, | ||
55 | use_item_syntax: None, | ||
56 | is_param: false, | ||
57 | is_trivial_path: false, | ||
58 | path_prefix: None, | ||
59 | after_if: false, | ||
60 | can_be_stmt: false, | ||
61 | is_new_item: false, | ||
62 | dot_receiver: None, | ||
63 | is_method_call: false, | ||
64 | }; | ||
65 | ctx.fill(original_file, position.offset); | ||
66 | Ok(Some(ctx)) | ||
67 | } | ||
68 | |||
69 | fn fill(&mut self, original_file: &'a SourceFile, offset: TextUnit) { | ||
70 | // Insert a fake ident to get a valid parse tree. We will use this file | ||
71 | // to determine context, though the original_file will be used for | ||
72 | // actual completion. | ||
73 | let file = { | ||
74 | let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string()); | ||
75 | original_file.reparse(&edit) | ||
76 | }; | ||
77 | |||
78 | // First, let's try to complete a reference to some declaration. | ||
79 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) { | ||
80 | // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. | ||
81 | // See RFC#1685. | ||
82 | if is_node::<ast::Param>(name_ref.syntax()) { | ||
83 | self.is_param = true; | ||
84 | return; | ||
85 | } | ||
86 | self.classify_name_ref(original_file, name_ref); | ||
87 | } | ||
88 | |||
89 | // Otherwise, see if this is a declaration. We can use heuristics to | ||
90 | // suggest declaration names, see `CompletionKind::Magic`. | ||
91 | if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) { | ||
92 | if is_node::<ast::Param>(name.syntax()) { | ||
93 | self.is_param = true; | ||
94 | return; | ||
95 | } | ||
96 | } | ||
97 | } | ||
98 | fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) { | ||
99 | let name_range = name_ref.syntax().range(); | ||
100 | let top_node = name_ref | ||
101 | .syntax() | ||
102 | .ancestors() | ||
103 | .take_while(|it| it.range() == name_range) | ||
104 | .last() | ||
105 | .unwrap(); | ||
106 | |||
107 | match top_node.parent().map(|it| it.kind()) { | ||
108 | Some(SOURCE_FILE) | Some(ITEM_LIST) => { | ||
109 | self.is_new_item = true; | ||
110 | return; | ||
111 | } | ||
112 | _ => (), | ||
113 | } | ||
114 | |||
115 | self.use_item_syntax = self.leaf.ancestors().find_map(ast::UseItem::cast); | ||
116 | |||
117 | self.function_syntax = self | ||
118 | .leaf | ||
119 | .ancestors() | ||
120 | .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) | ||
121 | .find_map(ast::FnDef::cast); | ||
122 | match (&self.module, self.function_syntax) { | ||
123 | (Some(module), Some(fn_def)) => { | ||
124 | let function = source_binder::function_from_module(self.db, module, fn_def); | ||
125 | self.function = Some(function); | ||
126 | } | ||
127 | _ => (), | ||
128 | } | ||
129 | |||
130 | let parent = match name_ref.syntax().parent() { | ||
131 | Some(it) => it, | ||
132 | None => return, | ||
133 | }; | ||
134 | if let Some(segment) = ast::PathSegment::cast(parent) { | ||
135 | let path = segment.parent_path(); | ||
136 | if let Some(mut path) = hir::Path::from_ast(path) { | ||
137 | if !path.is_ident() { | ||
138 | path.segments.pop().unwrap(); | ||
139 | self.path_prefix = Some(path); | ||
140 | return; | ||
141 | } | ||
142 | } | ||
143 | if path.qualifier().is_none() { | ||
144 | self.is_trivial_path = true; | ||
145 | |||
146 | // Find either enclosing expr statement (thing with `;`) or a | ||
147 | // block. If block, check that we are the last expr. | ||
148 | self.can_be_stmt = name_ref | ||
149 | .syntax() | ||
150 | .ancestors() | ||
151 | .find_map(|node| { | ||
152 | if let Some(stmt) = ast::ExprStmt::cast(node) { | ||
153 | return Some(stmt.syntax().range() == name_ref.syntax().range()); | ||
154 | } | ||
155 | if let Some(block) = ast::Block::cast(node) { | ||
156 | return Some( | ||
157 | block.expr().map(|e| e.syntax().range()) | ||
158 | == Some(name_ref.syntax().range()), | ||
159 | ); | ||
160 | } | ||
161 | None | ||
162 | }) | ||
163 | .unwrap_or(false); | ||
164 | |||
165 | if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) { | ||
166 | if let Some(if_expr) = | ||
167 | find_node_at_offset::<ast::IfExpr>(original_file.syntax(), off) | ||
168 | { | ||
169 | if if_expr.syntax().range().end() < name_ref.syntax().range().start() { | ||
170 | self.after_if = true; | ||
171 | } | ||
172 | } | ||
173 | } | ||
174 | } | ||
175 | } | ||
176 | if let Some(field_expr) = ast::FieldExpr::cast(parent) { | ||
177 | // The receiver comes before the point of insertion of the fake | ||
178 | // ident, so it should have the same range in the non-modified file | ||
179 | self.dot_receiver = field_expr | ||
180 | .expr() | ||
181 | .map(|e| e.syntax().range()) | ||
182 | .and_then(|r| find_node_with_range(original_file.syntax(), r)); | ||
183 | } | ||
184 | if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) { | ||
185 | // As above | ||
186 | self.dot_receiver = method_call_expr | ||
187 | .expr() | ||
188 | .map(|e| e.syntax().range()) | ||
189 | .and_then(|r| find_node_with_range(original_file.syntax(), r)); | ||
190 | self.is_method_call = true; | ||
191 | } | ||
192 | } | ||
193 | } | ||
194 | |||
195 | fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> { | ||
196 | let node = find_covering_node(syntax, range); | ||
197 | node.ancestors().find_map(N::cast) | ||
198 | } | ||
199 | |||
200 | fn is_node<N: AstNode>(node: &SyntaxNode) -> bool { | ||
201 | match node.ancestors().filter_map(N::cast).next() { | ||
202 | None => false, | ||
203 | Some(n) => n.syntax().range() == node.range(), | ||
204 | } | ||
205 | } | ||
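The context collection above hinges on one trick: before classifying the cursor position, the file is reparsed with a placeholder identifier spliced in at the offset, so that otherwise incomplete syntax (`let _ = bar<|>`, `use crate::<|>`) still produces a tree worth inspecting. A toy sketch of just the splicing step; the real code routes this through `AtomTextEdit::insert` and `SourceFile::reparse` rather than plain strings:

```rust
/// Insert the same placeholder the real code uses at the completion offset,
/// producing source text that parses even when the user has typed only a
/// partial expression or path.
fn patch_for_completion(source: &str, offset: usize) -> String {
    const PLACEHOLDER: &str = "intellijRulezz";
    let mut patched = String::with_capacity(source.len() + PLACEHOLDER.len());
    patched.push_str(&source[..offset]);
    patched.push_str(PLACEHOLDER);
    patched.push_str(&source[offset..]);
    patched
}

fn main() {
    let src = "fn f() { let _ = bar }";
    // Cursor immediately after `bar`.
    let offset = src.find("bar").unwrap() + "bar".len();
    assert_eq!(
        patch_for_completion(src, offset),
        "fn f() { let _ = barintellijRulezz }"
    );
}
```

The original, unpatched file is still what completions are computed against; the patched tree only answers the question of what kind of position the cursor is in.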
diff --git a/crates/ra_ide_api/src/completion/completion_item.rs b/crates/ra_ide_api/src/completion/completion_item.rs new file mode 100644 index 000000000..a25b87bee --- /dev/null +++ b/crates/ra_ide_api/src/completion/completion_item.rs | |||
@@ -0,0 +1,244 @@ | |||
1 | use hir::PerNs; | ||
2 | |||
3 | use crate::completion::CompletionContext; | ||
4 | |||
5 | /// `CompletionItem` describes a single completion variant in the editor pop-up. | ||
6 | /// It is basically a POD with various properties. To construct a | ||
7 | /// `CompletionItem`, use the `new` method and the `Builder` struct. | ||
8 | #[derive(Debug)] | ||
9 | pub struct CompletionItem { | ||
10 | /// Used only internally in tests, to check only a specific kind of | ||
11 | /// completion. | ||
12 | completion_kind: CompletionKind, | ||
13 | label: String, | ||
14 | lookup: Option<String>, | ||
15 | snippet: Option<String>, | ||
16 | kind: Option<CompletionItemKind>, | ||
17 | } | ||
18 | |||
19 | pub enum InsertText { | ||
20 | PlainText { text: String }, | ||
21 | Snippet { text: String }, | ||
22 | } | ||
23 | |||
24 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] | ||
25 | pub enum CompletionItemKind { | ||
26 | Snippet, | ||
27 | Keyword, | ||
28 | Module, | ||
29 | Function, | ||
30 | Struct, | ||
31 | Enum, | ||
32 | EnumVariant, | ||
33 | Binding, | ||
34 | Field, | ||
35 | } | ||
36 | |||
37 | #[derive(Debug, PartialEq, Eq)] | ||
38 | pub(crate) enum CompletionKind { | ||
39 | /// Parser-based keyword completion. | ||
40 | Keyword, | ||
41 | /// Your usual "complete all valid identifiers". | ||
42 | Reference, | ||
43 | /// "Secret sauce" completions. | ||
44 | Magic, | ||
45 | Snippet, | ||
46 | } | ||
47 | |||
48 | impl CompletionItem { | ||
49 | pub(crate) fn new(completion_kind: CompletionKind, label: impl Into<String>) -> Builder { | ||
50 | let label = label.into(); | ||
51 | Builder { | ||
52 | completion_kind, | ||
53 | label, | ||
54 | lookup: None, | ||
55 | snippet: None, | ||
56 | kind: None, | ||
57 | } | ||
58 | } | ||
59 | /// What the user sees in the pop-up in the UI. | ||
60 | pub fn label(&self) -> &str { | ||
61 | &self.label | ||
62 | } | ||
63 | /// What string is used for filtering. | ||
64 | pub fn lookup(&self) -> &str { | ||
65 | self.lookup | ||
66 | .as_ref() | ||
67 | .map(|it| it.as_str()) | ||
68 | .unwrap_or(self.label()) | ||
69 | } | ||
70 | /// What is inserted. | ||
71 | pub fn insert_text(&self) -> InsertText { | ||
72 | match &self.snippet { | ||
73 | None => InsertText::PlainText { | ||
74 | text: self.label.clone(), | ||
75 | }, | ||
76 | Some(it) => InsertText::Snippet { text: it.clone() }, | ||
77 | } | ||
78 | } | ||
79 | |||
80 | pub fn kind(&self) -> Option<CompletionItemKind> { | ||
81 | self.kind | ||
82 | } | ||
83 | } | ||
84 | |||
85 | /// A helper to make `CompletionItem`s. | ||
86 | #[must_use] | ||
87 | pub(crate) struct Builder { | ||
88 | completion_kind: CompletionKind, | ||
89 | label: String, | ||
90 | lookup: Option<String>, | ||
91 | snippet: Option<String>, | ||
92 | kind: Option<CompletionItemKind>, | ||
93 | } | ||
94 | |||
95 | impl Builder { | ||
96 | pub(crate) fn add_to(self, acc: &mut Completions) { | ||
97 | acc.add(self.build()) | ||
98 | } | ||
99 | |||
100 | pub(crate) fn build(self) -> CompletionItem { | ||
101 | CompletionItem { | ||
102 | label: self.label, | ||
103 | lookup: self.lookup, | ||
104 | snippet: self.snippet, | ||
105 | kind: self.kind, | ||
106 | completion_kind: self.completion_kind, | ||
107 | } | ||
108 | } | ||
109 | pub(crate) fn lookup_by(mut self, lookup: impl Into<String>) -> Builder { | ||
110 | self.lookup = Some(lookup.into()); | ||
111 | self | ||
112 | } | ||
113 | pub(crate) fn snippet(mut self, snippet: impl Into<String>) -> Builder { | ||
114 | self.snippet = Some(snippet.into()); | ||
115 | self | ||
116 | } | ||
117 | pub(crate) fn kind(mut self, kind: CompletionItemKind) -> Builder { | ||
118 | self.kind = Some(kind); | ||
119 | self | ||
120 | } | ||
121 | pub(super) fn from_resolution( | ||
122 | mut self, | ||
123 | ctx: &CompletionContext, | ||
124 | resolution: &hir::Resolution, | ||
125 | ) -> Builder { | ||
126 | let resolved = resolution.def_id.and_then(|d| d.resolve(ctx.db).ok()); | ||
127 | let kind = match resolved { | ||
128 | PerNs { | ||
129 | types: Some(hir::Def::Module(..)), | ||
130 | .. | ||
131 | } => CompletionItemKind::Module, | ||
132 | PerNs { | ||
133 | types: Some(hir::Def::Struct(..)), | ||
134 | .. | ||
135 | } => CompletionItemKind::Struct, | ||
136 | PerNs { | ||
137 | types: Some(hir::Def::Enum(..)), | ||
138 | .. | ||
139 | } => CompletionItemKind::Enum, | ||
140 | PerNs { | ||
141 | values: Some(hir::Def::Function(function)), | ||
142 | .. | ||
143 | } => return self.from_function(ctx, function), | ||
144 | _ => return self, | ||
145 | }; | ||
146 | self.kind = Some(kind); | ||
147 | self | ||
148 | } | ||
149 | |||
150 | fn from_function(mut self, ctx: &CompletionContext, function: hir::Function) -> Builder { | ||
151 | // If not an import, add parentheses automatically. | ||
152 | if ctx.use_item_syntax.is_none() { | ||
153 | if function.signature(ctx.db).args().is_empty() { | ||
154 | self.snippet = Some(format!("{}()$0", self.label)); | ||
155 | } else { | ||
156 | self.snippet = Some(format!("{}($0)", self.label)); | ||
157 | } | ||
158 | } | ||
159 | self.kind = Some(CompletionItemKind::Function); | ||
160 | self | ||
161 | } | ||
162 | } | ||
163 | |||
164 | impl Into<CompletionItem> for Builder { | ||
165 | fn into(self) -> CompletionItem { | ||
166 | self.build() | ||
167 | } | ||
168 | } | ||
169 | |||
170 | /// Represents an in-progress set of completions being built. | ||
171 | #[derive(Debug, Default)] | ||
172 | pub(crate) struct Completions { | ||
173 | buf: Vec<CompletionItem>, | ||
174 | } | ||
175 | |||
176 | impl Completions { | ||
177 | pub(crate) fn add(&mut self, item: impl Into<CompletionItem>) { | ||
178 | self.buf.push(item.into()) | ||
179 | } | ||
180 | pub(crate) fn add_all<I>(&mut self, items: I) | ||
181 | where | ||
182 | I: IntoIterator, | ||
183 | I::Item: Into<CompletionItem>, | ||
184 | { | ||
185 | items.into_iter().for_each(|item| self.add(item.into())) | ||
186 | } | ||
187 | |||
188 | #[cfg(test)] | ||
189 | pub(crate) fn assert_match(&self, expected: &str, kind: CompletionKind) { | ||
190 | let expected = normalize(expected); | ||
191 | let actual = self.debug_render(kind); | ||
192 | test_utils::assert_eq_text!(expected.as_str(), actual.as_str(),); | ||
193 | |||
194 | /// Normalize the textual representation of `Completions`: | ||
195 | /// replace `;` with newlines, normalize whitespace | ||
196 | fn normalize(expected: &str) -> String { | ||
197 | use ra_syntax::{tokenize, TextUnit, TextRange, SyntaxKind::SEMI}; | ||
198 | let mut res = String::new(); | ||
199 | for line in expected.trim().lines() { | ||
200 | let line = line.trim(); | ||
201 | let mut start_offset: TextUnit = 0.into(); | ||
202 | // Yep, we use rust tokenize in completion tests :-) | ||
203 | for token in tokenize(line) { | ||
204 | let range = TextRange::offset_len(start_offset, token.len); | ||
205 | start_offset += token.len; | ||
206 | if token.kind == SEMI { | ||
207 | res.push('\n'); | ||
208 | } else { | ||
209 | res.push_str(&line[range]); | ||
210 | } | ||
211 | } | ||
212 | |||
213 | res.push('\n'); | ||
214 | } | ||
215 | res | ||
216 | } | ||
217 | } | ||
218 | |||
219 | #[cfg(test)] | ||
220 | fn debug_render(&self, kind: CompletionKind) -> String { | ||
221 | let mut res = String::new(); | ||
222 | for c in self.buf.iter() { | ||
223 | if c.completion_kind == kind { | ||
224 | if let Some(lookup) = &c.lookup { | ||
225 | res.push_str(lookup); | ||
226 | res.push_str(&format!(" {:?}", c.label)); | ||
227 | } else { | ||
228 | res.push_str(&c.label); | ||
229 | } | ||
230 | if let Some(snippet) = &c.snippet { | ||
231 | res.push_str(&format!(" {:?}", snippet)); | ||
232 | } | ||
233 | res.push('\n'); | ||
234 | } | ||
235 | } | ||
236 | res | ||
237 | } | ||
238 | } | ||
239 | |||
240 | impl Into<Vec<CompletionItem>> for Completions { | ||
241 | fn into(self) -> Vec<CompletionItem> { | ||
242 | self.buf | ||
243 | } | ||
244 | } | ||
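The `Builder` shown above is the only way completion items get created in this crate. As a minimal illustrative sketch (not taken from the diff; the labels and snippet text are invented), a caller that owns a `Completions` accumulator would drive it like this:

fn add_example_items(acc: &mut Completions) {
    // A bare keyword: the label doubles as the inserted text.
    CompletionItem::new(CompletionKind::Keyword, "fn").add_to(acc);

    // A function completion with an explicit kind and a tab-stop snippet;
    // `lookup_by` controls the string the editor filters on.
    CompletionItem::new(CompletionKind::Reference, "frobnicate")
        .kind(CompletionItemKind::Function)
        .lookup_by("frobnicate")
        .snippet("frobnicate($0)")
        .add_to(acc);
}

The `#[must_use]` attribute on `Builder` makes it hard to forget the final `add_to`/`build` call.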
diff --git a/crates/ra_ide_api/src/db.rs b/crates/ra_ide_api/src/db.rs new file mode 100644 index 000000000..9d46609ec --- /dev/null +++ b/crates/ra_ide_api/src/db.rs | |||
@@ -0,0 +1,128 @@ | |||
1 | use std::{fmt, sync::Arc}; | ||
2 | |||
3 | use salsa::{self, Database}; | ||
4 | use ra_db::{LocationIntener, BaseDatabase, FileId}; | ||
5 | |||
6 | use crate::{symbol_index, LineIndex}; | ||
7 | |||
8 | #[derive(Debug)] | ||
9 | pub(crate) struct RootDatabase { | ||
10 | runtime: salsa::Runtime<RootDatabase>, | ||
11 | id_maps: Arc<IdMaps>, | ||
12 | } | ||
13 | |||
14 | #[derive(Default)] | ||
15 | struct IdMaps { | ||
16 | defs: LocationIntener<hir::DefLoc, hir::DefId>, | ||
17 | macros: LocationIntener<hir::MacroCallLoc, hir::MacroCallId>, | ||
18 | } | ||
19 | |||
20 | impl fmt::Debug for IdMaps { | ||
21 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
22 | f.debug_struct("IdMaps") | ||
23 | .field("n_defs", &self.defs.len()) | ||
24 | .finish() | ||
25 | } | ||
26 | } | ||
27 | |||
28 | impl salsa::Database for RootDatabase { | ||
29 | fn salsa_runtime(&self) -> &salsa::Runtime<RootDatabase> { | ||
30 | &self.runtime | ||
31 | } | ||
32 | } | ||
33 | |||
34 | impl Default for RootDatabase { | ||
35 | fn default() -> RootDatabase { | ||
36 | let mut db = RootDatabase { | ||
37 | runtime: salsa::Runtime::default(), | ||
38 | id_maps: Default::default(), | ||
39 | }; | ||
40 | db.query_mut(ra_db::CrateGraphQuery) | ||
41 | .set((), Default::default()); | ||
42 | db.query_mut(ra_db::LocalRootsQuery) | ||
43 | .set((), Default::default()); | ||
44 | db.query_mut(ra_db::LibraryRootsQuery) | ||
45 | .set((), Default::default()); | ||
46 | db | ||
47 | } | ||
48 | } | ||
49 | |||
50 | impl salsa::ParallelDatabase for RootDatabase { | ||
51 | fn snapshot(&self) -> salsa::Snapshot<RootDatabase> { | ||
52 | salsa::Snapshot::new(RootDatabase { | ||
53 | runtime: self.runtime.snapshot(self), | ||
54 | id_maps: self.id_maps.clone(), | ||
55 | }) | ||
56 | } | ||
57 | } | ||
58 | |||
59 | impl BaseDatabase for RootDatabase {} | ||
60 | |||
61 | impl AsRef<LocationIntener<hir::DefLoc, hir::DefId>> for RootDatabase { | ||
62 | fn as_ref(&self) -> &LocationIntener<hir::DefLoc, hir::DefId> { | ||
63 | &self.id_maps.defs | ||
64 | } | ||
65 | } | ||
66 | |||
67 | impl AsRef<LocationIntener<hir::MacroCallLoc, hir::MacroCallId>> for RootDatabase { | ||
68 | fn as_ref(&self) -> &LocationIntener<hir::MacroCallLoc, hir::MacroCallId> { | ||
69 | &self.id_maps.macros | ||
70 | } | ||
71 | } | ||
72 | |||
73 | salsa::query_group! { | ||
74 | pub(crate) trait LineIndexDatabase: ra_db::FilesDatabase + BaseDatabase { | ||
75 | fn line_index(file_id: FileId) -> Arc<LineIndex> { | ||
76 | type LineIndexQuery; | ||
77 | } | ||
78 | } | ||
79 | } | ||
80 | |||
81 | fn line_index(db: &impl ra_db::FilesDatabase, file_id: FileId) -> Arc<LineIndex> { | ||
82 | let text = db.file_text(file_id); | ||
83 | Arc::new(LineIndex::new(&*text)) | ||
84 | } | ||
85 | |||
86 | salsa::database_storage! { | ||
87 | pub(crate) struct RootDatabaseStorage for RootDatabase { | ||
88 | impl ra_db::FilesDatabase { | ||
89 | fn file_text() for ra_db::FileTextQuery; | ||
90 | fn file_relative_path() for ra_db::FileRelativePathQuery; | ||
91 | fn file_source_root() for ra_db::FileSourceRootQuery; | ||
92 | fn source_root() for ra_db::SourceRootQuery; | ||
93 | fn local_roots() for ra_db::LocalRootsQuery; | ||
94 | fn library_roots() for ra_db::LibraryRootsQuery; | ||
95 | fn crate_graph() for ra_db::CrateGraphQuery; | ||
96 | } | ||
97 | impl ra_db::SyntaxDatabase { | ||
98 | fn source_file() for ra_db::SourceFileQuery; | ||
99 | } | ||
100 | impl LineIndexDatabase { | ||
101 | fn line_index() for LineIndexQuery; | ||
102 | } | ||
103 | impl symbol_index::SymbolsDatabase { | ||
104 | fn file_symbols() for symbol_index::FileSymbolsQuery; | ||
105 | fn library_symbols() for symbol_index::LibrarySymbolsQuery; | ||
106 | } | ||
107 | impl hir::db::HirDatabase { | ||
108 | fn hir_source_file() for hir::db::HirSourceFileQuery; | ||
109 | fn expand_macro_invocation() for hir::db::ExpandMacroCallQuery; | ||
110 | fn module_tree() for hir::db::ModuleTreeQuery; | ||
111 | fn fn_scopes() for hir::db::FnScopesQuery; | ||
112 | fn file_items() for hir::db::SourceFileItemsQuery; | ||
113 | fn file_item() for hir::db::FileItemQuery; | ||
114 | fn input_module_items() for hir::db::InputModuleItemsQuery; | ||
115 | fn item_map() for hir::db::ItemMapQuery; | ||
116 | fn submodules() for hir::db::SubmodulesQuery; | ||
117 | fn infer() for hir::db::InferQuery; | ||
118 | fn type_for_def() for hir::db::TypeForDefQuery; | ||
119 | fn type_for_field() for hir::db::TypeForFieldQuery; | ||
120 | fn struct_data() for hir::db::StructDataQuery; | ||
121 | fn enum_data() for hir::db::EnumDataQuery; | ||
122 | fn impls_in_module() for hir::db::ImplsInModuleQuery; | ||
123 | fn body_hir() for hir::db::BodyHirQuery; | ||
124 | fn body_syntax_mapping() for hir::db::BodySyntaxMappingQuery; | ||
125 | fn fn_signature() for hir::db::FnSignatureQuery; | ||
126 | } | ||
127 | } | ||
128 | } | ||
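Sketching how the storage above is consumed (the helper below is hypothetical, and assumes the `ra_db::FilesDatabase`, `ra_db::SyntaxDatabase`, and `LineIndexDatabase` traits are in scope): every query wired up in `database_storage!` becomes a cached method on `RootDatabase`, so callers simply ask for derived data and salsa handles memoization and invalidation.

fn describe_file(db: &RootDatabase, file_id: FileId) -> String {
    let text = db.file_text(file_id);          // ra_db::FileTextQuery
    let _line_index = db.line_index(file_id);  // the LineIndexQuery defined above, computed once and cached
    let source_file = db.source_file(file_id); // ra_db::SourceFileQuery (parsed syntax tree)
    format!(
        "{} bytes, {} syntax nodes",
        text.len(),
        source_file.syntax().descendants().count()
    )
}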
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs new file mode 100644 index 000000000..c3c809c9f --- /dev/null +++ b/crates/ra_ide_api/src/extend_selection.rs | |||
@@ -0,0 +1,56 @@ | |||
1 | use ra_db::SyntaxDatabase; | ||
2 | use ra_syntax::{ | ||
3 | SyntaxNode, AstNode, SourceFile, | ||
4 | ast, algo::find_covering_node, | ||
5 | }; | ||
6 | |||
7 | use crate::{ | ||
8 | TextRange, FileRange, | ||
9 | db::RootDatabase, | ||
10 | }; | ||
11 | |||
12 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { | ||
13 | let source_file = db.source_file(frange.file_id); | ||
14 | if let Some(range) = extend_selection_in_macro(db, &source_file, frange) { | ||
15 | return range; | ||
16 | } | ||
17 | ra_ide_api_light::extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range) | ||
18 | } | ||
19 | |||
20 | fn extend_selection_in_macro( | ||
21 | _db: &RootDatabase, | ||
22 | source_file: &SourceFile, | ||
23 | frange: FileRange, | ||
24 | ) -> Option<TextRange> { | ||
25 | let macro_call = find_macro_call(source_file.syntax(), frange.range)?; | ||
26 | let (off, exp) = hir::MacroDef::ast_expand(macro_call)?; | ||
27 | let dst_range = exp.map_range_forward(frange.range - off)?; | ||
28 | let dst_range = ra_ide_api_light::extend_selection(&exp.syntax(), dst_range)?; | ||
29 | let src_range = exp.map_range_back(dst_range)? + off; | ||
30 | Some(src_range) | ||
31 | } | ||
32 | |||
33 | fn find_macro_call(node: &SyntaxNode, range: TextRange) -> Option<&ast::MacroCall> { | ||
34 | find_covering_node(node, range) | ||
35 | .ancestors() | ||
36 | .find_map(ast::MacroCall::cast) | ||
37 | } | ||
38 | |||
39 | #[cfg(test)] | ||
40 | mod tests { | ||
41 | use crate::mock_analysis::single_file_with_range; | ||
42 | use test_utils::assert_eq_dbg; | ||
43 | |||
44 | #[test] | ||
45 | fn extend_selection_inside_macros() { | ||
46 | let (analysis, frange) = single_file_with_range( | ||
47 | " | ||
48 | fn main() { | ||
49 | ctry!(foo(|x| <|>x<|>)); | ||
50 | } | ||
51 | ", | ||
52 | ); | ||
53 | let r = analysis.extend_selection(frange); | ||
54 | assert_eq_dbg("[51; 56)", &r); | ||
55 | } | ||
56 | } | ||
diff --git a/crates/ra_ide_api/src/goto_defenition.rs b/crates/ra_ide_api/src/goto_defenition.rs new file mode 100644 index 000000000..fcd8d315e --- /dev/null +++ b/crates/ra_ide_api/src/goto_defenition.rs | |||
@@ -0,0 +1,139 @@ | |||
1 | use ra_db::{FileId, Cancelable, SyntaxDatabase}; | ||
2 | use ra_syntax::{ | ||
3 | TextRange, AstNode, ast, SyntaxKind::{NAME, MODULE}, | ||
4 | algo::find_node_at_offset, | ||
5 | }; | ||
6 | |||
7 | use crate::{FilePosition, NavigationTarget, db::RootDatabase}; | ||
8 | |||
9 | pub(crate) fn goto_defenition( | ||
10 | db: &RootDatabase, | ||
11 | position: FilePosition, | ||
12 | ) -> Cancelable<Option<Vec<NavigationTarget>>> { | ||
13 | let file = db.source_file(position.file_id); | ||
14 | let syntax = file.syntax(); | ||
15 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { | ||
16 | return Ok(Some(reference_defenition(db, position.file_id, name_ref)?)); | ||
17 | } | ||
18 | if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) { | ||
19 | return name_defenition(db, position.file_id, name); | ||
20 | } | ||
21 | Ok(None) | ||
22 | } | ||
23 | |||
24 | pub(crate) fn reference_defenition( | ||
25 | db: &RootDatabase, | ||
26 | file_id: FileId, | ||
27 | name_ref: &ast::NameRef, | ||
28 | ) -> Cancelable<Vec<NavigationTarget>> { | ||
29 | if let Some(fn_descr) = | ||
30 | hir::source_binder::function_from_child_node(db, file_id, name_ref.syntax())? | ||
31 | { | ||
32 | let scope = fn_descr.scopes(db)?; | ||
33 | // First try to resolve the symbol locally | ||
34 | if let Some(entry) = scope.resolve_local_name(name_ref) { | ||
35 | let nav = NavigationTarget { | ||
36 | file_id, | ||
37 | name: entry.name().to_string().into(), | ||
38 | range: entry.ptr().range(), | ||
39 | kind: NAME, | ||
40 | ptr: None, | ||
41 | }; | ||
42 | return Ok(vec![nav]); | ||
43 | }; | ||
44 | } | ||
45 | // If that fails, try the index-based approach. | ||
46 | let navs = db | ||
47 | .index_resolve(name_ref)? | ||
48 | .into_iter() | ||
49 | .map(NavigationTarget::from_symbol) | ||
50 | .collect(); | ||
51 | Ok(navs) | ||
52 | } | ||
53 | |||
54 | fn name_defenition( | ||
55 | db: &RootDatabase, | ||
56 | file_id: FileId, | ||
57 | name: &ast::Name, | ||
58 | ) -> Cancelable<Option<Vec<NavigationTarget>>> { | ||
59 | if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { | ||
60 | if module.has_semi() { | ||
61 | if let Some(child_module) = | ||
62 | hir::source_binder::module_from_declaration(db, file_id, module)? | ||
63 | { | ||
64 | let (file_id, _) = child_module.defenition_source(db)?; | ||
65 | let name = match child_module.name(db)? { | ||
66 | Some(name) => name.to_string().into(), | ||
67 | None => "".into(), | ||
68 | }; | ||
69 | let nav = NavigationTarget { | ||
70 | file_id, | ||
71 | name, | ||
72 | range: TextRange::offset_len(0.into(), 0.into()), | ||
73 | kind: MODULE, | ||
74 | ptr: None, | ||
75 | }; | ||
76 | return Ok(Some(vec![nav])); | ||
77 | } | ||
78 | } | ||
79 | } | ||
80 | Ok(None) | ||
81 | } | ||
82 | |||
83 | #[cfg(test)] | ||
84 | mod tests { | ||
85 | use test_utils::assert_eq_dbg; | ||
86 | use crate::mock_analysis::analysis_and_position; | ||
87 | |||
88 | #[test] | ||
89 | fn goto_defenition_works_in_items() { | ||
90 | let (analysis, pos) = analysis_and_position( | ||
91 | " | ||
92 | //- /lib.rs | ||
93 | struct Foo; | ||
94 | enum E { X(Foo<|>) } | ||
95 | ", | ||
96 | ); | ||
97 | |||
98 | let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); | ||
99 | assert_eq_dbg( | ||
100 | r#"[NavigationTarget { file_id: FileId(1), name: "Foo", | ||
101 | kind: STRUCT_DEF, range: [0; 11), | ||
102 | ptr: Some(LocalSyntaxPtr { range: [0; 11), kind: STRUCT_DEF }) }]"#, | ||
103 | &symbols, | ||
104 | ); | ||
105 | } | ||
106 | |||
107 | #[test] | ||
108 | fn goto_defenition_works_for_module_declaration() { | ||
109 | let (analysis, pos) = analysis_and_position( | ||
110 | " | ||
111 | //- /lib.rs | ||
112 | mod <|>foo; | ||
113 | //- /foo.rs | ||
114 | // empty | ||
115 | ", | ||
116 | ); | ||
117 | |||
118 | let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); | ||
119 | assert_eq_dbg( | ||
120 | r#"[NavigationTarget { file_id: FileId(2), name: "foo", kind: MODULE, range: [0; 0), ptr: None }]"#, | ||
121 | &symbols, | ||
122 | ); | ||
123 | |||
124 | let (analysis, pos) = analysis_and_position( | ||
125 | " | ||
126 | //- /lib.rs | ||
127 | mod <|>foo; | ||
128 | //- /foo/mod.rs | ||
129 | // empty | ||
130 | ", | ||
131 | ); | ||
132 | |||
133 | let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); | ||
134 | assert_eq_dbg( | ||
135 | r#"[NavigationTarget { file_id: FileId(2), name: "foo", kind: MODULE, range: [0; 0), ptr: None }]"#, | ||
136 | &symbols, | ||
137 | ); | ||
138 | } | ||
139 | } | ||
diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs new file mode 100644 index 000000000..475524ee1 --- /dev/null +++ b/crates/ra_ide_api/src/hover.rs | |||
@@ -0,0 +1,257 @@ | |||
1 | use ra_db::{Cancelable, SyntaxDatabase}; | ||
2 | use ra_syntax::{ | ||
3 | AstNode, SyntaxNode, TreePtr, | ||
4 | ast::{self, NameOwner}, | ||
5 | algo::{find_covering_node, find_node_at_offset, find_leaf_at_offset, visit::{visitor, Visitor}}, | ||
6 | }; | ||
7 | |||
8 | use crate::{db::RootDatabase, RangeInfo, FilePosition, FileRange, NavigationTarget}; | ||
9 | |||
10 | pub(crate) fn hover( | ||
11 | db: &RootDatabase, | ||
12 | position: FilePosition, | ||
13 | ) -> Cancelable<Option<RangeInfo<String>>> { | ||
14 | let file = db.source_file(position.file_id); | ||
15 | let mut res = Vec::new(); | ||
16 | |||
17 | let mut range = None; | ||
18 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset) { | ||
19 | let navs = crate::goto_defenition::reference_defenition(db, position.file_id, name_ref)?; | ||
20 | for nav in navs { | ||
21 | res.extend(doc_text_for(db, nav)?) | ||
22 | } | ||
23 | if !res.is_empty() { | ||
24 | range = Some(name_ref.syntax().range()) | ||
25 | } | ||
26 | } | ||
27 | if range.is_none() { | ||
28 | let node = find_leaf_at_offset(file.syntax(), position.offset).find_map(|leaf| { | ||
29 | leaf.ancestors() | ||
30 | .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some()) | ||
31 | }); | ||
32 | let node = ctry!(node); | ||
33 | let frange = FileRange { | ||
34 | file_id: position.file_id, | ||
35 | range: node.range(), | ||
36 | }; | ||
37 | res.extend(type_of(db, frange)?); | ||
38 | range = Some(node.range()); | ||
39 | }; | ||
40 | |||
41 | let range = ctry!(range); | ||
42 | if res.is_empty() { | ||
43 | return Ok(None); | ||
44 | } | ||
45 | let res = RangeInfo::new(range, res.join("\n\n---\n")); | ||
46 | Ok(Some(res)) | ||
47 | } | ||
48 | |||
49 | pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Cancelable<Option<String>> { | ||
50 | let file = db.source_file(frange.file_id); | ||
51 | let syntax = file.syntax(); | ||
52 | let leaf_node = find_covering_node(syntax, frange.range); | ||
53 | // if we picked an identifier, expand to the pattern/expression | ||
54 | let node = leaf_node | ||
55 | .ancestors() | ||
56 | .take_while(|it| it.range() == leaf_node.range()) | ||
57 | .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some()) | ||
58 | .unwrap_or(leaf_node); | ||
59 | let parent_fn = ctry!(node.ancestors().find_map(ast::FnDef::cast)); | ||
60 | let function = ctry!(hir::source_binder::function_from_source( | ||
61 | db, | ||
62 | frange.file_id, | ||
63 | parent_fn | ||
64 | )?); | ||
65 | let infer = function.infer(db)?; | ||
66 | let syntax_mapping = function.body_syntax_mapping(db)?; | ||
67 | if let Some(expr) = ast::Expr::cast(node).and_then(|e| syntax_mapping.node_expr(e)) { | ||
68 | Ok(Some(infer[expr].to_string())) | ||
69 | } else if let Some(pat) = ast::Pat::cast(node).and_then(|p| syntax_mapping.node_pat(p)) { | ||
70 | Ok(Some(infer[pat].to_string())) | ||
71 | } else { | ||
72 | Ok(None) | ||
73 | } | ||
74 | } | ||
75 | |||
76 | // FIXME: this should not really use navigation target. Rather, an approximately | ||
77 | // resolved symbol should return a `DefId`. | ||
78 | fn doc_text_for(db: &RootDatabase, nav: NavigationTarget) -> Cancelable<Option<String>> { | ||
79 | let result = match (nav.description(db), nav.docs(db)) { | ||
80 | (Some(desc), Some(docs)) => Some("```rust\n".to_string() + &*desc + "\n```\n\n" + &*docs), | ||
81 | (Some(desc), None) => Some("```rust\n".to_string() + &*desc + "\n```"), | ||
82 | (None, Some(docs)) => Some(docs), | ||
83 | _ => None, | ||
84 | }; | ||
85 | |||
86 | Ok(result) | ||
87 | } | ||
88 | |||
89 | impl NavigationTarget { | ||
90 | fn node(&self, db: &RootDatabase) -> Option<TreePtr<SyntaxNode>> { | ||
91 | let source_file = db.source_file(self.file_id); | ||
92 | let source_file = source_file.syntax(); | ||
93 | let node = source_file | ||
94 | .descendants() | ||
95 | .find(|node| node.kind() == self.kind && node.range() == self.range)? | ||
96 | .to_owned(); | ||
97 | Some(node) | ||
98 | } | ||
99 | |||
100 | fn docs(&self, db: &RootDatabase) -> Option<String> { | ||
101 | let node = self.node(db)?; | ||
102 | fn doc_comments<N: ast::DocCommentsOwner>(node: &N) -> Option<String> { | ||
103 | let comments = node.doc_comment_text(); | ||
104 | if comments.is_empty() { | ||
105 | None | ||
106 | } else { | ||
107 | Some(comments) | ||
108 | } | ||
109 | } | ||
110 | |||
111 | visitor() | ||
112 | .visit(doc_comments::<ast::FnDef>) | ||
113 | .visit(doc_comments::<ast::StructDef>) | ||
114 | .visit(doc_comments::<ast::EnumDef>) | ||
115 | .visit(doc_comments::<ast::TraitDef>) | ||
116 | .visit(doc_comments::<ast::Module>) | ||
117 | .visit(doc_comments::<ast::TypeDef>) | ||
118 | .visit(doc_comments::<ast::ConstDef>) | ||
119 | .visit(doc_comments::<ast::StaticDef>) | ||
120 | .accept(&node)? | ||
121 | } | ||
122 | |||
123 | /// Get a description of this node. | ||
124 | /// | ||
125 | /// e.g. `struct Name`, `enum Name`, `fn Name` | ||
126 | fn description(&self, db: &RootDatabase) -> Option<String> { | ||
127 | // TODO: After type inference is done, add type information to improve the output | ||
128 | let node = self.node(db)?; | ||
129 | // TODO: Refactor to have less repetition | ||
130 | visitor() | ||
131 | .visit(|node: &ast::FnDef| { | ||
132 | let mut string = "fn ".to_string(); | ||
133 | node.name()?.syntax().text().push_to(&mut string); | ||
134 | Some(string) | ||
135 | }) | ||
136 | .visit(|node: &ast::StructDef| { | ||
137 | let mut string = "struct ".to_string(); | ||
138 | node.name()?.syntax().text().push_to(&mut string); | ||
139 | Some(string) | ||
140 | }) | ||
141 | .visit(|node: &ast::EnumDef| { | ||
142 | let mut string = "enum ".to_string(); | ||
143 | node.name()?.syntax().text().push_to(&mut string); | ||
144 | Some(string) | ||
145 | }) | ||
146 | .visit(|node: &ast::TraitDef| { | ||
147 | let mut string = "trait ".to_string(); | ||
148 | node.name()?.syntax().text().push_to(&mut string); | ||
149 | Some(string) | ||
150 | }) | ||
151 | .visit(|node: &ast::Module| { | ||
152 | let mut string = "mod ".to_string(); | ||
153 | node.name()?.syntax().text().push_to(&mut string); | ||
154 | Some(string) | ||
155 | }) | ||
156 | .visit(|node: &ast::TypeDef| { | ||
157 | let mut string = "type ".to_string(); | ||
158 | node.name()?.syntax().text().push_to(&mut string); | ||
159 | Some(string) | ||
160 | }) | ||
161 | .visit(|node: &ast::ConstDef| { | ||
162 | let mut string = "const ".to_string(); | ||
163 | node.name()?.syntax().text().push_to(&mut string); | ||
164 | Some(string) | ||
165 | }) | ||
166 | .visit(|node: &ast::StaticDef| { | ||
167 | let mut string = "static ".to_string(); | ||
168 | node.name()?.syntax().text().push_to(&mut string); | ||
169 | Some(string) | ||
170 | }) | ||
171 | .accept(&node)? | ||
172 | } | ||
173 | } | ||
174 | |||
175 | #[cfg(test)] | ||
176 | mod tests { | ||
177 | use ra_syntax::TextRange; | ||
178 | use crate::mock_analysis::{single_file_with_position, single_file_with_range}; | ||
179 | |||
180 | #[test] | ||
181 | fn hover_shows_type_of_an_expression() { | ||
182 | let (analysis, position) = single_file_with_position( | ||
183 | " | ||
184 | pub fn foo() -> u32 { 1 } | ||
185 | |||
186 | fn main() { | ||
187 | let foo_test = foo()<|>; | ||
188 | } | ||
189 | ", | ||
190 | ); | ||
191 | let hover = analysis.hover(position).unwrap().unwrap(); | ||
192 | assert_eq!(hover.range, TextRange::from_to(95.into(), 100.into())); | ||
193 | assert_eq!(hover.info, "u32"); | ||
194 | } | ||
195 | |||
196 | #[test] | ||
197 | fn hover_for_local_variable() { | ||
198 | let (analysis, position) = single_file_with_position("fn func(foo: i32) { fo<|>o; }"); | ||
199 | let hover = analysis.hover(position).unwrap().unwrap(); | ||
200 | assert_eq!(hover.info, "i32"); | ||
201 | } | ||
202 | |||
203 | #[test] | ||
204 | fn hover_for_local_variable_pat() { | ||
205 | let (analysis, position) = single_file_with_position("fn func(fo<|>o: i32) {}"); | ||
206 | let hover = analysis.hover(position).unwrap().unwrap(); | ||
207 | assert_eq!(hover.info, "i32"); | ||
208 | } | ||
209 | |||
210 | #[test] | ||
211 | fn test_type_of_for_function() { | ||
212 | let (analysis, range) = single_file_with_range( | ||
213 | " | ||
214 | pub fn foo() -> u32 { 1 }; | ||
215 | |||
216 | fn main() { | ||
217 | let foo_test = <|>foo()<|>; | ||
218 | } | ||
219 | ", | ||
220 | ); | ||
221 | |||
222 | let type_name = analysis.type_of(range).unwrap().unwrap(); | ||
223 | assert_eq!("u32", &type_name); | ||
224 | } | ||
225 | |||
226 | // FIXME: improve type_of to make this work | ||
227 | #[test] | ||
228 | fn test_type_of_for_expr_1() { | ||
229 | let (analysis, range) = single_file_with_range( | ||
230 | " | ||
231 | fn main() { | ||
232 | let foo = <|>1 + foo_test<|>; | ||
233 | } | ||
234 | ", | ||
235 | ); | ||
236 | |||
237 | let type_name = analysis.type_of(range).unwrap().unwrap(); | ||
238 | assert_eq!("[unknown]", &type_name); | ||
239 | } | ||
240 | |||
241 | // FIXME: improve type_of to make this work | ||
242 | #[test] | ||
243 | fn test_type_of_for_expr_2() { | ||
244 | let (analysis, range) = single_file_with_range( | ||
245 | " | ||
246 | fn main() { | ||
247 | let foo: usize = 1; | ||
248 | let bar = <|>1 + foo_test<|>; | ||
249 | } | ||
250 | ", | ||
251 | ); | ||
252 | |||
253 | let type_name = analysis.type_of(range).unwrap().unwrap(); | ||
254 | assert_eq!("[unknown]", &type_name); | ||
255 | } | ||
256 | |||
257 | } | ||
diff --git a/crates/ra_ide_api/src/imp.rs b/crates/ra_ide_api/src/imp.rs new file mode 100644 index 000000000..7c60ab7d6 --- /dev/null +++ b/crates/ra_ide_api/src/imp.rs | |||
@@ -0,0 +1,309 @@ | |||
1 | use std::sync::Arc; | ||
2 | |||
3 | use salsa::Database; | ||
4 | |||
5 | use hir::{ | ||
6 | self, Problem, source_binder, | ||
7 | }; | ||
8 | use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase}; | ||
9 | use ra_ide_api_light::{self, assists, LocalEdit, Severity}; | ||
10 | use ra_syntax::{ | ||
11 | TextRange, AstNode, SourceFile, | ||
12 | ast::{self, NameOwner}, | ||
13 | algo::find_node_at_offset, | ||
14 | SyntaxKind::*, | ||
15 | }; | ||
16 | |||
17 | use crate::{ | ||
18 | AnalysisChange, | ||
19 | Cancelable, NavigationTarget, | ||
20 | CrateId, db, Diagnostic, FileId, FilePosition, FileRange, FileSystemEdit, | ||
21 | Query, RootChange, SourceChange, SourceFileEdit, | ||
22 | symbol_index::{LibrarySymbolsQuery, FileSymbol}, | ||
23 | }; | ||
24 | |||
25 | impl db::RootDatabase { | ||
26 | pub(crate) fn apply_change(&mut self, change: AnalysisChange) { | ||
27 | log::info!("apply_change {:?}", change); | ||
28 | // self.gc_syntax_trees(); | ||
29 | if !change.new_roots.is_empty() { | ||
30 | let mut local_roots = Vec::clone(&self.local_roots()); | ||
31 | for (root_id, is_local) in change.new_roots { | ||
32 | self.query_mut(ra_db::SourceRootQuery) | ||
33 | .set(root_id, Default::default()); | ||
34 | if is_local { | ||
35 | local_roots.push(root_id); | ||
36 | } | ||
37 | } | ||
38 | self.query_mut(ra_db::LocalRootsQuery) | ||
39 | .set((), Arc::new(local_roots)); | ||
40 | } | ||
41 | |||
42 | for (root_id, root_change) in change.roots_changed { | ||
43 | self.apply_root_change(root_id, root_change); | ||
44 | } | ||
45 | for (file_id, text) in change.files_changed { | ||
46 | self.query_mut(ra_db::FileTextQuery).set(file_id, text) | ||
47 | } | ||
48 | if !change.libraries_added.is_empty() { | ||
49 | let mut libraries = Vec::clone(&self.library_roots()); | ||
50 | for library in change.libraries_added { | ||
51 | libraries.push(library.root_id); | ||
52 | self.query_mut(ra_db::SourceRootQuery) | ||
53 | .set(library.root_id, Default::default()); | ||
54 | self.query_mut(LibrarySymbolsQuery) | ||
55 | .set_constant(library.root_id, Arc::new(library.symbol_index)); | ||
56 | self.apply_root_change(library.root_id, library.root_change); | ||
57 | } | ||
58 | self.query_mut(ra_db::LibraryRootsQuery) | ||
59 | .set((), Arc::new(libraries)); | ||
60 | } | ||
61 | if let Some(crate_graph) = change.crate_graph { | ||
62 | self.query_mut(ra_db::CrateGraphQuery) | ||
63 | .set((), Arc::new(crate_graph)) | ||
64 | } | ||
65 | } | ||
66 | |||
67 | fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) { | ||
68 | let mut source_root = SourceRoot::clone(&self.source_root(root_id)); | ||
69 | for add_file in root_change.added { | ||
70 | self.query_mut(ra_db::FileTextQuery) | ||
71 | .set(add_file.file_id, add_file.text); | ||
72 | self.query_mut(ra_db::FileRelativePathQuery) | ||
73 | .set(add_file.file_id, add_file.path.clone()); | ||
74 | self.query_mut(ra_db::FileSourceRootQuery) | ||
75 | .set(add_file.file_id, root_id); | ||
76 | source_root.files.insert(add_file.path, add_file.file_id); | ||
77 | } | ||
78 | for remove_file in root_change.removed { | ||
79 | self.query_mut(ra_db::FileTextQuery) | ||
80 | .set(remove_file.file_id, Default::default()); | ||
81 | source_root.files.remove(&remove_file.path); | ||
82 | } | ||
83 | self.query_mut(ra_db::SourceRootQuery) | ||
84 | .set(root_id, Arc::new(source_root)); | ||
85 | } | ||
86 | |||
87 | #[allow(unused)] | ||
88 | /// Ideally, we should call this function from time to time to collect heavy | ||
89 | /// syntax trees. However, if we actually do that, everything is recomputed | ||
90 | /// for some reason. Needs investigation. | ||
91 | fn gc_syntax_trees(&mut self) { | ||
92 | self.query(ra_db::SourceFileQuery) | ||
93 | .sweep(salsa::SweepStrategy::default().discard_values()); | ||
94 | self.query(hir::db::SourceFileItemsQuery) | ||
95 | .sweep(salsa::SweepStrategy::default().discard_values()); | ||
96 | self.query(hir::db::FileItemQuery) | ||
97 | .sweep(salsa::SweepStrategy::default().discard_values()); | ||
98 | } | ||
99 | } | ||
100 | |||
101 | impl db::RootDatabase { | ||
102 | /// This returns `Vec` because a module may be included from several places. We | ||
103 | /// don't handle this case yet though, so the Vec has length at most one. | ||
104 | pub(crate) fn parent_module( | ||
105 | &self, | ||
106 | position: FilePosition, | ||
107 | ) -> Cancelable<Vec<NavigationTarget>> { | ||
108 | let module = match source_binder::module_from_position(self, position)? { | ||
109 | None => return Ok(Vec::new()), | ||
110 | Some(it) => it, | ||
111 | }; | ||
112 | let (file_id, ast_module) = match module.declaration_source(self)? { | ||
113 | None => return Ok(Vec::new()), | ||
114 | Some(it) => it, | ||
115 | }; | ||
116 | let name = ast_module.name().unwrap(); | ||
117 | Ok(vec![NavigationTarget { | ||
118 | file_id, | ||
119 | name: name.text().clone(), | ||
120 | range: name.syntax().range(), | ||
121 | kind: MODULE, | ||
122 | ptr: None, | ||
123 | }]) | ||
124 | } | ||
125 | /// Returns `Vec` for the same reason as `parent_module` | ||
126 | pub(crate) fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { | ||
127 | let module = match source_binder::module_from_file_id(self, file_id)? { | ||
128 | Some(it) => it, | ||
129 | None => return Ok(Vec::new()), | ||
130 | }; | ||
131 | let krate = match module.krate(self)? { | ||
132 | Some(it) => it, | ||
133 | None => return Ok(Vec::new()), | ||
134 | }; | ||
135 | Ok(vec![krate.crate_id()]) | ||
136 | } | ||
137 | pub(crate) fn find_all_refs( | ||
138 | &self, | ||
139 | position: FilePosition, | ||
140 | ) -> Cancelable<Vec<(FileId, TextRange)>> { | ||
141 | let file = self.source_file(position.file_id); | ||
142 | // Find the binding associated with the offset | ||
143 | let (binding, descr) = match find_binding(self, &file, position)? { | ||
144 | None => return Ok(Vec::new()), | ||
145 | Some(it) => it, | ||
146 | }; | ||
147 | |||
148 | let mut ret = binding | ||
149 | .name() | ||
150 | .into_iter() | ||
151 | .map(|name| (position.file_id, name.syntax().range())) | ||
152 | .collect::<Vec<_>>(); | ||
153 | ret.extend( | ||
154 | descr | ||
155 | .scopes(self)? | ||
156 | .find_all_refs(binding) | ||
157 | .into_iter() | ||
158 | .map(|ref_desc| (position.file_id, ref_desc.range)), | ||
159 | ); | ||
160 | |||
161 | return Ok(ret); | ||
162 | |||
163 | fn find_binding<'a>( | ||
164 | db: &db::RootDatabase, | ||
165 | source_file: &'a SourceFile, | ||
166 | position: FilePosition, | ||
167 | ) -> Cancelable<Option<(&'a ast::BindPat, hir::Function)>> { | ||
168 | let syntax = source_file.syntax(); | ||
169 | if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) { | ||
170 | let descr = ctry!(source_binder::function_from_child_node( | ||
171 | db, | ||
172 | position.file_id, | ||
173 | binding.syntax(), | ||
174 | )?); | ||
175 | return Ok(Some((binding, descr))); | ||
176 | }; | ||
177 | let name_ref = ctry!(find_node_at_offset::<ast::NameRef>(syntax, position.offset)); | ||
178 | let descr = ctry!(source_binder::function_from_child_node( | ||
179 | db, | ||
180 | position.file_id, | ||
181 | name_ref.syntax(), | ||
182 | )?); | ||
183 | let scope = descr.scopes(db)?; | ||
184 | let resolved = ctry!(scope.resolve_local_name(name_ref)); | ||
185 | let resolved = resolved.ptr().resolve(source_file); | ||
186 | let binding = ctry!(find_node_at_offset::<ast::BindPat>( | ||
187 | syntax, | ||
188 | resolved.range().end() | ||
189 | )); | ||
190 | Ok(Some((binding, descr))) | ||
191 | } | ||
192 | } | ||
193 | |||
194 | pub(crate) fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> { | ||
195 | let syntax = self.source_file(file_id); | ||
196 | |||
197 | let mut res = ra_ide_api_light::diagnostics(&syntax) | ||
198 | .into_iter() | ||
199 | .map(|d| Diagnostic { | ||
200 | range: d.range, | ||
201 | message: d.msg, | ||
202 | severity: d.severity, | ||
203 | fix: d.fix.map(|fix| SourceChange::from_local_edit(file_id, fix)), | ||
204 | }) | ||
205 | .collect::<Vec<_>>(); | ||
206 | if let Some(m) = source_binder::module_from_file_id(self, file_id)? { | ||
207 | for (name_node, problem) in m.problems(self)? { | ||
208 | let source_root = self.file_source_root(file_id); | ||
209 | let diag = match problem { | ||
210 | Problem::UnresolvedModule { candidate } => { | ||
211 | let create_file = FileSystemEdit::CreateFile { | ||
212 | source_root, | ||
213 | path: candidate.clone(), | ||
214 | }; | ||
215 | let fix = SourceChange { | ||
216 | label: "create module".to_string(), | ||
217 | source_file_edits: Vec::new(), | ||
218 | file_system_edits: vec![create_file], | ||
219 | cursor_position: None, | ||
220 | }; | ||
221 | Diagnostic { | ||
222 | range: name_node.range(), | ||
223 | message: "unresolved module".to_string(), | ||
224 | severity: Severity::Error, | ||
225 | fix: Some(fix), | ||
226 | } | ||
227 | } | ||
228 | Problem::NotDirOwner { move_to, candidate } => { | ||
229 | let move_file = FileSystemEdit::MoveFile { | ||
230 | src: file_id, | ||
231 | dst_source_root: source_root, | ||
232 | dst_path: move_to.clone(), | ||
233 | }; | ||
234 | let create_file = FileSystemEdit::CreateFile { | ||
235 | source_root, | ||
236 | path: move_to.join(candidate), | ||
237 | }; | ||
238 | let fix = SourceChange { | ||
239 | label: "move file and create module".to_string(), | ||
240 | source_file_edits: Vec::new(), | ||
241 | file_system_edits: vec![move_file, create_file], | ||
242 | cursor_position: None, | ||
243 | }; | ||
244 | Diagnostic { | ||
245 | range: name_node.range(), | ||
246 | message: "can't declare module at this location".to_string(), | ||
247 | severity: Severity::Error, | ||
248 | fix: Some(fix), | ||
249 | } | ||
250 | } | ||
251 | }; | ||
252 | res.push(diag) | ||
253 | } | ||
254 | }; | ||
255 | Ok(res) | ||
256 | } | ||
257 | |||
258 | pub(crate) fn assists(&self, frange: FileRange) -> Vec<SourceChange> { | ||
259 | let file = self.source_file(frange.file_id); | ||
260 | assists::assists(&file, frange.range) | ||
261 | .into_iter() | ||
262 | .map(|local_edit| SourceChange::from_local_edit(frange.file_id, local_edit)) | ||
263 | .collect() | ||
264 | } | ||
265 | |||
266 | pub(crate) fn rename( | ||
267 | &self, | ||
268 | position: FilePosition, | ||
269 | new_name: &str, | ||
270 | ) -> Cancelable<Vec<SourceFileEdit>> { | ||
271 | let res = self | ||
272 | .find_all_refs(position)? | ||
273 | .iter() | ||
274 | .map(|(file_id, text_range)| SourceFileEdit { | ||
275 | file_id: *file_id, | ||
276 | edit: { | ||
277 | let mut builder = ra_text_edit::TextEditBuilder::default(); | ||
278 | builder.replace(*text_range, new_name.into()); | ||
279 | builder.finish() | ||
280 | }, | ||
281 | }) | ||
282 | .collect::<Vec<_>>(); | ||
283 | Ok(res) | ||
284 | } | ||
285 | pub(crate) fn index_resolve(&self, name_ref: &ast::NameRef) -> Cancelable<Vec<FileSymbol>> { | ||
286 | let name = name_ref.text(); | ||
287 | let mut query = Query::new(name.to_string()); | ||
288 | query.exact(); | ||
289 | query.limit(4); | ||
290 | crate::symbol_index::world_symbols(self, query) | ||
291 | } | ||
292 | } | ||
293 | |||
294 | impl SourceChange { | ||
295 | pub(crate) fn from_local_edit(file_id: FileId, edit: LocalEdit) -> SourceChange { | ||
296 | let file_edit = SourceFileEdit { | ||
297 | file_id, | ||
298 | edit: edit.edit, | ||
299 | }; | ||
300 | SourceChange { | ||
301 | label: edit.label, | ||
302 | source_file_edits: vec![file_edit], | ||
303 | file_system_edits: vec![], | ||
304 | cursor_position: edit | ||
305 | .cursor_position | ||
306 | .map(|offset| FilePosition { offset, file_id }), | ||
307 | } | ||
308 | } | ||
309 | } | ||
diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs new file mode 100644 index 000000000..7e9ca2034 --- /dev/null +++ b/crates/ra_ide_api/src/lib.rs | |||
@@ -0,0 +1,515 @@ | |||
1 | //! The `ra_ide_api` crate provides "ide-centric" APIs for rust-analyzer. That is, | ||
2 | //! it generally operates with files and text ranges, and returns results as | ||
3 | //! Strings, suitable for displaying to a human. | ||
4 | //! | ||
5 | //! What powers this API are the `RootDatabase` struct, which defines a `salsa` | ||
6 | //! database, and the `ra_hir` crate, where the majority of the analysis happens. | ||
7 | //! However, IDE-specific bits of the analysis (most notably completion) happen | ||
8 | //! in this crate. | ||
9 | //! | ||
10 | //! The sibling `ra_ide_api_light` handles those bits of IDE functionality | ||
11 | //! which are restricted to a single file and need only syntax. | ||
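// `ctry!` is an `Option`-aware early return for functions returning `Cancelable<Option<_>>`:
// it unwraps `Some(it)` and short-circuits with `Ok(None)` when the expression is `None`.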
12 | macro_rules! ctry { | ||
13 | ($expr:expr) => { | ||
14 | match $expr { | ||
15 | None => return Ok(None), | ||
16 | Some(it) => it, | ||
17 | } | ||
18 | }; | ||
19 | } | ||
20 | |||
21 | mod completion; | ||
22 | mod db; | ||
23 | mod goto_defenition; | ||
24 | mod imp; | ||
25 | pub mod mock_analysis; | ||
26 | mod runnables; | ||
27 | mod symbol_index; | ||
28 | |||
29 | mod extend_selection; | ||
30 | mod hover; | ||
31 | mod call_info; | ||
32 | mod syntax_highlighting; | ||
33 | |||
34 | use std::{fmt, sync::Arc}; | ||
35 | |||
36 | use ra_syntax::{SmolStr, SourceFile, TreePtr, SyntaxKind, TextRange, TextUnit}; | ||
37 | use ra_text_edit::TextEdit; | ||
38 | use ra_db::{SyntaxDatabase, FilesDatabase, LocalSyntaxPtr}; | ||
39 | use rayon::prelude::*; | ||
40 | use relative_path::RelativePathBuf; | ||
41 | use rustc_hash::FxHashMap; | ||
42 | use salsa::ParallelDatabase; | ||
43 | |||
44 | use crate::{ | ||
45 | symbol_index::{FileSymbol, SymbolIndex}, | ||
46 | db::LineIndexDatabase, | ||
47 | }; | ||
48 | |||
49 | pub use crate::{ | ||
50 | completion::{CompletionItem, CompletionItemKind, InsertText}, | ||
51 | runnables::{Runnable, RunnableKind}, | ||
52 | }; | ||
53 | pub use ra_ide_api_light::{ | ||
54 | Fold, FoldKind, HighlightedRange, Severity, StructureNode, | ||
55 | LineIndex, LineCol, translate_offset_with_edit, | ||
56 | }; | ||
57 | pub use ra_db::{ | ||
58 | Cancelable, Canceled, CrateGraph, CrateId, FileId, FilePosition, FileRange, SourceRootId | ||
59 | }; | ||
60 | |||
61 | #[derive(Default)] | ||
62 | pub struct AnalysisChange { | ||
63 | new_roots: Vec<(SourceRootId, bool)>, | ||
64 | roots_changed: FxHashMap<SourceRootId, RootChange>, | ||
65 | files_changed: Vec<(FileId, Arc<String>)>, | ||
66 | libraries_added: Vec<LibraryData>, | ||
67 | crate_graph: Option<CrateGraph>, | ||
68 | } | ||
69 | |||
70 | #[derive(Default)] | ||
71 | struct RootChange { | ||
72 | added: Vec<AddFile>, | ||
73 | removed: Vec<RemoveFile>, | ||
74 | } | ||
75 | |||
76 | #[derive(Debug)] | ||
77 | struct AddFile { | ||
78 | file_id: FileId, | ||
79 | path: RelativePathBuf, | ||
80 | text: Arc<String>, | ||
81 | } | ||
82 | |||
83 | #[derive(Debug)] | ||
84 | struct RemoveFile { | ||
85 | file_id: FileId, | ||
86 | path: RelativePathBuf, | ||
87 | } | ||
88 | |||
89 | impl fmt::Debug for AnalysisChange { | ||
90 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
91 | let mut d = fmt.debug_struct("AnalysisChange"); | ||
92 | if !self.new_roots.is_empty() { | ||
93 | d.field("new_roots", &self.new_roots); | ||
94 | } | ||
95 | if !self.roots_changed.is_empty() { | ||
96 | d.field("roots_changed", &self.roots_changed); | ||
97 | } | ||
98 | if !self.files_changed.is_empty() { | ||
99 | d.field("files_changed", &self.files_changed.len()); | ||
100 | } | ||
101 | if !self.libraries_added.is_empty() { | ||
102 | d.field("libraries_added", &self.libraries_added.len()); | ||
103 | } | ||
104 | if !self.crate_graph.is_some() { | ||
105 | d.field("crate_graph", &self.crate_graph); | ||
106 | } | ||
107 | d.finish() | ||
108 | } | ||
109 | } | ||
110 | |||
111 | impl fmt::Debug for RootChange { | ||
112 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
113 | fmt.debug_struct("AnalysisChange") | ||
114 | .field("added", &self.added.len()) | ||
115 | .field("removed", &self.removed.len()) | ||
116 | .finish() | ||
117 | } | ||
118 | } | ||
119 | |||
120 | impl AnalysisChange { | ||
121 | pub fn new() -> AnalysisChange { | ||
122 | AnalysisChange::default() | ||
123 | } | ||
124 | pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) { | ||
125 | self.new_roots.push((root_id, is_local)); | ||
126 | } | ||
127 | pub fn add_file( | ||
128 | &mut self, | ||
129 | root_id: SourceRootId, | ||
130 | file_id: FileId, | ||
131 | path: RelativePathBuf, | ||
132 | text: Arc<String>, | ||
133 | ) { | ||
134 | let file = AddFile { | ||
135 | file_id, | ||
136 | path, | ||
137 | text, | ||
138 | }; | ||
139 | self.roots_changed | ||
140 | .entry(root_id) | ||
141 | .or_default() | ||
142 | .added | ||
143 | .push(file); | ||
144 | } | ||
145 | pub fn change_file(&mut self, file_id: FileId, new_text: Arc<String>) { | ||
146 | self.files_changed.push((file_id, new_text)) | ||
147 | } | ||
148 | pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) { | ||
149 | let file = RemoveFile { file_id, path }; | ||
150 | self.roots_changed | ||
151 | .entry(root_id) | ||
152 | .or_default() | ||
153 | .removed | ||
154 | .push(file); | ||
155 | } | ||
156 | pub fn add_library(&mut self, data: LibraryData) { | ||
157 | self.libraries_added.push(data) | ||
158 | } | ||
159 | pub fn set_crate_graph(&mut self, graph: CrateGraph) { | ||
160 | self.crate_graph = Some(graph); | ||
161 | } | ||
162 | } | ||
163 | |||
164 | #[derive(Debug)] | ||
165 | pub struct SourceChange { | ||
166 | pub label: String, | ||
167 | pub source_file_edits: Vec<SourceFileEdit>, | ||
168 | pub file_system_edits: Vec<FileSystemEdit>, | ||
169 | pub cursor_position: Option<FilePosition>, | ||
170 | } | ||
171 | |||
172 | #[derive(Debug)] | ||
173 | pub struct SourceFileEdit { | ||
174 | pub file_id: FileId, | ||
175 | pub edit: TextEdit, | ||
176 | } | ||
177 | |||
178 | #[derive(Debug)] | ||
179 | pub enum FileSystemEdit { | ||
180 | CreateFile { | ||
181 | source_root: SourceRootId, | ||
182 | path: RelativePathBuf, | ||
183 | }, | ||
184 | MoveFile { | ||
185 | src: FileId, | ||
186 | dst_source_root: SourceRootId, | ||
187 | dst_path: RelativePathBuf, | ||
188 | }, | ||
189 | } | ||
190 | |||
191 | #[derive(Debug)] | ||
192 | pub struct Diagnostic { | ||
193 | pub message: String, | ||
194 | pub range: TextRange, | ||
195 | pub fix: Option<SourceChange>, | ||
196 | pub severity: Severity, | ||
197 | } | ||
198 | |||
199 | #[derive(Debug)] | ||
200 | pub struct Query { | ||
201 | query: String, | ||
202 | lowercased: String, | ||
203 | only_types: bool, | ||
204 | libs: bool, | ||
205 | exact: bool, | ||
206 | limit: usize, | ||
207 | } | ||
208 | |||
209 | impl Query { | ||
210 | pub fn new(query: String) -> Query { | ||
211 | let lowercased = query.to_lowercase(); | ||
212 | Query { | ||
213 | query, | ||
214 | lowercased, | ||
215 | only_types: false, | ||
216 | libs: false, | ||
217 | exact: false, | ||
218 | limit: usize::max_value(), | ||
219 | } | ||
220 | } | ||
221 | pub fn only_types(&mut self) { | ||
222 | self.only_types = true; | ||
223 | } | ||
224 | pub fn libs(&mut self) { | ||
225 | self.libs = true; | ||
226 | } | ||
227 | pub fn exact(&mut self) { | ||
228 | self.exact = true; | ||
229 | } | ||
230 | pub fn limit(&mut self, limit: usize) { | ||
231 | self.limit = limit | ||
232 | } | ||
233 | } | ||
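A small, hypothetical usage sketch (mirroring how `index_resolve` in `imp.rs` drives `Query`): build the query, tighten it with the setter methods above, then hand it to the symbol index through `Analysis::symbol_search`:

fn search_types(analysis: &Analysis) -> Cancelable<Vec<NavigationTarget>> {
    let mut query = Query::new("Foo".to_string());
    query.only_types(); // restrict to type-like symbols
    query.limit(10);    // cap the number of results
    analysis.symbol_search(query)
}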
234 | |||
235 | /// `NavigationTarget` represents an element in the editor's UI which you can | ||
236 | /// click on to navigate to a particular piece of code. | ||
237 | /// | ||
238 | /// Typically, a `NavigationTarget` corresponds to some element in the source | ||
239 | /// code, like a function or a struct, but this is not strictly required. | ||
240 | #[derive(Debug, Clone)] | ||
241 | pub struct NavigationTarget { | ||
242 | file_id: FileId, | ||
243 | name: SmolStr, | ||
244 | kind: SyntaxKind, | ||
245 | range: TextRange, | ||
246 | // Should be DefId ideally | ||
247 | ptr: Option<LocalSyntaxPtr>, | ||
248 | } | ||
249 | |||
250 | impl NavigationTarget { | ||
251 | fn from_symbol(symbol: FileSymbol) -> NavigationTarget { | ||
252 | NavigationTarget { | ||
253 | file_id: symbol.file_id, | ||
254 | name: symbol.name.clone(), | ||
255 | kind: symbol.ptr.kind(), | ||
256 | range: symbol.ptr.range(), | ||
257 | ptr: Some(symbol.ptr.clone()), | ||
258 | } | ||
259 | } | ||
260 | pub fn name(&self) -> &SmolStr { | ||
261 | &self.name | ||
262 | } | ||
263 | pub fn kind(&self) -> SyntaxKind { | ||
264 | self.kind | ||
265 | } | ||
266 | pub fn file_id(&self) -> FileId { | ||
267 | self.file_id | ||
268 | } | ||
269 | pub fn range(&self) -> TextRange { | ||
270 | self.range | ||
271 | } | ||
272 | } | ||
273 | |||
274 | #[derive(Debug)] | ||
275 | pub struct RangeInfo<T> { | ||
276 | pub range: TextRange, | ||
277 | pub info: T, | ||
278 | } | ||
279 | |||
280 | impl<T> RangeInfo<T> { | ||
281 | fn new(range: TextRange, info: T) -> RangeInfo<T> { | ||
282 | RangeInfo { range, info } | ||
283 | } | ||
284 | } | ||
285 | |||
286 | #[derive(Debug)] | ||
287 | pub struct CallInfo { | ||
288 | pub label: String, | ||
289 | pub doc: Option<String>, | ||
290 | pub parameters: Vec<String>, | ||
291 | pub active_parameter: Option<usize>, | ||
292 | } | ||
293 | |||
294 | /// `AnalysisHost` stores the current state of the world. | ||
295 | #[derive(Debug, Default)] | ||
296 | pub struct AnalysisHost { | ||
297 | db: db::RootDatabase, | ||
298 | } | ||
299 | |||
300 | impl AnalysisHost { | ||
301 | /// Returns a snapshot of the current state, which you can query for | ||
302 | /// semantic information. | ||
303 | pub fn analysis(&self) -> Analysis { | ||
304 | Analysis { | ||
305 | db: self.db.snapshot(), | ||
306 | } | ||
307 | } | ||
308 | /// Applies changes to the current state of the world. If there are | ||
309 | /// outstanding snapshots, they will be canceled. | ||
310 | pub fn apply_change(&mut self, change: AnalysisChange) { | ||
311 | self.db.apply_change(change) | ||
312 | } | ||
313 | } | ||
314 | |||
315 | /// Analysis is a snapshot of a world state at a moment in time. It is the main | ||
316 | /// entry point for querying semantic information about the world. When the world | ||
317 | /// state is advanced using the `AnalysisHost::apply_change` method, all existing | ||
318 | /// `Analysis` instances are canceled (most methods return `Err(Canceled)`). | ||
319 | #[derive(Debug)] | ||
320 | pub struct Analysis { | ||
321 | db: salsa::Snapshot<db::RootDatabase>, | ||
322 | } | ||
323 | |||
324 | impl Analysis { | ||
325 | /// Gets the text of the source file. | ||
326 | pub fn file_text(&self, file_id: FileId) -> Arc<String> { | ||
327 | self.db.file_text(file_id) | ||
328 | } | ||
329 | /// Gets the syntax tree of the file. | ||
330 | pub fn file_syntax(&self, file_id: FileId) -> TreePtr<SourceFile> { | ||
331 | self.db.source_file(file_id).clone() | ||
332 | } | ||
333 | /// Gets the file's `LineIndex`: a data structure to convert between absolute | ||
334 | /// offsets and line/column representation. | ||
335 | pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> { | ||
336 | self.db.line_index(file_id) | ||
337 | } | ||
338 | /// Selects the next syntactic node encompassing the range. | ||
339 | pub fn extend_selection(&self, frange: FileRange) -> TextRange { | ||
340 | extend_selection::extend_selection(&self.db, frange) | ||
341 | } | ||
342 | /// Returns the position of the matching brace (all types of braces are | ||
343 | /// supported). | ||
344 | pub fn matching_brace(&self, file: &SourceFile, offset: TextUnit) -> Option<TextUnit> { | ||
345 | ra_ide_api_light::matching_brace(file, offset) | ||
346 | } | ||
347 | /// Returns a syntax tree represented as `String`, for debug purposes. | ||
348 | // FIXME: use a better name here. | ||
349 | pub fn syntax_tree(&self, file_id: FileId) -> String { | ||
350 | let file = self.db.source_file(file_id); | ||
351 | ra_ide_api_light::syntax_tree(&file) | ||
352 | } | ||
353 | /// Returns an edit to remove all newlines in the range, cleaning up minor | ||
354 | /// stuff like trailing commas. | ||
355 | pub fn join_lines(&self, frange: FileRange) -> SourceChange { | ||
356 | let file = self.db.source_file(frange.file_id); | ||
357 | SourceChange::from_local_edit( | ||
358 | frange.file_id, | ||
359 | ra_ide_api_light::join_lines(&file, frange.range), | ||
360 | ) | ||
361 | } | ||
362 | /// Returns an edit which should be applied when opening a new line, fixing | ||
363 | /// up minor stuff like continuing the comment. | ||
364 | pub fn on_enter(&self, position: FilePosition) -> Option<SourceChange> { | ||
365 | let file = self.db.source_file(position.file_id); | ||
366 | let edit = ra_ide_api_light::on_enter(&file, position.offset)?; | ||
367 | Some(SourceChange::from_local_edit(position.file_id, edit)) | ||
368 | } | ||
369 | /// Returns an edit which should be applied after `=` was typed. Primarily, | ||
370 | /// this works when adding `let =`. | ||
371 | // FIXME: use a snippet completion instead of this hack here. | ||
372 | pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> { | ||
373 | let file = self.db.source_file(position.file_id); | ||
374 | let edit = ra_ide_api_light::on_eq_typed(&file, position.offset)?; | ||
375 | Some(SourceChange::from_local_edit(position.file_id, edit)) | ||
376 | } | ||
377 | /// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. | ||
378 | pub fn on_dot_typed(&self, position: FilePosition) -> Option<SourceChange> { | ||
379 | let file = self.db.source_file(position.file_id); | ||
380 | let edit = ra_ide_api_light::on_dot_typed(&file, position.offset)?; | ||
381 | Some(SourceChange::from_local_edit(position.file_id, edit)) | ||
382 | } | ||
383 | /// Returns a tree representation of symbols in the file. Useful to draw a | ||
384 | /// file outline. | ||
385 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { | ||
386 | let file = self.db.source_file(file_id); | ||
387 | ra_ide_api_light::file_structure(&file) | ||
388 | } | ||
389 | /// Returns the set of folding ranges. | ||
390 | pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> { | ||
391 | let file = self.db.source_file(file_id); | ||
392 | ra_ide_api_light::folding_ranges(&file) | ||
393 | } | ||
394 | /// Fuzzy searches for a symbol. | ||
395 | pub fn symbol_search(&self, query: Query) -> Cancelable<Vec<NavigationTarget>> { | ||
396 | let res = symbol_index::world_symbols(&*self.db, query)? | ||
397 | .into_iter() | ||
398 | .map(NavigationTarget::from_symbol) | ||
399 | .collect(); | ||
400 | Ok(res) | ||
401 | } | ||
402 | pub fn goto_defenition( | ||
403 | &self, | ||
404 | position: FilePosition, | ||
405 | ) -> Cancelable<Option<Vec<NavigationTarget>>> { | ||
406 | goto_defenition::goto_defenition(&*self.db, position) | ||
407 | } | ||
408 | /// Finds all usages of the reference at point. | ||
409 | pub fn find_all_refs(&self, position: FilePosition) -> Cancelable<Vec<(FileId, TextRange)>> { | ||
410 | self.db.find_all_refs(position) | ||
411 | } | ||
412 | /// Returns a short text describing the element at the given position. | ||
413 | pub fn hover(&self, position: FilePosition) -> Cancelable<Option<RangeInfo<String>>> { | ||
414 | hover::hover(&*self.db, position) | ||
415 | } | ||
416 | /// Computes parameter information for the given call expression. | ||
417 | pub fn call_info(&self, position: FilePosition) -> Cancelable<Option<CallInfo>> { | ||
418 | call_info::call_info(&*self.db, position) | ||
419 | } | ||
420 | /// Returns a `mod name;` declaration which created the current module. | ||
421 | pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<NavigationTarget>> { | ||
422 | self.db.parent_module(position) | ||
423 | } | ||
424 | /// Returns the crates this file belongs to. | ||
425 | pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { | ||
426 | self.db.crate_for(file_id) | ||
427 | } | ||
428 | /// Returns the root file of the given crate. | ||
429 | pub fn crate_root(&self, crate_id: CrateId) -> Cancelable<FileId> { | ||
430 | Ok(self.db.crate_graph().crate_root(crate_id)) | ||
431 | } | ||
432 | /// Returns the set of possible targets to run for the current file. | ||
433 | pub fn runnables(&self, file_id: FileId) -> Cancelable<Vec<Runnable>> { | ||
434 | runnables::runnables(&*self.db, file_id) | ||
435 | } | ||
436 | /// Computes syntax highlighting for the given file. | ||
437 | pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> { | ||
438 | syntax_highlighting::highlight(&*self.db, file_id) | ||
439 | } | ||
440 | /// Computes completions at the given position. | ||
441 | pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> { | ||
442 | let completions = completion::completions(&self.db, position)?; | ||
443 | Ok(completions.map(|it| it.into())) | ||
444 | } | ||
445 | /// Computes assists (aka code actions, aka intentions) for the given | ||
446 | /// position. | ||
447 | pub fn assists(&self, frange: FileRange) -> Cancelable<Vec<SourceChange>> { | ||
448 | Ok(self.db.assists(frange)) | ||
449 | } | ||
450 | /// Computes the set of diagnostics for the given file. | ||
451 | pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> { | ||
452 | self.db.diagnostics(file_id) | ||
453 | } | ||
454 | /// Computes the type of the expression in the given range. | ||
455 | pub fn type_of(&self, frange: FileRange) -> Cancelable<Option<String>> { | ||
456 | hover::type_of(&*self.db, frange) | ||
457 | } | ||
458 | /// Returns the edit required to rename the reference at the position to the new | ||
459 | /// name. | ||
460 | pub fn rename( | ||
461 | &self, | ||
462 | position: FilePosition, | ||
463 | new_name: &str, | ||
464 | ) -> Cancelable<Vec<SourceFileEdit>> { | ||
465 | self.db.rename(position, new_name) | ||
466 | } | ||
467 | } | ||
468 | |||
469 | pub struct LibraryData { | ||
470 | root_id: SourceRootId, | ||
471 | root_change: RootChange, | ||
472 | symbol_index: SymbolIndex, | ||
473 | } | ||
474 | |||
475 | impl fmt::Debug for LibraryData { | ||
476 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
477 | f.debug_struct("LibraryData") | ||
478 | .field("root_id", &self.root_id) | ||
479 | .field("root_change", &self.root_change) | ||
480 | .field("n_symbols", &self.symbol_index.len()) | ||
481 | .finish() | ||
482 | } | ||
483 | } | ||
484 | |||
485 | impl LibraryData { | ||
486 | pub fn prepare( | ||
487 | root_id: SourceRootId, | ||
488 | files: Vec<(FileId, RelativePathBuf, Arc<String>)>, | ||
489 | ) -> LibraryData { | ||
490 | let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| { | ||
491 | let file = SourceFile::parse(text); | ||
492 | (*file_id, file) | ||
493 | })); | ||
494 | let mut root_change = RootChange::default(); | ||
495 | root_change.added = files | ||
496 | .into_iter() | ||
497 | .map(|(file_id, path, text)| AddFile { | ||
498 | file_id, | ||
499 | path, | ||
500 | text, | ||
501 | }) | ||
502 | .collect(); | ||
503 | LibraryData { | ||
504 | root_id, | ||
505 | root_change, | ||
506 | symbol_index, | ||
507 | } | ||
508 | } | ||
509 | } | ||
510 | |||
511 | #[test] | ||
512 | fn analysis_is_send() { | ||
513 | fn is_send<T: Send>() {} | ||
514 | is_send::<Analysis>(); | ||
515 | } | ||
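All of the `Analysis` entry points above are keyed by a `FilePosition`, `FileRange`, or `FileId`. As a rough illustration (not part of this commit), a test could drive a few of them through the `single_file_with_position` helper that `mock_analysis.rs` introduces later in this diff; the fixture text below is invented.

// Illustrative sketch only, not part of the committed code: exercises a couple
// of `Analysis` methods via the mock_analysis helpers added later in this diff.
use crate::mock_analysis::single_file_with_position;

#[test]
fn demo_analysis_entry_points() {
    // `<|>` marks the cursor inside the (hypothetical) fixture.
    let (analysis, position) =
        single_file_with_position("fn foo(x: i32) {} fn main() { foo(<|>); }");
    // Call info and completions are both keyed by the same `FilePosition`;
    // the queries are cancelable, so `unwrap` is fine in a test context.
    let _call_info = analysis.call_info(position).unwrap();
    let _completions = analysis.completions(position).unwrap();
}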
diff --git a/crates/ra_ide_api/src/mock_analysis.rs b/crates/ra_ide_api/src/mock_analysis.rs new file mode 100644 index 000000000..846c76cfe --- /dev/null +++ b/crates/ra_ide_api/src/mock_analysis.rs | |||
@@ -0,0 +1,135 @@ | |||
1 | use std::sync::Arc; | ||
2 | |||
3 | use relative_path::RelativePathBuf; | ||
4 | use test_utils::{extract_offset, extract_range, parse_fixture, CURSOR_MARKER}; | ||
5 | use ra_db::mock::FileMap; | ||
6 | |||
7 | use crate::{Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, FilePosition, FileRange, SourceRootId}; | ||
8 | |||
9 | /// Mock analysis is used in tests to bootstrap an AnalysisHost/Analysis | ||
10 | /// from a set of in-memory files. | ||
11 | #[derive(Debug, Default)] | ||
12 | pub struct MockAnalysis { | ||
13 | files: Vec<(String, String)>, | ||
14 | } | ||
15 | |||
16 | impl MockAnalysis { | ||
17 | pub fn new() -> MockAnalysis { | ||
18 | MockAnalysis::default() | ||
19 | } | ||
20 | /// Creates `MockAnalysis` from fixture data in the following format: | ||
21 | /// | ||
22 | /// ```notrust | ||
23 | /// //- /main.rs | ||
24 | /// mod foo; | ||
25 | /// fn main() {} | ||
26 | /// | ||
27 | /// //- /foo.rs | ||
28 | /// struct Baz; | ||
29 | /// ``` | ||
30 | pub fn with_files(fixture: &str) -> MockAnalysis { | ||
31 | let mut res = MockAnalysis::new(); | ||
32 | for entry in parse_fixture(fixture) { | ||
33 | res.add_file(&entry.meta, &entry.text); | ||
34 | } | ||
35 | res | ||
36 | } | ||
37 | |||
38 | /// Same as `with_files`, but requires that a single file contains a `<|>` marker, | ||
39 | /// whose position is also returned. | ||
40 | pub fn with_files_and_position(fixture: &str) -> (MockAnalysis, FilePosition) { | ||
41 | let mut position = None; | ||
42 | let mut res = MockAnalysis::new(); | ||
43 | for entry in parse_fixture(fixture) { | ||
44 | if entry.text.contains(CURSOR_MARKER) { | ||
45 | assert!( | ||
46 | position.is_none(), | ||
47 | "only one marker (<|>) per fixture is allowed" | ||
48 | ); | ||
49 | position = Some(res.add_file_with_position(&entry.meta, &entry.text)); | ||
50 | } else { | ||
51 | res.add_file(&entry.meta, &entry.text); | ||
52 | } | ||
53 | } | ||
54 | let position = position.expect("expected a marker (<|>)"); | ||
55 | (res, position) | ||
56 | } | ||
57 | |||
58 | pub fn add_file(&mut self, path: &str, text: &str) -> FileId { | ||
59 | let file_id = FileId((self.files.len() + 1) as u32); | ||
60 | self.files.push((path.to_string(), text.to_string())); | ||
61 | file_id | ||
62 | } | ||
63 | pub fn add_file_with_position(&mut self, path: &str, text: &str) -> FilePosition { | ||
64 | let (offset, text) = extract_offset(text); | ||
65 | let file_id = FileId((self.files.len() + 1) as u32); | ||
66 | self.files.push((path.to_string(), text.to_string())); | ||
67 | FilePosition { file_id, offset } | ||
68 | } | ||
69 | pub fn add_file_with_range(&mut self, path: &str, text: &str) -> FileRange { | ||
70 | let (range, text) = extract_range(text); | ||
71 | let file_id = FileId((self.files.len() + 1) as u32); | ||
72 | self.files.push((path.to_string(), text.to_string())); | ||
73 | FileRange { file_id, range } | ||
74 | } | ||
75 | pub fn id_of(&self, path: &str) -> FileId { | ||
76 | let (idx, _) = self | ||
77 | .files | ||
78 | .iter() | ||
79 | .enumerate() | ||
80 | .find(|(_, (p, _text))| path == p) | ||
81 | .expect("no file in this mock"); | ||
82 | FileId(idx as u32 + 1) | ||
83 | } | ||
84 | pub fn analysis_host(self) -> AnalysisHost { | ||
85 | let mut host = AnalysisHost::default(); | ||
86 | let mut file_map = FileMap::default(); | ||
87 | let source_root = SourceRootId(0); | ||
88 | let mut change = AnalysisChange::new(); | ||
89 | change.add_root(source_root, true); | ||
90 | let mut crate_graph = CrateGraph::default(); | ||
91 | for (path, contents) in self.files.into_iter() { | ||
92 | assert!(path.starts_with('/')); | ||
93 | let path = RelativePathBuf::from_path(&path[1..]).unwrap(); | ||
94 | let file_id = file_map.add(path.clone()); | ||
95 | if path == "/lib.rs" || path == "/main.rs" { | ||
96 | crate_graph.add_crate_root(file_id); | ||
97 | } | ||
98 | change.add_file(source_root, file_id, path, Arc::new(contents)); | ||
99 | } | ||
100 | change.set_crate_graph(crate_graph); | ||
101 | // change.set_file_resolver(Arc::new(file_map)); | ||
102 | host.apply_change(change); | ||
103 | host | ||
104 | } | ||
105 | pub fn analysis(self) -> Analysis { | ||
106 | self.analysis_host().analysis() | ||
107 | } | ||
108 | } | ||
109 | |||
110 | /// Creates analysis from a multi-file fixture, returns the position marked with <|>. | ||
111 | pub fn analysis_and_position(fixture: &str) -> (Analysis, FilePosition) { | ||
112 | let (mock, position) = MockAnalysis::with_files_and_position(fixture); | ||
113 | (mock.analysis(), position) | ||
114 | } | ||
115 | |||
116 | /// Creates analysis for a single file. | ||
117 | pub fn single_file(code: &str) -> (Analysis, FileId) { | ||
118 | let mut mock = MockAnalysis::new(); | ||
119 | let file_id = mock.add_file("/main.rs", code); | ||
120 | (mock.analysis(), file_id) | ||
121 | } | ||
122 | |||
123 | /// Creates analysis for a single file, returns the position marked with <|>. | ||
124 | pub fn single_file_with_position(code: &str) -> (Analysis, FilePosition) { | ||
125 | let mut mock = MockAnalysis::new(); | ||
126 | let pos = mock.add_file_with_position("/main.rs", code); | ||
127 | (mock.analysis(), pos) | ||
128 | } | ||
129 | |||
130 | /// Creates analysis for a single file, returns the range marked with a pair of <|> markers. | ||
131 | pub fn single_file_with_range(code: &str) -> (Analysis, FileRange) { | ||
132 | let mut mock = MockAnalysis::new(); | ||
133 | let pos = mock.add_file_with_range("/main.rs", code); | ||
134 | (mock.analysis(), pos) | ||
135 | } | ||
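A minimal sketch of how these helpers are meant to be used (the fixture contents are invented; `analysis_and_position` and `goto_defenition` are the items defined in this diff):

// Illustrative sketch only: builds a two-file mock workspace and runs one query.
use crate::mock_analysis::analysis_and_position;

#[test]
fn demo_multi_file_fixture() {
    // `/lib.rs` becomes a crate root; `<|>` marks the cursor whose
    // `FilePosition` is returned alongside the `Analysis`.
    let (analysis, position) = analysis_and_position(
        "
        //- /lib.rs
        mod foo;
        fn main() { foo::bar(<|>); }

        //- /foo.rs
        pub fn bar(x: i32) -> i32 { x }
        ",
    );
    // Spelling matches the method defined in lib.rs above.
    let _nav_targets = analysis.goto_defenition(position).unwrap();
}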
diff --git a/crates/ra_ide_api/src/runnables.rs b/crates/ra_ide_api/src/runnables.rs new file mode 100644 index 000000000..98b1d2d55 --- /dev/null +++ b/crates/ra_ide_api/src/runnables.rs | |||
@@ -0,0 +1,89 @@ | |||
1 | use itertools::Itertools; | ||
2 | use ra_syntax::{ | ||
3 | TextRange, SyntaxNode, | ||
4 | ast::{self, AstNode, NameOwner, ModuleItemOwner}, | ||
5 | }; | ||
6 | use ra_db::{Cancelable, SyntaxDatabase}; | ||
7 | |||
8 | use crate::{db::RootDatabase, FileId}; | ||
9 | |||
10 | #[derive(Debug)] | ||
11 | pub struct Runnable { | ||
12 | pub range: TextRange, | ||
13 | pub kind: RunnableKind, | ||
14 | } | ||
15 | |||
16 | #[derive(Debug)] | ||
17 | pub enum RunnableKind { | ||
18 | Test { name: String }, | ||
19 | TestMod { path: String }, | ||
20 | Bin, | ||
21 | } | ||
22 | |||
23 | pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Cancelable<Vec<Runnable>> { | ||
24 | let source_file = db.source_file(file_id); | ||
25 | let res = source_file | ||
26 | .syntax() | ||
27 | .descendants() | ||
28 | .filter_map(|i| runnable(db, file_id, i)) | ||
29 | .collect(); | ||
30 | Ok(res) | ||
31 | } | ||
32 | |||
33 | fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Runnable> { | ||
34 | if let Some(fn_def) = ast::FnDef::cast(item) { | ||
35 | runnable_fn(fn_def) | ||
36 | } else if let Some(m) = ast::Module::cast(item) { | ||
37 | runnable_mod(db, file_id, m) | ||
38 | } else { | ||
39 | None | ||
40 | } | ||
41 | } | ||
42 | |||
43 | fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> { | ||
44 | let name = fn_def.name()?.text(); | ||
45 | let kind = if name == "main" { | ||
46 | RunnableKind::Bin | ||
47 | } else if fn_def.has_atom_attr("test") { | ||
48 | RunnableKind::Test { | ||
49 | name: name.to_string(), | ||
50 | } | ||
51 | } else { | ||
52 | return None; | ||
53 | }; | ||
54 | Some(Runnable { | ||
55 | range: fn_def.syntax().range(), | ||
56 | kind, | ||
57 | }) | ||
58 | } | ||
59 | |||
60 | fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option<Runnable> { | ||
61 | let has_test_function = module | ||
62 | .item_list()? | ||
63 | .items() | ||
64 | .filter_map(|it| match it.kind() { | ||
65 | ast::ModuleItemKind::FnDef(it) => Some(it), | ||
66 | _ => None, | ||
67 | }) | ||
68 | .any(|f| f.has_atom_attr("test")); | ||
69 | if !has_test_function { | ||
70 | return None; | ||
71 | } | ||
72 | let range = module.syntax().range(); | ||
73 | let module = | ||
74 | hir::source_binder::module_from_child_node(db, file_id, module.syntax()).ok()??; | ||
75 | |||
76 | // FIXME: thread cancellation instead of `.ok`ing | ||
77 | let path = module | ||
78 | .path_to_root(db) | ||
79 | .ok()? | ||
80 | .into_iter() | ||
81 | .rev() | ||
82 | .filter_map(|it| it.name(db).ok()) | ||
83 | .filter_map(|it| it) | ||
84 | .join("::"); | ||
85 | Some(Runnable { | ||
86 | range, | ||
87 | kind: RunnableKind::TestMod { path }, | ||
88 | }) | ||
89 | } | ||
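Assuming the `single_file` helper from `mock_analysis.rs`, here is a sketch of the output one would expect from the walk above for a file containing a `main` function and a `#[test]` function (the exact count is illustrative):

// Illustrative sketch only: the fixture is invented.
use crate::mock_analysis::single_file;

#[test]
fn demo_runnables() {
    let (analysis, file_id) = single_file(
        "
        fn main() {}

        #[test]
        fn test_foo() {}
        ",
    );
    let runnables = analysis.runnables(file_id).unwrap();
    // Expect one `RunnableKind::Bin` for `main` and one `RunnableKind::Test`
    // for `test_foo`; `TestMod` entries only appear for modules that contain
    // `#[test]` functions.
    assert_eq!(runnables.len(), 2);
}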
diff --git a/crates/ra_ide_api/src/symbol_index.rs b/crates/ra_ide_api/src/symbol_index.rs new file mode 100644 index 000000000..8dd15b40e --- /dev/null +++ b/crates/ra_ide_api/src/symbol_index.rs | |||
@@ -0,0 +1,222 @@ | |||
1 | //! This module handles fuzzy-searching of functions, structs and other symbols | ||
2 | //! by name across the whole workspace and dependencies. | ||
3 | //! | ||
4 | //! It works by building an incrementally-updated text-search index of all | ||
5 | //! symbols. The backbone of the index is the **awesome** `fst` crate by | ||
6 | //! @BurntSushi. | ||
7 | //! | ||
8 | //! In a nutshell, you give a set of strings to the `fst`, and it builds a | ||
9 | //! finite state machine describing this set of strings. The strings which | ||
10 | //! could fuzzy-match a pattern can also be described by a finite state machine. | ||
11 | //! What is freakingly cool is that you can now traverse both state machines in | ||
12 | //! lock-step to enumerate the strings which are both in the input set and | ||
13 | //! fuzzy-match the query. Or, more formally, given two languages described by | ||
14 | //! fsts, one can build a product fst which describes the intersection of the | ||
15 | //! languages. | ||
16 | //! | ||
17 | //! `fst` does not support cheap updating of the index, but it supports unioning | ||
18 | //! of state machines. So, to account for changing source code, we build an fst | ||
19 | //! for each library (which is assumed to never change) and an fst for each Rust | ||
20 | //! file in the current workspace, and run a query against the union of all | ||
21 | //! those fsts. | ||
22 | use std::{ | ||
23 | cmp::Ordering, | ||
24 | hash::{Hash, Hasher}, | ||
25 | sync::Arc, | ||
26 | }; | ||
27 | |||
28 | use fst::{self, Streamer}; | ||
29 | use ra_syntax::{ | ||
30 | SyntaxNode, SourceFile, SmolStr, TreePtr, AstNode, | ||
31 | algo::{visit::{visitor, Visitor}, find_covering_node}, | ||
32 | SyntaxKind::{self, *}, | ||
33 | ast::{self, NameOwner}, | ||
34 | }; | ||
35 | use ra_db::{SourceRootId, FilesDatabase, LocalSyntaxPtr}; | ||
36 | use salsa::ParallelDatabase; | ||
37 | use rayon::prelude::*; | ||
38 | |||
39 | use crate::{ | ||
40 | Cancelable, FileId, Query, | ||
41 | db::RootDatabase, | ||
42 | }; | ||
43 | |||
44 | salsa::query_group! { | ||
45 | pub(crate) trait SymbolsDatabase: hir::db::HirDatabase { | ||
46 | fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> { | ||
47 | type FileSymbolsQuery; | ||
48 | } | ||
49 | fn library_symbols(id: SourceRootId) -> Arc<SymbolIndex> { | ||
50 | type LibrarySymbolsQuery; | ||
51 | storage input; | ||
52 | } | ||
53 | } | ||
54 | } | ||
55 | |||
56 | fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> { | ||
57 | db.check_canceled()?; | ||
58 | let source_file = db.source_file(file_id); | ||
59 | let mut symbols = source_file | ||
60 | .syntax() | ||
61 | .descendants() | ||
62 | .filter_map(to_symbol) | ||
63 | .map(move |(name, ptr)| FileSymbol { name, ptr, file_id }) | ||
64 | .collect::<Vec<_>>(); | ||
65 | |||
66 | for (name, text_range) in hir::source_binder::macro_symbols(db, file_id)? { | ||
67 | let node = find_covering_node(source_file.syntax(), text_range); | ||
68 | let ptr = LocalSyntaxPtr::new(node); | ||
69 | symbols.push(FileSymbol { file_id, name, ptr }) | ||
70 | } | ||
71 | |||
72 | Ok(Arc::new(SymbolIndex::new(symbols))) | ||
73 | } | ||
74 | |||
75 | pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Cancelable<Vec<FileSymbol>> { | ||
76 | /// Need to wrap Snapshot to provide `Clone` impl for `map_with` | ||
77 | struct Snap(salsa::Snapshot<RootDatabase>); | ||
78 | impl Clone for Snap { | ||
79 | fn clone(&self) -> Snap { | ||
80 | Snap(self.0.snapshot()) | ||
81 | } | ||
82 | } | ||
83 | |||
84 | let buf: Vec<Arc<SymbolIndex>> = if query.libs { | ||
85 | let snap = Snap(db.snapshot()); | ||
86 | db.library_roots() | ||
87 | .par_iter() | ||
88 | .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id)) | ||
89 | .collect() | ||
90 | } else { | ||
91 | let mut files = Vec::new(); | ||
92 | for &root in db.local_roots().iter() { | ||
93 | let sr = db.source_root(root); | ||
94 | files.extend(sr.files.values().map(|&it| it)) | ||
95 | } | ||
96 | |||
97 | let snap = Snap(db.snapshot()); | ||
98 | files | ||
99 | .par_iter() | ||
100 | .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) | ||
101 | .filter_map(|it| it.ok()) | ||
102 | .collect() | ||
103 | }; | ||
104 | Ok(query.search(&buf)) | ||
105 | } | ||
106 | |||
107 | #[derive(Default, Debug)] | ||
108 | pub(crate) struct SymbolIndex { | ||
109 | symbols: Vec<FileSymbol>, | ||
110 | map: fst::Map, | ||
111 | } | ||
112 | |||
113 | impl PartialEq for SymbolIndex { | ||
114 | fn eq(&self, other: &SymbolIndex) -> bool { | ||
115 | self.symbols == other.symbols | ||
116 | } | ||
117 | } | ||
118 | |||
119 | impl Eq for SymbolIndex {} | ||
120 | |||
121 | impl Hash for SymbolIndex { | ||
122 | fn hash<H: Hasher>(&self, hasher: &mut H) { | ||
123 | self.symbols.hash(hasher) | ||
124 | } | ||
125 | } | ||
126 | |||
127 | impl SymbolIndex { | ||
128 | fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex { | ||
129 | fn cmp(s1: &FileSymbol, s2: &FileSymbol) -> Ordering { | ||
130 | unicase::Ascii::new(s1.name.as_str()).cmp(&unicase::Ascii::new(s2.name.as_str())) | ||
131 | } | ||
132 | symbols.par_sort_by(cmp); | ||
133 | symbols.dedup_by(|s1, s2| cmp(s1, s2) == Ordering::Equal); | ||
134 | let names = symbols.iter().map(|it| it.name.as_str().to_lowercase()); | ||
135 | let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap(); | ||
136 | SymbolIndex { symbols, map } | ||
137 | } | ||
138 | |||
139 | pub(crate) fn len(&self) -> usize { | ||
140 | self.symbols.len() | ||
141 | } | ||
142 | |||
143 | pub(crate) fn for_files( | ||
144 | files: impl ParallelIterator<Item = (FileId, TreePtr<SourceFile>)>, | ||
145 | ) -> SymbolIndex { | ||
146 | let symbols = files | ||
147 | .flat_map(|(file_id, file)| { | ||
148 | file.syntax() | ||
149 | .descendants() | ||
150 | .filter_map(to_symbol) | ||
151 | .map(move |(name, ptr)| FileSymbol { name, ptr, file_id }) | ||
152 | .collect::<Vec<_>>() | ||
153 | }) | ||
154 | .collect::<Vec<_>>(); | ||
155 | SymbolIndex::new(symbols) | ||
156 | } | ||
157 | } | ||
158 | |||
159 | impl Query { | ||
160 | pub(crate) fn search(self, indices: &[Arc<SymbolIndex>]) -> Vec<FileSymbol> { | ||
161 | let mut op = fst::map::OpBuilder::new(); | ||
162 | for file_symbols in indices.iter() { | ||
163 | let automaton = fst::automaton::Subsequence::new(&self.lowercased); | ||
164 | op = op.add(file_symbols.map.search(automaton)) | ||
165 | } | ||
166 | let mut stream = op.union(); | ||
167 | let mut res = Vec::new(); | ||
168 | while let Some((_, indexed_values)) = stream.next() { | ||
169 | if res.len() >= self.limit { | ||
170 | break; | ||
171 | } | ||
172 | for indexed_value in indexed_values { | ||
173 | let file_symbols = &indices[indexed_value.index]; | ||
174 | let idx = indexed_value.value as usize; | ||
175 | |||
176 | let symbol = &file_symbols.symbols[idx]; | ||
177 | if self.only_types && !is_type(symbol.ptr.kind()) { | ||
178 | continue; | ||
179 | } | ||
180 | if self.exact && symbol.name != self.query { | ||
181 | continue; | ||
182 | } | ||
183 | res.push(symbol.clone()); | ||
184 | } | ||
185 | } | ||
186 | res | ||
187 | } | ||
188 | } | ||
189 | |||
190 | fn is_type(kind: SyntaxKind) -> bool { | ||
191 | match kind { | ||
192 | STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_DEF => true, | ||
193 | _ => false, | ||
194 | } | ||
195 | } | ||
196 | |||
197 | /// The actual data that is stored in the index. It should be as compact as | ||
198 | /// possible. | ||
199 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
200 | pub(crate) struct FileSymbol { | ||
201 | pub(crate) file_id: FileId, | ||
202 | pub(crate) name: SmolStr, | ||
203 | pub(crate) ptr: LocalSyntaxPtr, | ||
204 | } | ||
205 | |||
206 | fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, LocalSyntaxPtr)> { | ||
207 | fn decl<N: NameOwner>(node: &N) -> Option<(SmolStr, LocalSyntaxPtr)> { | ||
208 | let name = node.name()?.text().clone(); | ||
209 | let ptr = LocalSyntaxPtr::new(node.syntax()); | ||
210 | Some((name, ptr)) | ||
211 | } | ||
212 | visitor() | ||
213 | .visit(decl::<ast::FnDef>) | ||
214 | .visit(decl::<ast::StructDef>) | ||
215 | .visit(decl::<ast::EnumDef>) | ||
216 | .visit(decl::<ast::TraitDef>) | ||
217 | .visit(decl::<ast::Module>) | ||
218 | .visit(decl::<ast::TypeDef>) | ||
219 | .visit(decl::<ast::ConstDef>) | ||
220 | .visit(decl::<ast::StaticDef>) | ||
221 | .accept(node)? | ||
222 | } | ||
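The module docs above boil down to one trick: an `fst::Map` over lowercased symbol names, intersected with a subsequence automaton built from the query. Below is a minimal standalone sketch of that idea, using invented symbol names and a single map instead of the union of per-file and per-library fsts:

// Illustrative sketch only: one fst::Map searched with a subsequence automaton.
use fst::{self, IntoStreamer, Streamer};

fn demo_subsequence_search() {
    // Keys must be unique and sorted for `Map::from_iter`; the values are the
    // indices that `SymbolIndex` uses to point back into its `symbols` vector.
    let names = vec!["analysis", "analysishost", "mockanalysis", "symbolindex"];
    let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap();

    // "syidx" fuzzy-matches "symbolindex" because its letters occur in order.
    let automaton = fst::automaton::Subsequence::new("syidx");
    let mut stream = map.search(automaton).into_stream();
    while let Some((name, idx)) = stream.next() {
        println!("{} -> {}", String::from_utf8_lossy(name), idx);
    }
}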
diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs new file mode 100644 index 000000000..cb19e9515 --- /dev/null +++ b/crates/ra_ide_api/src/syntax_highlighting.rs | |||
@@ -0,0 +1,92 @@ | |||
1 | use ra_syntax::{ast, AstNode,}; | ||
2 | use ra_db::SyntaxDatabase; | ||
3 | |||
4 | use crate::{ | ||
5 | FileId, Cancelable, HighlightedRange, | ||
6 | db::RootDatabase, | ||
7 | }; | ||
8 | |||
9 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> { | ||
10 | let source_file = db.source_file(file_id); | ||
11 | let mut res = ra_ide_api_light::highlight(source_file.syntax()); | ||
12 | for macro_call in source_file | ||
13 | .syntax() | ||
14 | .descendants() | ||
15 | .filter_map(ast::MacroCall::cast) | ||
16 | { | ||
17 | if let Some((off, exp)) = hir::MacroDef::ast_expand(macro_call) { | ||
18 | let mapped_ranges = ra_ide_api_light::highlight(&exp.syntax()) | ||
19 | .into_iter() | ||
20 | .filter_map(|r| { | ||
21 | let mapped_range = exp.map_range_back(r.range)?; | ||
22 | let res = HighlightedRange { | ||
23 | range: mapped_range + off, | ||
24 | tag: r.tag, | ||
25 | }; | ||
26 | Some(res) | ||
27 | }); | ||
28 | res.extend(mapped_ranges); | ||
29 | } | ||
30 | } | ||
31 | Ok(res) | ||
32 | } | ||
33 | |||
34 | #[cfg(test)] | ||
35 | mod tests { | ||
36 | use crate::mock_analysis::single_file; | ||
37 | use test_utils::assert_eq_dbg; | ||
38 | |||
39 | #[test] | ||
40 | fn highlights_code_inside_macros() { | ||
41 | let (analysis, file_id) = single_file( | ||
42 | " | ||
43 | fn main() { | ||
44 | ctry!({ let x = 92; x}); | ||
45 | vec![{ let x = 92; x}]; | ||
46 | } | ||
47 | ", | ||
48 | ); | ||
49 | let highlights = analysis.highlight(file_id).unwrap(); | ||
50 | assert_eq_dbg( | ||
51 | r#"[HighlightedRange { range: [13; 15), tag: "keyword" }, | ||
52 | HighlightedRange { range: [16; 20), tag: "function" }, | ||
53 | HighlightedRange { range: [41; 46), tag: "macro" }, | ||
54 | HighlightedRange { range: [49; 52), tag: "keyword" }, | ||
55 | HighlightedRange { range: [57; 59), tag: "literal" }, | ||
56 | HighlightedRange { range: [82; 86), tag: "macro" }, | ||
57 | HighlightedRange { range: [89; 92), tag: "keyword" }, | ||
58 | HighlightedRange { range: [97; 99), tag: "literal" }, | ||
59 | HighlightedRange { range: [49; 52), tag: "keyword" }, | ||
60 | HighlightedRange { range: [53; 54), tag: "function" }, | ||
61 | HighlightedRange { range: [57; 59), tag: "literal" }, | ||
62 | HighlightedRange { range: [61; 62), tag: "text" }, | ||
63 | HighlightedRange { range: [89; 92), tag: "keyword" }, | ||
64 | HighlightedRange { range: [93; 94), tag: "function" }, | ||
65 | HighlightedRange { range: [97; 99), tag: "literal" }, | ||
66 | HighlightedRange { range: [101; 102), tag: "text" }]"#, | ||
67 | &highlights, | ||
68 | ) | ||
69 | } | ||
70 | |||
71 | // FIXME: this test is not really necessary: artifact of the initial hacky | ||
72 | // macros implementation. | ||
73 | #[test] | ||
74 | fn highlight_query_group_macro() { | ||
75 | let (analysis, file_id) = single_file( | ||
76 | " | ||
77 | salsa::query_group! { | ||
78 | pub trait HirDatabase: SyntaxDatabase {} | ||
79 | } | ||
80 | ", | ||
81 | ); | ||
82 | let highlights = analysis.highlight(file_id).unwrap(); | ||
83 | assert_eq_dbg( | ||
84 | r#"[HighlightedRange { range: [20; 32), tag: "macro" }, | ||
85 | HighlightedRange { range: [13; 18), tag: "text" }, | ||
86 | HighlightedRange { range: [51; 54), tag: "keyword" }, | ||
87 | HighlightedRange { range: [55; 60), tag: "keyword" }, | ||
88 | HighlightedRange { range: [61; 72), tag: "function" }]"#, | ||
89 | &highlights, | ||
90 | ) | ||
91 | } | ||
92 | } | ||
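As the tests above show, `highlight` is keyed by a `FileId`, and ranges computed inside macro expansions are mapped back into the original file before being returned. A tiny consumer-side sketch against the `single_file` helper (fixture invented):

// Illustrative sketch only: every returned range refers to the original file,
// including tokens that were highlighted inside a macro expansion.
use crate::mock_analysis::single_file;

#[test]
fn demo_highlight_plain_file() {
    let (analysis, file_id) = single_file("fn main() { let x = 92; }");
    for h in analysis.highlight(file_id).unwrap() {
        let _ = (h.range, h.tag);
    }
}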