author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>   2020-08-13 16:59:50 +0100
committer  GitHub <[email protected]>                                 2020-08-13 16:59:50 +0100
commit     018a6cac072767dfd630c22e6d9ce134b7bb09af (patch)
tree       4293492e643f9a604c5f30e051289bcea182694c /crates/ide/src/completion/completion_context.rs
parent     00fb411f3edea72a1a9739f7df6f21cca045730b (diff)
parent     6bc2633c90cedad057c5201d1ab7f67b57247004 (diff)
Merge #5750
5750: Rename ra_ide -> ide
r=matklad a=matklad
bors r+
🤖
Co-authored-by: Aleksey Kladov <[email protected]>
Diffstat (limited to 'crates/ide/src/completion/completion_context.rs')
-rw-r--r--  crates/ide/src/completion/completion_context.rs  465
1 file changed, 465 insertions, 0 deletions
diff --git a/crates/ide/src/completion/completion_context.rs b/crates/ide/src/completion/completion_context.rs
new file mode 100644
index 000000000..047ecd9d7
--- /dev/null
+++ b/crates/ide/src/completion/completion_context.rs
@@ -0,0 +1,465 @@
//! FIXME: write short doc here

use base_db::SourceDatabase;
use hir::{Semantics, SemanticsScope, Type};
use ide_db::RootDatabase;
use syntax::{
    algo::{find_covering_element, find_node_at_offset},
    ast, match_ast, AstNode, NodeOrToken,
    SyntaxKind::*,
    SyntaxNode, SyntaxToken, TextRange, TextSize,
};
use text_edit::Indel;

use super::patterns::{
    has_bind_pat_parent, has_block_expr_parent, has_impl_as_prev_sibling, has_impl_parent,
    has_item_list_or_source_file_parent, has_ref_parent, has_trait_as_prev_sibling,
    has_trait_parent, if_is_prev, is_in_loop_body, is_match_arm, unsafe_is_prev,
};
use crate::{call_info::ActiveParameter, completion::CompletionConfig, FilePosition};
use test_utils::mark;

/// `CompletionContext` is created early during completion to figure out where
/// exactly the cursor is, syntax-wise.
#[derive(Debug)]
pub(crate) struct CompletionContext<'a> {
    pub(super) sema: Semantics<'a, RootDatabase>,
    pub(super) scope: SemanticsScope<'a>,
    pub(super) db: &'a RootDatabase,
    pub(super) config: &'a CompletionConfig,
    pub(super) position: FilePosition,
    /// The token before the cursor, in the original file.
    pub(super) original_token: SyntaxToken,
    /// The token before the cursor, in the macro-expanded file.
    pub(super) token: SyntaxToken,
    pub(super) krate: Option<hir::Crate>,
    pub(super) expected_type: Option<Type>,
    pub(super) name_ref_syntax: Option<ast::NameRef>,
    pub(super) function_syntax: Option<ast::Fn>,
    pub(super) use_item_syntax: Option<ast::Use>,
    pub(super) record_lit_syntax: Option<ast::RecordExpr>,
    pub(super) record_pat_syntax: Option<ast::RecordPat>,
    pub(super) record_field_syntax: Option<ast::RecordExprField>,
    pub(super) impl_def: Option<ast::Impl>,
    /// FIXME: `ActiveParameter` is string-based, which is very very wrong
    pub(super) active_parameter: Option<ActiveParameter>,
    pub(super) is_param: bool,
    /// True if this is a name binding or a reference to a const in a pattern.
    /// Irrefutable patterns (like `let`) are excluded.
    pub(super) is_pat_binding_or_const: bool,
    /// A single-ident path, like `foo`. `::foo` should not be considered a trivial path.
    pub(super) is_trivial_path: bool,
    /// If not a trivial path, the prefix (qualifier).
    pub(super) path_prefix: Option<hir::Path>,
    pub(super) after_if: bool,
    /// `true` if we are a statement or the last expr in the block.
    pub(super) can_be_stmt: bool,
    /// `true` if we expect an expression at the cursor position.
    pub(super) is_expr: bool,
    /// Something is typed at the "top" level, in module or impl/trait.
    pub(super) is_new_item: bool,
    /// The receiver if this is a field or method access, i.e. writing something.<|>
    pub(super) dot_receiver: Option<ast::Expr>,
    pub(super) dot_receiver_is_ambiguous_float_literal: bool,
    /// If this is a call (method or function) in particular, i.e. the () are already there.
    pub(super) is_call: bool,
    /// Like `is_call`, but for tuple patterns.
    pub(super) is_pattern_call: bool,
    /// If this is a macro call, i.e. the () are already there.
    pub(super) is_macro_call: bool,
    pub(super) is_path_type: bool,
    pub(super) has_type_args: bool,
    pub(super) attribute_under_caret: Option<ast::Attr>,
    pub(super) unsafe_is_prev: bool,
    pub(super) if_is_prev: bool,
    pub(super) block_expr_parent: bool,
    pub(super) bind_pat_parent: bool,
    pub(super) ref_pat_parent: bool,
    pub(super) in_loop_body: bool,
    pub(super) has_trait_parent: bool,
    pub(super) has_impl_parent: bool,
    pub(super) trait_as_prev_sibling: bool,
    pub(super) impl_as_prev_sibling: bool,
    pub(super) is_match_arm: bool,
    pub(super) has_item_list_or_source_file_parent: bool,
}

impl<'a> CompletionContext<'a> {
    pub(super) fn new(
        db: &'a RootDatabase,
        position: FilePosition,
        config: &'a CompletionConfig,
    ) -> Option<CompletionContext<'a>> {
        let sema = Semantics::new(db);

        let original_file = sema.parse(position.file_id);

        // Insert a fake ident to get a valid parse tree. We will use this file
        // to determine context, though the original_file will be used for
        // actual completion.
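        // For example, with the cursor in `let x = foo.<|>;` the reparsed file
        // contains `let x = foo.intellijRulezz;`, which parses as a complete
        // field access even though the original text does not.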
        let file_with_fake_ident = {
            let parse = db.parse(position.file_id);
            let edit = Indel::insert(position.offset, "intellijRulezz".to_string());
            parse.reparse(&edit).tree()
        };
        let fake_ident_token =
            file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap();

        let krate = sema.to_module_def(position.file_id).map(|m| m.krate());
        let original_token =
            original_file.syntax().token_at_offset(position.offset).left_biased()?;
        let token = sema.descend_into_macros(original_token.clone());
        let scope = sema.scope_at_offset(&token.parent(), position.offset);
        let mut ctx = CompletionContext {
            sema,
            scope,
            db,
            config,
            original_token,
            token,
            position,
            krate,
            expected_type: None,
            name_ref_syntax: None,
            function_syntax: None,
            use_item_syntax: None,
            record_lit_syntax: None,
            record_pat_syntax: None,
            record_field_syntax: None,
            impl_def: None,
            active_parameter: ActiveParameter::at(db, position),
            is_param: false,
            is_pat_binding_or_const: false,
            is_trivial_path: false,
            path_prefix: None,
            after_if: false,
            can_be_stmt: false,
            is_expr: false,
            is_new_item: false,
            dot_receiver: None,
            is_call: false,
            is_pattern_call: false,
            is_macro_call: false,
            is_path_type: false,
            has_type_args: false,
            dot_receiver_is_ambiguous_float_literal: false,
            attribute_under_caret: None,
            unsafe_is_prev: false,
            in_loop_body: false,
            ref_pat_parent: false,
            bind_pat_parent: false,
            block_expr_parent: false,
            has_trait_parent: false,
            has_impl_parent: false,
            trait_as_prev_sibling: false,
            impl_as_prev_sibling: false,
            if_is_prev: false,
            is_match_arm: false,
            has_item_list_or_source_file_parent: false,
        };

        let mut original_file = original_file.syntax().clone();
        let mut hypothetical_file = file_with_fake_ident.syntax().clone();
        let mut offset = position.offset;
        let mut fake_ident_token = fake_ident_token;

        // Are we inside a macro call?
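        // If so, expand both the real call and a hypothetical call containing
        // the fake ident, and redo the analysis inside the expansions. This
        // repeats for nested macro calls and stops as soon as the two calls no
        // longer share the same macro path or an expansion fails.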
        while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
            find_node_at_offset::<ast::MacroCall>(&original_file, offset),
            find_node_at_offset::<ast::MacroCall>(&hypothetical_file, offset),
        ) {
            if actual_macro_call.path().as_ref().map(|s| s.syntax().text())
                != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text())
            {
                break;
            }
            let hypothetical_args = match macro_call_with_fake_ident.token_tree() {
                Some(tt) => tt,
                None => break,
            };
            if let (Some(actual_expansion), Some(hypothetical_expansion)) = (
                ctx.sema.expand(&actual_macro_call),
                ctx.sema.expand_hypothetical(
                    &actual_macro_call,
                    &hypothetical_args,
                    fake_ident_token,
                ),
            ) {
                let new_offset = hypothetical_expansion.1.text_range().start();
                if new_offset > actual_expansion.text_range().end() {
                    break;
                }
                original_file = actual_expansion;
                hypothetical_file = hypothetical_expansion.0;
                fake_ident_token = hypothetical_expansion.1;
                offset = new_offset;
            } else {
                break;
            }
        }
        ctx.fill_keyword_patterns(&hypothetical_file, offset);
        ctx.fill(&original_file, hypothetical_file, offset);
        Some(ctx)
    }

    // The range of the identifier that is being completed.
    pub(crate) fn source_range(&self) -> TextRange {
        // check kind of macro-expanded token, but use range of original token
        if self.token.kind() == IDENT || self.token.kind().is_keyword() {
            mark::hit!(completes_if_prefix_is_keyword);
            self.original_token.text_range()
        } else {
            TextRange::empty(self.position.offset)
        }
    }

    fn fill_keyword_patterns(&mut self, file_with_fake_ident: &SyntaxNode, offset: TextSize) {
        let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap();
        let syntax_element = NodeOrToken::Token(fake_ident_token);
        self.block_expr_parent = has_block_expr_parent(syntax_element.clone());
        self.unsafe_is_prev = unsafe_is_prev(syntax_element.clone());
        self.if_is_prev = if_is_prev(syntax_element.clone());
        self.bind_pat_parent = has_bind_pat_parent(syntax_element.clone());
        self.ref_pat_parent = has_ref_parent(syntax_element.clone());
        self.in_loop_body = is_in_loop_body(syntax_element.clone());
        self.has_trait_parent = has_trait_parent(syntax_element.clone());
        self.has_impl_parent = has_impl_parent(syntax_element.clone());
        self.impl_as_prev_sibling = has_impl_as_prev_sibling(syntax_element.clone());
        self.trait_as_prev_sibling = has_trait_as_prev_sibling(syntax_element.clone());
        self.is_match_arm = is_match_arm(syntax_element.clone());
        self.has_item_list_or_source_file_parent =
            has_item_list_or_source_file_parent(syntax_element);
    }

    fn fill(
        &mut self,
        original_file: &SyntaxNode,
        file_with_fake_ident: SyntaxNode,
        offset: TextSize,
    ) {
        // FIXME: this is wrong in at least two cases:
        //  * when there's no token `foo(<|>)`
        //  * when there is a token, but it happens to have a type of its own
        self.expected_type = self
            .token
            .ancestors()
            .find_map(|node| {
                let ty = match_ast! {
                    match node {
                        ast::Pat(it) => self.sema.type_of_pat(&it),
                        ast::Expr(it) => self.sema.type_of_expr(&it),
                        _ => return None,
                    }
                };
                Some(ty)
            })
            .flatten();
        self.attribute_under_caret = find_node_at_offset(&file_with_fake_ident, offset);

        // First, let's try to complete a reference to some declaration.
        if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&file_with_fake_ident, offset) {
            // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`.
            // See RFC#1685.
            if is_node::<ast::Param>(name_ref.syntax()) {
                self.is_param = true;
                return;
            }
            // FIXME: remove this (V) duplication and make the check more precise
            if name_ref.syntax().ancestors().find_map(ast::RecordPatFieldList::cast).is_some() {
                self.record_pat_syntax =
                    self.sema.find_node_at_offset_with_macros(&original_file, offset);
            }
            self.classify_name_ref(original_file, name_ref, offset);
        }

        // Otherwise, see if this is a declaration. We can use heuristics to
        // suggest declaration names, see `CompletionKind::Magic`.
        if let Some(name) = find_node_at_offset::<ast::Name>(&file_with_fake_ident, offset) {
            if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::IdentPat::cast) {
                self.is_pat_binding_or_const = true;
                if bind_pat.at_token().is_some()
                    || bind_pat.ref_token().is_some()
                    || bind_pat.mut_token().is_some()
                {
                    self.is_pat_binding_or_const = false;
                }
                if bind_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast).is_some() {
                    self.is_pat_binding_or_const = false;
                }
                if let Some(let_stmt) = bind_pat.syntax().ancestors().find_map(ast::LetStmt::cast) {
                    if let Some(pat) = let_stmt.pat() {
                        if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range())
                        {
                            self.is_pat_binding_or_const = false;
                        }
                    }
                }
            }
            if is_node::<ast::Param>(name.syntax()) {
                self.is_param = true;
                return;
            }
            // FIXME: remove this (^) duplication and make the check more precise
            if name.syntax().ancestors().find_map(ast::RecordPatFieldList::cast).is_some() {
                self.record_pat_syntax =
                    self.sema.find_node_at_offset_with_macros(&original_file, offset);
            }
        }
    }

    fn classify_name_ref(
        &mut self,
        original_file: &SyntaxNode,
        name_ref: ast::NameRef,
        offset: TextSize,
    ) {
        self.name_ref_syntax =
            find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
        let name_range = name_ref.syntax().text_range();
        if ast::RecordExprField::for_field_name(&name_ref).is_some() {
            self.record_lit_syntax =
                self.sema.find_node_at_offset_with_macros(&original_file, offset);
        }

        self.impl_def = self
            .sema
            .ancestors_with_macros(self.token.parent())
            .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
            .find_map(ast::Impl::cast);

        let top_node = name_ref
            .syntax()
            .ancestors()
            .take_while(|it| it.text_range() == name_range)
            .last()
            .unwrap();

        match top_node.parent().map(|it| it.kind()) {
            Some(SOURCE_FILE) | Some(ITEM_LIST) => {
                self.is_new_item = true;
                return;
            }
            _ => (),
        }

        self.use_item_syntax =
            self.sema.ancestors_with_macros(self.token.parent()).find_map(ast::Use::cast);

        self.function_syntax = self
            .sema
            .ancestors_with_macros(self.token.parent())
            .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
            .find_map(ast::Fn::cast);

        self.record_field_syntax = self
            .sema
            .ancestors_with_macros(self.token.parent())
            .take_while(|it| {
                it.kind() != SOURCE_FILE && it.kind() != MODULE && it.kind() != CALL_EXPR
            })
            .find_map(ast::RecordExprField::cast);

        let parent = match name_ref.syntax().parent() {
            Some(it) => it,
            None => return,
        };

        if let Some(segment) = ast::PathSegment::cast(parent.clone()) {
            let path = segment.parent_path();
            self.is_call = path
                .syntax()
                .parent()
                .and_then(ast::PathExpr::cast)
                .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast))
                .is_some();
            self.is_macro_call = path.syntax().parent().and_then(ast::MacroCall::cast).is_some();
            self.is_pattern_call =
                path.syntax().parent().and_then(ast::TupleStructPat::cast).is_some();

            self.is_path_type = path.syntax().parent().and_then(ast::PathType::cast).is_some();
            self.has_type_args = segment.generic_arg_list().is_some();

            let hygiene = hir::Hygiene::new(self.db, self.position.file_id.into());
            if let Some(path) = hir::Path::from_src(path.clone(), &hygiene) {
                if let Some(path_prefix) = path.qualifier() {
                    self.path_prefix = Some(path_prefix);
                    return;
                }
            }

            if path.qualifier().is_none() {
                self.is_trivial_path = true;

                // Find either enclosing expr statement (thing with `;`) or a
                // block. If block, check that we are the last expr.
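                // E.g. in `fn f() { foo<|> }` the path is the tail expression
                // of the block, so `can_be_stmt` is true; in `f(foo<|>)` it is
                // not.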
                self.can_be_stmt = name_ref
                    .syntax()
                    .ancestors()
                    .find_map(|node| {
                        if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
                            return Some(
                                stmt.syntax().text_range() == name_ref.syntax().text_range(),
                            );
                        }
                        if let Some(block) = ast::BlockExpr::cast(node) {
                            return Some(
                                block.expr().map(|e| e.syntax().text_range())
                                    == Some(name_ref.syntax().text_range()),
                            );
                        }
                        None
                    })
                    .unwrap_or(false);
                self.is_expr = path.syntax().parent().and_then(ast::PathExpr::cast).is_some();

                if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) {
                    if let Some(if_expr) =
                        self.sema.find_node_at_offset_with_macros::<ast::IfExpr>(original_file, off)
                    {
                        if if_expr.syntax().text_range().end()
                            < name_ref.syntax().text_range().start()
                        {
                            self.after_if = true;
                        }
                    }
                }
            }
        }
        if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
            // The receiver comes before the point of insertion of the fake
            // ident, so it should have the same range in the non-modified file
            self.dot_receiver = field_expr
                .expr()
                .map(|e| e.syntax().text_range())
                .and_then(|r| find_node_with_range(original_file, r));
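            // Covers `1.<|>`: in the original file the receiver at this range
            // is the float literal `1.`, whose trailing `.` is really the
            // start of a field or method access on the integer `1`.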
            self.dot_receiver_is_ambiguous_float_literal =
                if let Some(ast::Expr::Literal(l)) = &self.dot_receiver {
                    match l.kind() {
                        ast::LiteralKind::FloatNumber { .. } => l.token().text().ends_with('.'),
                        _ => false,
                    }
                } else {
                    false
                }
        }
        if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) {
            // As above
            self.dot_receiver = method_call_expr
                .expr()
                .map(|e| e.syntax().text_range())
                .and_then(|r| find_node_with_range(original_file, r));
            self.is_call = true;
        }
    }
}

fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
    find_covering_element(syntax, range).ancestors().find_map(N::cast)
}

fn is_node<N: AstNode>(node: &SyntaxNode) -> bool {
    match node.ancestors().find_map(N::cast) {
        None => false,
        Some(n) => n.syntax().text_range() == node.text_range(),
    }
}