-rw-r--r--   crates/ra_ide/src/completion.rs                     |   1
-rw-r--r--   crates/ra_ide/src/completion/complete_keyword.rs    | 271
-rw-r--r--   crates/ra_ide/src/completion/completion_context.rs  |  28
-rw-r--r--   crates/ra_ide/src/completion/patterns.rs            | 117
4 files changed, 183 insertions, 234 deletions
diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs
index d890b69d2..fa37b6955 100644
--- a/crates/ra_ide/src/completion.rs
+++ b/crates/ra_ide/src/completion.rs
@@ -15,6 +15,7 @@ mod complete_unqualified_path;
 mod complete_postfix;
 mod complete_macro_in_item_position;
 mod complete_trait_impl;
+mod patterns;
 #[cfg(test)]
 mod test_utils;
 
diff --git a/crates/ra_ide/src/completion/complete_keyword.rs b/crates/ra_ide/src/completion/complete_keyword.rs
index 36280b703..5b56c6275 100644
--- a/crates/ra_ide/src/completion/complete_keyword.rs
+++ b/crates/ra_ide/src/completion/complete_keyword.rs
@@ -1,12 +1,6 @@
 //! FIXME: write short doc here
 
-use ra_syntax::{
-    algo::non_trivia_sibling,
-    ast::{self, LoopBodyOwner},
-    match_ast, AstNode, Direction, NodeOrToken, SyntaxElement,
-    SyntaxKind::*,
-    SyntaxToken,
-};
+use ra_syntax::ast;
 
 use crate::completion::{
     CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions,
@@ -53,110 +47,56 @@ fn keyword(ctx: &CompletionContext, kw: &str, snippet: &str) -> CompletionItem {
     .build()
 }
 
-fn add_top_level_keywords(acc: &mut Completions, ctx: &CompletionContext) {
-    if let Some(token) = previous_non_triva_element(&ctx.token).and_then(|it| it.into_token()) {
-        if token.kind() == UNSAFE_KW {
-            acc.add(keyword(ctx, "impl", "impl $0 {}"));
-            acc.add(keyword(ctx, "trait", "trait $0 {}"));
-            acc.add(keyword(ctx, "fn", "fn $0() {}"));
-            return;
-        }
-    }
-    acc.add(keyword(ctx, "impl", "impl $0 {}"));
-    acc.add(keyword(ctx, "enum", "enum $0 {}"));
-    acc.add(keyword(ctx, "struct", "struct $0 {}"));
-    acc.add(keyword(ctx, "trait", "trait $0 {}"));
-    acc.add(keyword(ctx, "fn", "fn $0() {}"));
-    acc.add(keyword(ctx, "unsafe", "unsafe "));
+fn add_keyword(
+    ctx: &CompletionContext,
+    acc: &mut Completions,
+    kw: &str,
+    snippet: &str,
+    should_add: bool,
+) {
+    if should_add {
+        acc.add(keyword(ctx, kw, snippet));
+    }
 }
 
 pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) {
-    if ctx.is_new_item {
-        add_top_level_keywords(acc, ctx);
-        return;
-    }
-    if !ctx.is_trivial_path {
-        return;
-    }
+    add_keyword(ctx, acc, "fn", "fn $0() {}", ctx.is_new_item || ctx.block_expr_parent);
+    add_keyword(ctx, acc, "type", "type ", ctx.is_new_item || ctx.block_expr_parent);
+    add_keyword(ctx, acc, "fn", "fn $0() {}", ctx.is_new_item || ctx.block_expr_parent);
+    add_keyword(ctx, acc, "impl", "impl $0 {}", ctx.is_new_item);
+    add_keyword(ctx, acc, "trait", "impl $0 {}", ctx.is_new_item);
+    add_keyword(ctx, acc, "enum", "enum $0 {}", ctx.is_new_item && !ctx.after_unsafe);
+    add_keyword(ctx, acc, "struct", "struct $0 {}", ctx.is_new_item && !ctx.after_unsafe);
+    add_keyword(ctx, acc, "union", "union $0 {}", ctx.is_new_item && !ctx.after_unsafe);
+    add_keyword(ctx, acc, "match", "match $0 {}", ctx.block_expr_parent);
+    add_keyword(ctx, acc, "loop", "loop {$0}", ctx.block_expr_parent);
+    add_keyword(ctx, acc, "while", "while $0 {}", ctx.block_expr_parent);
+    add_keyword(ctx, acc, "let", "let ", ctx.after_if || ctx.block_expr_parent);
+    add_keyword(ctx, acc, "let", "let ", ctx.after_if || ctx.block_expr_parent);
+    add_keyword(ctx, acc, "else", "else {$0}", ctx.after_if);
+    add_keyword(ctx, acc, "else if", "else if $0 {}", ctx.after_if);
+    add_keyword(ctx, acc, "mod", "mod $0 {}", ctx.is_new_item || ctx.block_expr_parent);
+    add_keyword(ctx, acc, "mut", "mut ", ctx.bind_pat_parent || ctx.ref_pat_parent);
+    add_keyword(ctx, acc, "true", "true", !ctx.is_new_item); // this should be defined properly
+    add_keyword(ctx, acc, "false", "false", !ctx.is_new_item); // this should be defined properly
+    add_keyword(ctx, acc, "const", "const ", ctx.is_new_item || ctx.block_expr_parent);
+    add_keyword(ctx, acc, "type", "type ", ctx.is_new_item || ctx.block_expr_parent);
+    add_keyword(ctx, acc, "static", "static ", ctx.is_new_item || ctx.block_expr_parent);
+    add_keyword(ctx, acc, "extern", "extern ", ctx.is_new_item || ctx.block_expr_parent);
+    add_keyword(ctx, acc, "unsafe", "unsafe ", ctx.is_new_item || ctx.block_expr_parent);
+    add_keyword(ctx, acc, "continue", "continue;", ctx.in_loop_body && ctx.can_be_stmt);
+    add_keyword(ctx, acc, "break", "break;", ctx.in_loop_body && ctx.can_be_stmt);
+    add_keyword(ctx, acc, "continue", "continue", ctx.in_loop_body && !ctx.can_be_stmt);
+    add_keyword(ctx, acc, "break", "break", ctx.in_loop_body && !ctx.can_be_stmt);
+    complete_use_tree_keyword(acc, ctx);
 
     let fn_def = match &ctx.function_syntax {
         Some(it) => it,
         None => return,
     };
-    acc.add(keyword(ctx, "if", "if $0 {}"));
-    acc.add(keyword(ctx, "match", "match $0 {}"));
-    acc.add(keyword(ctx, "while", "while $0 {}"));
-    acc.add(keyword(ctx, "loop", "loop {$0}"));
-
-    if ctx.after_if {
-        acc.add(keyword(ctx, "else", "else {$0}"));
-        acc.add(keyword(ctx, "else if", "else if $0 {}"));
-    }
-    if is_in_loop_body(&ctx.token) {
-        if ctx.can_be_stmt {
-            acc.add(keyword(ctx, "continue", "continue;"));
-            acc.add(keyword(ctx, "break", "break;"));
-        } else {
-            acc.add(keyword(ctx, "continue", "continue"));
-            acc.add(keyword(ctx, "break", "break"));
-        }
-    }
     acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt));
 }
 
-fn previous_non_triva_element(token: &SyntaxToken) -> Option<SyntaxElement> {
-    // trying to get first non triva sibling if we have one
-    let token_sibling = non_trivia_sibling(NodeOrToken::Token(token.to_owned()), Direction::Prev);
-    let mut wrapped = if let Some(sibling) = token_sibling {
-        sibling
-    } else {
-        // if not trying to find first ancestor which has such a sibling
-        let node = token.parent();
-        let range = node.text_range();
-        let top_node = node.ancestors().take_while(|it| it.text_range() == range).last()?;
-        let prev_sibling_node = top_node.ancestors().find(|it| {
-            non_trivia_sibling(NodeOrToken::Node(it.to_owned()), Direction::Prev).is_some()
-        })?;
-        non_trivia_sibling(NodeOrToken::Node(prev_sibling_node), Direction::Prev)?
-    };
-    // traversing the tree down to get the last token or node, i.e. the closest one
-    loop {
-        if let Some(token) = wrapped.as_token() {
-            return Some(NodeOrToken::Token(token.clone()));
-        } else {
-            let new = wrapped.as_node().and_then(|n| n.last_child_or_token());
-            if new.is_some() {
-                wrapped = new.unwrap().clone();
-            } else {
-                return Some(wrapped);
-            }
-        }
-    }
-}
-
-fn is_in_loop_body(leaf: &SyntaxToken) -> bool {
-    // FIXME move this to CompletionContext and make it handle macros
-    for node in leaf.parent().ancestors() {
-        if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
-            break;
-        }
-        let loop_body = match_ast! {
-            match node {
-                ast::ForExpr(it) => it.loop_body(),
-                ast::WhileExpr(it) => it.loop_body(),
-                ast::LoopExpr(it) => it.loop_body(),
-                _ => None,
-            }
-        };
-        if let Some(body) = loop_body {
-            if body.syntax().text_range().contains_range(leaf.text_range()) {
-                return true;
-            }
-        }
-    }
-    false
-}
-
 fn complete_return(
     ctx: &CompletionContext,
     fn_def: &ast::FnDef,
@@ -321,139 +261,6 @@ mod tests {
     }
 
     #[test]
-    fn completes_unsafe_context_in_item_position_with_non_empty_token() {
-        assert_debug_snapshot!(
-            do_keyword_completion(
-                r"
-                mod my_mod {
-                    unsafe i<|>
-                }
-                ",
-            ),
-            @r###"
-        [
-            CompletionItem {
-                label: "fn",
-                source_range: 57..58,
-                delete: 57..58,
-                insert: "fn $0() {}",
-                kind: Keyword,
-            },
-            CompletionItem {
-                label: "impl",
-                source_range: 57..58,
-                delete: 57..58,
-                insert: "impl $0 {}",
-                kind: Keyword,
-            },
-            CompletionItem {
-                label: "trait",
-                source_range: 57..58,
-                delete: 57..58,
-                insert: "trait $0 {}",
-                kind: Keyword,
-            },
-        ]
-        "###
-        );
-    }
-
-    #[test]
-    fn completes_unsafe_context_in_item_position_with_empty_token() {
-        assert_debug_snapshot!(
-            do_keyword_completion(
-                r"
-                mod my_mod {
-                    unsafe <|>
-                }
-                ",
-            ),
-            @r###"
-        [
-            CompletionItem {
-                label: "fn",
-                source_range: 57..57,
-                delete: 57..57,
-                insert: "fn $0() {}",
-                kind: Keyword,
-            },
-            CompletionItem {
-                label: "impl",
-                source_range: 57..57,
-                delete: 57..57,
-                insert: "impl $0 {}",
-                kind: Keyword,
-            },
-            CompletionItem {
-                label: "trait",
-                source_range: 57..57,
-                delete: 57..57,
-                insert: "trait $0 {}",
-                kind: Keyword,
-            },
-        ]
-        "###
-        );
-    }
-
-    #[test]
-    fn completes_keywords_in_item_position_with_empty_token() {
-        assert_debug_snapshot!(
-            do_keyword_completion(
-                r"
-                <|>
-                ",
-            ),
-            @r###"
-        [
-            CompletionItem {
-                label: "enum",
-                source_range: 17..17,
-                delete: 17..17,
-                insert: "enum $0 {}",
-                kind: Keyword,
-            },
-            CompletionItem {
-                label: "fn",
-                source_range: 17..17,
-                delete: 17..17,
-                insert: "fn $0() {}",
-                kind: Keyword,
-            },
-            CompletionItem {
-                label: "impl",
-                source_range: 17..17,
-                delete: 17..17,
-                insert: "impl $0 {}",
-                kind: Keyword,
-            },
-            CompletionItem {
-                label: "struct",
-                source_range: 17..17,
-                delete: 17..17,
-                insert: "struct $0 {}",
-                kind: Keyword,
-            },
-            CompletionItem {
-                label: "trait",
-                source_range: 17..17,
-                delete: 17..17,
-                insert: "trait $0 {}",
-                kind: Keyword,
-            },
-            CompletionItem {
-                label: "unsafe",
-                source_range: 17..17,
-                delete: 17..17,
-                insert: "unsafe ",
-                kind: Keyword,
-            },
-        ]
-        "###
-        );
-    }
-
-    #[test]
     fn completes_else_after_if() {
         assert_debug_snapshot!(
             do_keyword_completion(
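
The substance of the complete_keyword.rs change is that complete_expr_keyword no longer branches on is_new_item / is_trivial_path and ad-hoc token inspection; every keyword now goes through the single add_keyword helper, gated by a boolean built from the new context flags. A minimal standalone sketch of that shape follows (plain Rust, not rust-analyzer code: Ctx and push_keyword are illustrative stand-ins for CompletionContext and Completions::add):

// Sketch: a flat, predicate-gated keyword list instead of nested position checks.
struct Ctx {
    is_new_item: bool,
    block_expr_parent: bool,
    after_if: bool,
    in_loop_body: bool,
    can_be_stmt: bool,
}

// Mirrors add_keyword: the caller evaluates the predicate, so each keyword is
// listed once and its gating condition is visible at the call site.
fn push_keyword(
    out: &mut Vec<(&'static str, &'static str)>,
    kw: &'static str,
    snippet: &'static str,
    should_add: bool,
) {
    if should_add {
        out.push((kw, snippet));
    }
}

fn main() {
    // Pretend the cursor sits inside a loop body within a block expression.
    let ctx = Ctx {
        is_new_item: false,
        block_expr_parent: true,
        after_if: false,
        in_loop_body: true,
        can_be_stmt: true,
    };
    let mut out = Vec::new();
    push_keyword(&mut out, "fn", "fn $0() {}", ctx.is_new_item || ctx.block_expr_parent);
    push_keyword(&mut out, "else", "else {$0}", ctx.after_if);
    push_keyword(&mut out, "break", "break;", ctx.in_loop_body && ctx.can_be_stmt);
    for (kw, snippet) in &out {
        println!("{} -> {}", kw, snippet);
    }
}

The trade-off is visible in the diff itself: the table form makes every condition explicit, but it also makes duplicates and copy-paste slips (the repeated "fn" and "let" entries, and the "trait" entry carrying an "impl $0 {}" snippet) easy to overlook.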
diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs
index c4646b727..1ef07d8f4 100644
--- a/crates/ra_ide/src/completion/completion_context.rs
+++ b/crates/ra_ide/src/completion/completion_context.rs
@@ -5,12 +5,16 @@ use ra_db::SourceDatabase;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     algo::{find_covering_element, find_node_at_offset},
-    ast, match_ast, AstNode,
+    ast, match_ast, AstNode, NodeOrToken,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, TextRange, TextSize,
 };
 use ra_text_edit::Indel;
 
+use super::patterns::{
+    goes_after_unsafe, has_bind_pat_parent, has_block_expr_parent, has_ref_pat_parent,
+    is_in_loop_body,
+};
 use crate::{call_info::ActiveParameter, completion::CompletionConfig, FilePosition};
 use test_utils::mark;
 
@@ -60,6 +64,11 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) is_path_type: bool,
     pub(super) has_type_args: bool,
     pub(super) attribute_under_caret: Option<ast::Attr>,
+    pub(super) after_unsafe: bool,
+    pub(super) block_expr_parent: bool,
+    pub(super) bind_pat_parent: bool,
+    pub(super) ref_pat_parent: bool,
+    pub(super) in_loop_body: bool,
 }
 
 impl<'a> CompletionContext<'a> {
@@ -118,6 +127,11 @@ impl<'a> CompletionContext<'a> {
             has_type_args: false,
             dot_receiver_is_ambiguous_float_literal: false,
             attribute_under_caret: None,
+            after_unsafe: false,
+            in_loop_body: false,
+            ref_pat_parent: false,
+            bind_pat_parent: false,
+            block_expr_parent: false,
         };
 
         let mut original_file = original_file.syntax().clone();
@@ -159,7 +173,7 @@ impl<'a> CompletionContext<'a> {
                 break;
             }
         }
-
+        ctx.fill_keyword_patterns(&hypothetical_file, offset);
         ctx.fill(&original_file, hypothetical_file, offset);
         Some(ctx)
     }
@@ -188,6 +202,16 @@ impl<'a> CompletionContext<'a> {
         self.sema.scope_at_offset(&self.token.parent(), self.offset)
     }
 
+    fn fill_keyword_patterns(&mut self, file_with_fake_ident: &SyntaxNode, offset: TextSize) {
+        let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap();
+        let syntax_element = NodeOrToken::Token(fake_ident_token.clone());
+        self.block_expr_parent = has_block_expr_parent(syntax_element.clone());
+        self.after_unsafe = goes_after_unsafe(syntax_element.clone());
+        self.bind_pat_parent = has_bind_pat_parent(syntax_element.clone());
+        self.ref_pat_parent = has_ref_pat_parent(syntax_element.clone());
+        self.in_loop_body = is_in_loop_body(syntax_element.clone());
+    }
+
     fn fill(
         &mut self,
         original_file: &SyntaxNode,
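
In completion_context.rs the new flags are filled once, in fill_keyword_patterns, before fill runs: the token at the completion offset is taken from the file that already contains the fake identifier and handed to the pattern helpers as a NodeOrToken. A rough isolated sketch of the same probe is below; it assumes only the ra_syntax API that already appears in this diff (SourceFile::parse, token_at_offset, ancestors) and is deliberately looser than the committed has_block_expr_parent, which inspects just the nearest parent outside the token's same-range ancestor chain rather than any ancestor:

// Sketch: probe whether the token at `offset` sits inside a block expression.
use ra_syntax::{AstNode, SourceFile, SyntaxKind::BLOCK_EXPR, TextSize};

fn has_block_expr_ancestor_at(text: &str, offset: TextSize) -> bool {
    let file = SourceFile::parse(text).tree();
    let token = match file.syntax().token_at_offset(offset).right_biased() {
        Some(it) => it,
        None => return false,
    };
    // The committed helper goes through NodeOrToken and not_same_range_parent;
    // walking all ancestors is the simplified version.
    token.parent().ancestors().any(|node| node.kind() == BLOCK_EXPR)
}

fn main() {
    // "m" stands in for the fake identifier that CompletionContext inserts
    // at the cursor position before re-parsing.
    let text = "fn quux() { m }";
    println!("{}", has_block_expr_ancestor_at(text, TextSize::from(12u32)));
}

Note the unwrap on right_biased() in the committed fill_keyword_patterns: it relies on the fake identifier guaranteeing a token at the offset, which holds for the hypothetical file but would panic if that invariant ever broke.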
diff --git a/crates/ra_ide/src/completion/patterns.rs b/crates/ra_ide/src/completion/patterns.rs
new file mode 100644
index 000000000..b55f23fbe
--- /dev/null
+++ b/crates/ra_ide/src/completion/patterns.rs
@@ -0,0 +1,117 @@
+use ra_syntax::{
+    algo::non_trivia_sibling,
+    ast::{self, LoopBodyOwner},
+    match_ast, AstNode, Direction, NodeOrToken, SyntaxElement,
+    SyntaxKind::*,
+    SyntaxNode,
+};
+
+pub(crate) fn inside_impl(element: SyntaxElement) -> bool {
+    let node = match element {
+        NodeOrToken::Node(node) => node,
+        NodeOrToken::Token(token) => token.parent(),
+    };
+    node.ancestors().find(|it| it.kind() == IMPL_DEF).is_some()
+}
+
+pub(crate) fn has_bind_pat_parent(element: SyntaxElement) -> bool {
+    let node = match element {
+        NodeOrToken::Node(node) => node,
+        NodeOrToken::Token(token) => token.parent(),
+    };
+    node.ancestors().find(|it| it.kind() == BIND_PAT).is_some()
+}
+
+pub(crate) fn has_ref_pat_parent(element: SyntaxElement) -> bool {
+    let node = match element {
+        NodeOrToken::Node(node) => node,
+        NodeOrToken::Token(token) => token.parent(),
+    };
+    node.ancestors().find(|it| it.kind() == REF_PAT).is_some()
+}
+
+pub(crate) fn goes_after_unsafe(element: SyntaxElement) -> bool {
+    if let Some(token) = previous_non_triva_element(element).and_then(|it| it.into_token()) {
+        if token.kind() == UNSAFE_KW {
+            return true;
+        }
+    }
+    false
+}
+
+pub(crate) fn has_block_expr_parent(element: SyntaxElement) -> bool {
+    not_same_range_parent(element).filter(|it| it.kind() == BLOCK_EXPR).is_some()
+}
+
+pub(crate) fn has_item_list_parent(element: SyntaxElement) -> bool {
+    not_same_range_parent(element).filter(|it| it.kind() == ITEM_LIST).is_some()
+}
+
+pub(crate) fn is_in_loop_body(element: SyntaxElement) -> bool {
+    let leaf = match element {
+        NodeOrToken::Node(node) => node,
+        NodeOrToken::Token(token) => token.parent(),
+    };
+    for node in leaf.ancestors() {
+        if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
+            break;
+        }
+        let loop_body = match_ast! {
+            match node {
+                ast::ForExpr(it) => it.loop_body(),
+                ast::WhileExpr(it) => it.loop_body(),
+                ast::LoopExpr(it) => it.loop_body(),
+                _ => None,
+            }
+        };
+        if let Some(body) = loop_body {
+            if body.syntax().text_range().contains_range(leaf.text_range()) {
+                return true;
+            }
+        }
+    }
+    false
+}
+
+fn not_same_range_parent(element: SyntaxElement) -> Option<SyntaxNode> {
+    let node = match element {
+        NodeOrToken::Node(node) => node,
+        NodeOrToken::Token(token) => token.parent(),
+    };
+    let range = node.text_range();
+    node.ancestors().take_while(|it| it.text_range() == range).last().and_then(|it| it.parent())
+}
+
+fn previous_non_triva_element(element: SyntaxElement) -> Option<SyntaxElement> {
+    // trying to get first non triva sibling if we have one
+    let token_sibling = non_trivia_sibling(element.clone(), Direction::Prev);
+    let mut wrapped = if let Some(sibling) = token_sibling {
+        sibling
+    } else {
+        // if not trying to find first ancestor which has such a sibling
+        let node = match element {
+            NodeOrToken::Node(node) => node,
+            NodeOrToken::Token(token) => token.parent(),
+        };
+        let range = node.text_range();
+        let top_node = node.ancestors().take_while(|it| it.text_range() == range).last()?;
+        let prev_sibling_node = top_node.ancestors().find(|it| {
+            non_trivia_sibling(NodeOrToken::Node(it.to_owned()), Direction::Prev).is_some()
+        })?;
+        non_trivia_sibling(NodeOrToken::Node(prev_sibling_node), Direction::Prev)?
+    };
+    //I think you can avoid this loop if you use SyntaxToken::prev_token -- unlike prev_sibling_or_token, it works across parents.
+    // traversing the tree down to get the last token or node, i.e. the closest one
+    loop {
+        if let Some(token) = wrapped.as_token() {
+            return Some(NodeOrToken::Token(token.clone()));
+        } else {
+            let new = wrapped.as_node().and_then(|n| n.last_child_or_token());
+            if new.is_some() {
+                wrapped = new.unwrap().clone();
+            } else {
+                return Some(wrapped);
+            }
+        }
+    }
+}
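
The comment left inside previous_non_triva_element ("you can avoid this loop if you use SyntaxToken::prev_token") points at an obvious follow-up. A hypothetical simplification along those lines, not part of this commit, skips trivia with prev_token alone, since it steps across parent boundaries and makes both the sibling search and the descend-to-last-child loop unnecessary:

// Sketch of the suggested prev_token-based helper (assumes ra_syntax's
// SyntaxToken::prev_token and SyntaxKind::is_trivia, both existing API).
use ra_syntax::SyntaxToken;

fn previous_non_trivia_token(token: SyntaxToken) -> Option<SyntaxToken> {
    let mut current = token.prev_token();
    while let Some(t) = current {
        if !t.kind().is_trivia() {
            // First preceding token that is neither whitespace nor a comment.
            return Some(t);
        }
        current = t.prev_token();
    }
    None
}

goes_after_unsafe would then reduce to something like previous_non_trivia_token(token).map_or(false, |t| t.kind() == UNSAFE_KW), at the cost of answering with a token rather than a SyntaxElement, which is all that call site uses anyway.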