Diffstat (limited to 'crates/ide_db/src')
 crates/ide_db/src/apply_change.rs                |   2
 crates/ide_db/src/call_info.rs                   |  10
 crates/ide_db/src/call_info/tests.rs             |  27
 crates/ide_db/src/helpers.rs                     |   8
 crates/ide_db/src/helpers/famous_defs_fixture.rs |   8
 crates/ide_db/src/helpers/import_assets.rs       |  22
 crates/ide_db/src/helpers/insert_use.rs          | 587
 crates/ide_db/src/helpers/insert_use/tests.rs    |  42
 crates/ide_db/src/helpers/merge_imports.rs       | 309
 crates/ide_db/src/helpers/rust_doc.rs            |  34
 crates/ide_db/src/search.rs                      |  58
 11 files changed, 602 insertions, 505 deletions
diff --git a/crates/ide_db/src/apply_change.rs b/crates/ide_db/src/apply_change.rs
index 6c67425d7..eac5ef6b9 100644
--- a/crates/ide_db/src/apply_change.rs
+++ b/crates/ide_db/src/apply_change.rs
@@ -201,7 +201,7 @@ impl RootDatabase {
             hir::db::InternImplTraitIdQuery
             hir::db::InternClosureQuery
             hir::db::AssociatedTyValueQuery
-            hir::db::TraitSolveQuery
+            hir::db::TraitSolveQueryQuery
 
             // SymbolsDatabase
             crate::symbol_index::FileSymbolsQuery
diff --git a/crates/ide_db/src/call_info.rs b/crates/ide_db/src/call_info.rs
index e583a52f4..bad277a95 100644
--- a/crates/ide_db/src/call_info.rs
+++ b/crates/ide_db/src/call_info.rs
@@ -4,8 +4,9 @@ use either::Either;
 use hir::{HasAttrs, HirDisplay, Semantics, Type};
 use stdx::format_to;
 use syntax::{
+    algo,
     ast::{self, ArgListOwner, NameOwner},
-    match_ast, AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize,
+    match_ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
 };
 
 use crate::RootDatabase;
@@ -43,7 +44,12 @@ pub fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo>
     let sema = Semantics::new(db);
     let file = sema.parse(position.file_id);
     let file = file.syntax();
-    let token = file.token_at_offset(position.offset).next()?;
+    let token = file
+        .token_at_offset(position.offset)
+        .left_biased()
+        // if the cursor is sandwiched between two space tokens and the call is unclosed
+        // this prevents us from leaving the CallExpression
+        .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
     let token = sema.descend_into_macros(token);
 
     let (callable, active_parameter) = call_info_impl(&sema, token)?;
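The interesting part of this change is the token selection: `left_biased()` prefers the token that ends at the cursor, and `algo::skip_trivia_token(.., Direction::Prev)` then walks back over whitespace and comments, so a cursor after `foo( ` still lands on a token inside the unclosed call expression. A rough sketch of the same pattern (the helper name is made up; it only compiles inside rust-analyzer, against the `syntax` crate items imported above):

    // Sketch: pick the token that call_info should anchor on, mirroring the
    // `left_biased` + `skip_trivia_token` combination introduced above.
    use syntax::{algo, Direction, SyntaxNode, SyntaxToken, TextSize};

    fn token_for_call_info(file: &SyntaxNode, offset: TextSize) -> Option<SyntaxToken> {
        file.token_at_offset(offset)
            // prefer the token ending at the cursor over the one starting there
            .left_biased()
            // if the cursor sits between two whitespace tokens of an unclosed call,
            // walking backwards over trivia keeps us inside the CallExpr
            .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))
    }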
diff --git a/crates/ide_db/src/call_info/tests.rs b/crates/ide_db/src/call_info/tests.rs
index 281a081a3..be1cc12de 100644
--- a/crates/ide_db/src/call_info/tests.rs
+++ b/crates/ide_db/src/call_info/tests.rs
@@ -522,3 +522,30 @@ fn main(f: fn(i32, f64) -> char) {
         "#]],
     )
 }
+
+#[test]
+fn call_info_for_unclosed_call() {
+    check(
+        r#"
+fn foo(foo: u32, bar: u32) {}
+fn main() {
+    foo($0
+}"#,
+        expect![[r#"
+            fn foo(foo: u32, bar: u32)
+            (<foo: u32>, bar: u32)
+        "#]],
+    );
+    // check with surrounding space
+    check(
+        r#"
+fn foo(foo: u32, bar: u32) {}
+fn main() {
+    foo( $0
+}"#,
+        expect![[r#"
+            fn foo(foo: u32, bar: u32)
+            (<foo: u32>, bar: u32)
+        "#]],
+    )
+}
diff --git a/crates/ide_db/src/helpers.rs b/crates/ide_db/src/helpers.rs
index 66798ea3a..21b48237a 100644
--- a/crates/ide_db/src/helpers.rs
+++ b/crates/ide_db/src/helpers.rs
@@ -1,6 +1,8 @@
 //! A module with ide helpers for high-level ide features.
-pub mod insert_use;
 pub mod import_assets;
+pub mod insert_use;
+pub mod merge_imports;
+pub mod rust_doc;
 
 use std::collections::VecDeque;
 
@@ -113,6 +115,10 @@ impl FamousDefs<'_, '_> {
         self.find_module("core:iter")
     }
 
+    pub fn core_ops_Deref(&self) -> Option<Trait> {
+        self.find_trait("core:ops:Deref")
+    }
+
    fn find_trait(&self, path: &str) -> Option<Trait> {
         match self.find_def(path)? {
             hir::ScopeDef::ModuleDef(hir::ModuleDef::Trait(it)) => Some(it),
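`core_ops_Deref` follows the colon-separated lookup convention that `find_trait` already uses ("core:ops:Deref", rooted at the crate name). A minimal sketch of a caller, assuming only the `FamousDefs<'_, '_>` handle visible in the context above (its construction is elided, and `is_core_deref` is a hypothetical helper):

    // Sketch: decide whether `candidate` is the `core::ops::Deref` trait,
    // using the accessor added in this change.
    fn is_core_deref(famous: &ide_db::helpers::FamousDefs<'_, '_>, candidate: hir::Trait) -> bool {
        famous.core_ops_Deref().map_or(false, |deref| deref == candidate)
    }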
diff --git a/crates/ide_db/src/helpers/famous_defs_fixture.rs b/crates/ide_db/src/helpers/famous_defs_fixture.rs
index 4d79e064e..29ae12dcf 100644
--- a/crates/ide_db/src/helpers/famous_defs_fixture.rs
+++ b/crates/ide_db/src/helpers/famous_defs_fixture.rs
@@ -112,6 +112,12 @@ pub mod ops {
         type Output;
         extern "rust-call" fn call_once(self, args: Args) -> Self::Output;
     }
+
+    #[lang = "deref"]
+    pub trait Deref {
+        type Target: ?Sized;
+        fn deref(&self) -> &Self::Target;
+    }
 }
 
 pub mod option {
@@ -141,3 +147,5 @@ mod return_keyword {}
 
 /// Docs for prim_str
 mod prim_str {}
+
+pub use core::ops;
\ No newline at end of file
diff --git a/crates/ide_db/src/helpers/import_assets.rs b/crates/ide_db/src/helpers/import_assets.rs
index 8ce648367..91d6a4665 100644
--- a/crates/ide_db/src/helpers/import_assets.rs
+++ b/crates/ide_db/src/helpers/import_assets.rs
@@ -436,6 +436,8 @@ fn trait_applicable_items(
         })
         .collect();
 
+    let related_dyn_traits =
+        trait_candidate.receiver_ty.applicable_inherent_traits(db).collect::<FxHashSet<_>>();
     let mut located_imports = FxHashSet::default();
 
     if trait_assoc_item {
@@ -451,12 +453,16 @@
                    return None;
                }
            }
+            let located_trait = assoc.containing_trait(db)?;
+            if related_dyn_traits.contains(&located_trait) {
+                return None;
+            }
 
-            let item = ItemInNs::from(ModuleDef::from(assoc.containing_trait(db)?));
+            let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
             let original_item = assoc_to_item(assoc);
             located_imports.insert(LocatedImport::new(
-                mod_path(item)?,
-                item,
+                mod_path(trait_item)?,
+                trait_item,
                 original_item,
                 mod_path(original_item),
             ));
@@ -473,11 +479,15 @@
             |_, function| {
                 let assoc = function.as_assoc_item(db)?;
                 if required_assoc_items.contains(&assoc) {
-                    let item = ItemInNs::from(ModuleDef::from(assoc.containing_trait(db)?));
+                    let located_trait = assoc.containing_trait(db)?;
+                    if related_dyn_traits.contains(&located_trait) {
+                        return None;
+                    }
+                    let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
                     let original_item = assoc_to_item(assoc);
                     located_imports.insert(LocatedImport::new(
-                        mod_path(item)?,
-                        item,
+                        mod_path(trait_item)?,
+                        trait_item,
                         original_item,
                         mod_path(original_item),
                     ));
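As used here, `applicable_inherent_traits` yields the traits whose methods are already reachable on the receiver without any import, e.g. the principal trait of a `dyn Trait` receiver, so suggesting a `use` for them would be noise. A standalone illustration of the scenario this appears to guard against (all names made up):

    mod lib {
        pub trait Greet {
            fn hello(&self) -> &'static str {
                "hello"
            }
        }

        struct Person;
        impl Greet for Person {}

        pub fn someone() -> Box<dyn Greet> {
            Box::new(Person)
        }
    }

    fn main() {
        // No `use lib::Greet;` needed: the trait object's own methods are found as
        // inherent candidates, so auto-importing `Greet` here would be redundant.
        let p = lib::someone();
        println!("{}", p.hello());
    }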
diff --git a/crates/ide_db/src/helpers/insert_use.rs b/crates/ide_db/src/helpers/insert_use.rs
index be3a22725..55cdc4da3 100644
--- a/crates/ide_db/src/helpers/insert_use.rs
+++ b/crates/ide_db/src/helpers/insert_use.rs
@@ -1,19 +1,17 @@
 //! Handle syntactic aspects of inserting a new `use`.
-use std::{cmp::Ordering, iter::successors};
+use std::cmp::Ordering;
 
 use hir::Semantics;
-use itertools::{EitherOrBoth, Itertools};
 use syntax::{
-    algo::SyntaxRewriter,
-    ast::{
-        self,
-        edit::{AstNodeEdit, IndentLevel},
-        make, AstNode, AttrsOwner, PathSegmentKind, VisibilityOwner,
-    },
-    AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
+    algo,
+    ast::{self, make, AstNode, PathSegmentKind},
+    ted, AstToken, Direction, NodeOrToken, SyntaxNode, SyntaxToken,
 };
 
-use crate::RootDatabase;
+use crate::{
+    helpers::merge_imports::{try_merge_imports, use_tree_path_cmp, MergeBehavior},
+    RootDatabase,
+};
 
 pub use hir::PrefixKind;
 
@@ -42,13 +40,18 @@
     }
 
     /// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
-    pub fn find_insert_use_container(
+    pub fn find_insert_use_container_with_macros(
         position: &SyntaxNode,
         sema: &Semantics<'_, RootDatabase>,
     ) -> Option<Self> {
         sema.ancestors_with_macros(position.clone()).find_map(Self::from)
     }
 
+    /// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
+    pub fn find_insert_use_container(position: &SyntaxNode) -> Option<Self> {
+        std::iter::successors(Some(position.clone()), SyntaxNode::parent).find_map(Self::from)
+    }
+
     pub fn as_syntax_node(&self) -> &SyntaxNode {
         match self {
             ImportScope::File(file) => file.syntax(),
@@ -56,434 +59,32 @@ impl ImportScope { | |||
56 | } | 59 | } |
57 | } | 60 | } |
58 | 61 | ||
59 | fn indent_level(&self) -> IndentLevel { | 62 | pub fn clone_for_update(&self) -> Self { |
60 | match self { | 63 | match self { |
61 | ImportScope::File(file) => file.indent_level(), | 64 | ImportScope::File(file) => ImportScope::File(file.clone_for_update()), |
62 | ImportScope::Module(item_list) => item_list.indent_level() + 1, | 65 | ImportScope::Module(item_list) => ImportScope::Module(item_list.clone_for_update()), |
63 | } | 66 | } |
64 | } | 67 | } |
65 | |||
66 | fn first_insert_pos(&self) -> (InsertPosition<SyntaxElement>, AddBlankLine) { | ||
67 | match self { | ||
68 | ImportScope::File(_) => (InsertPosition::First, AddBlankLine::AfterTwice), | ||
69 | // don't insert the imports before the item list's opening curly brace | ||
70 | ImportScope::Module(item_list) => item_list | ||
71 | .l_curly_token() | ||
72 | .map(|b| (InsertPosition::After(b.into()), AddBlankLine::Around)) | ||
73 | .unwrap_or((InsertPosition::First, AddBlankLine::AfterTwice)), | ||
74 | } | ||
75 | } | ||
76 | |||
77 | fn insert_pos_after_last_inner_element(&self) -> (InsertPosition<SyntaxElement>, AddBlankLine) { | ||
78 | self.as_syntax_node() | ||
79 | .children_with_tokens() | ||
80 | .filter(|child| match child { | ||
81 | NodeOrToken::Node(node) => is_inner_attribute(node.clone()), | ||
82 | NodeOrToken::Token(token) => is_inner_comment(token.clone()), | ||
83 | }) | ||
84 | .last() | ||
85 | .map(|last_inner_element| { | ||
86 | (InsertPosition::After(last_inner_element), AddBlankLine::BeforeTwice) | ||
87 | }) | ||
88 | .unwrap_or_else(|| self.first_insert_pos()) | ||
89 | } | ||
90 | } | ||
91 | |||
92 | fn is_inner_attribute(node: SyntaxNode) -> bool { | ||
93 | ast::Attr::cast(node).map(|attr| attr.kind()) == Some(ast::AttrKind::Inner) | ||
94 | } | ||
95 | |||
96 | fn is_inner_comment(token: SyntaxToken) -> bool { | ||
97 | ast::Comment::cast(token).and_then(|comment| comment.kind().doc) | ||
98 | == Some(ast::CommentPlacement::Inner) | ||
99 | } | 68 | } |
100 | 69 | ||
101 | /// Insert an import path into the given file/node. A `merge` value of none indicates that no import merging is allowed to occur. | 70 | /// Insert an import path into the given file/node. A `merge` value of none indicates that no import merging is allowed to occur. |
102 | pub fn insert_use<'a>( | 71 | pub fn insert_use<'a>(scope: &ImportScope, path: ast::Path, cfg: InsertUseConfig) { |
103 | scope: &ImportScope, | ||
104 | path: ast::Path, | ||
105 | cfg: InsertUseConfig, | ||
106 | ) -> SyntaxRewriter<'a> { | ||
107 | let _p = profile::span("insert_use"); | 72 | let _p = profile::span("insert_use"); |
108 | let mut rewriter = SyntaxRewriter::default(); | 73 | let use_item = |
109 | let use_item = make::use_(None, make::use_tree(path.clone(), None, None, false)); | 74 | make::use_(None, make::use_tree(path.clone(), None, None, false)).clone_for_update(); |
110 | // merge into existing imports if possible | 75 | // merge into existing imports if possible |
111 | if let Some(mb) = cfg.merge { | 76 | if let Some(mb) = cfg.merge { |
112 | for existing_use in scope.as_syntax_node().children().filter_map(ast::Use::cast) { | 77 | for existing_use in scope.as_syntax_node().children().filter_map(ast::Use::cast) { |
113 | if let Some(merged) = try_merge_imports(&existing_use, &use_item, mb) { | 78 | if let Some(merged) = try_merge_imports(&existing_use, &use_item, mb) { |
114 | rewriter.replace(existing_use.syntax(), merged.syntax()); | 79 | ted::replace(existing_use.syntax(), merged.syntax()); |
115 | return rewriter; | 80 | return; |
116 | } | 81 | } |
117 | } | 82 | } |
118 | } | 83 | } |
119 | 84 | ||
120 | // either we weren't allowed to merge or there is no import that fits the merge conditions | 85 | // either we weren't allowed to merge or there is no import that fits the merge conditions |
121 | // so look for the place we have to insert to | 86 | // so look for the place we have to insert to |
122 | let (insert_position, add_blank) = find_insert_position(scope, path, cfg.group); | 87 | insert_use_(scope, path, cfg.group, use_item); |
123 | |||
124 | let indent = if let ident_level @ 1..=usize::MAX = scope.indent_level().0 as usize { | ||
125 | Some(make::tokens::whitespace(&" ".repeat(4 * ident_level)).into()) | ||
126 | } else { | ||
127 | None | ||
128 | }; | ||
129 | |||
130 | let to_insert: Vec<SyntaxElement> = { | ||
131 | let mut buf = Vec::new(); | ||
132 | |||
133 | match add_blank { | ||
134 | AddBlankLine::Before | AddBlankLine::Around => { | ||
135 | buf.push(make::tokens::single_newline().into()) | ||
136 | } | ||
137 | AddBlankLine::BeforeTwice => buf.push(make::tokens::blank_line().into()), | ||
138 | _ => (), | ||
139 | } | ||
140 | |||
141 | if add_blank.has_before() { | ||
142 | if let Some(indent) = indent.clone() { | ||
143 | cov_mark::hit!(insert_use_indent_before); | ||
144 | buf.push(indent); | ||
145 | } | ||
146 | } | ||
147 | |||
148 | buf.push(use_item.syntax().clone().into()); | ||
149 | |||
150 | match add_blank { | ||
151 | AddBlankLine::After | AddBlankLine::Around => { | ||
152 | buf.push(make::tokens::single_newline().into()) | ||
153 | } | ||
154 | AddBlankLine::AfterTwice => buf.push(make::tokens::blank_line().into()), | ||
155 | _ => (), | ||
156 | } | ||
157 | |||
158 | // only add indentation *after* our stuff if there's another node directly after it | ||
159 | if add_blank.has_after() && matches!(insert_position, InsertPosition::Before(_)) { | ||
160 | if let Some(indent) = indent { | ||
161 | cov_mark::hit!(insert_use_indent_after); | ||
162 | buf.push(indent); | ||
163 | } | ||
164 | } else if add_blank.has_after() && matches!(insert_position, InsertPosition::After(_)) { | ||
165 | cov_mark::hit!(insert_use_no_indent_after); | ||
166 | } | ||
167 | |||
168 | buf | ||
169 | }; | ||
170 | |||
171 | match insert_position { | ||
172 | InsertPosition::First => { | ||
173 | rewriter.insert_many_as_first_children(scope.as_syntax_node(), to_insert) | ||
174 | } | ||
175 | InsertPosition::Last => return rewriter, // actually unreachable | ||
176 | InsertPosition::Before(anchor) => rewriter.insert_many_before(&anchor, to_insert), | ||
177 | InsertPosition::After(anchor) => rewriter.insert_many_after(&anchor, to_insert), | ||
178 | } | ||
179 | rewriter | ||
180 | } | ||
181 | |||
182 | fn eq_visibility(vis0: Option<ast::Visibility>, vis1: Option<ast::Visibility>) -> bool { | ||
183 | match (vis0, vis1) { | ||
184 | (None, None) => true, | ||
185 | // FIXME: Don't use the string representation to check for equality | ||
186 | // spaces inside of the node would break this comparison | ||
187 | (Some(vis0), Some(vis1)) => vis0.to_string() == vis1.to_string(), | ||
188 | _ => false, | ||
189 | } | ||
190 | } | ||
191 | |||
192 | fn eq_attrs( | ||
193 | attrs0: impl Iterator<Item = ast::Attr>, | ||
194 | attrs1: impl Iterator<Item = ast::Attr>, | ||
195 | ) -> bool { | ||
196 | let attrs0 = attrs0.map(|attr| attr.to_string()); | ||
197 | let attrs1 = attrs1.map(|attr| attr.to_string()); | ||
198 | attrs0.eq(attrs1) | ||
199 | } | ||
200 | |||
201 | pub fn try_merge_imports( | ||
202 | lhs: &ast::Use, | ||
203 | rhs: &ast::Use, | ||
204 | merge_behavior: MergeBehavior, | ||
205 | ) -> Option<ast::Use> { | ||
206 | // don't merge imports with different visibilities | ||
207 | if !eq_visibility(lhs.visibility(), rhs.visibility()) { | ||
208 | return None; | ||
209 | } | ||
210 | if !eq_attrs(lhs.attrs(), rhs.attrs()) { | ||
211 | return None; | ||
212 | } | ||
213 | |||
214 | let lhs_tree = lhs.use_tree()?; | ||
215 | let rhs_tree = rhs.use_tree()?; | ||
216 | let merged = try_merge_trees(&lhs_tree, &rhs_tree, merge_behavior)?; | ||
217 | Some(lhs.with_use_tree(merged).clone_for_update()) | ||
218 | } | ||
219 | |||
220 | pub fn try_merge_trees( | ||
221 | lhs: &ast::UseTree, | ||
222 | rhs: &ast::UseTree, | ||
223 | merge: MergeBehavior, | ||
224 | ) -> Option<ast::UseTree> { | ||
225 | let lhs_path = lhs.path()?; | ||
226 | let rhs_path = rhs.path()?; | ||
227 | |||
228 | let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?; | ||
229 | let (lhs, rhs) = if is_simple_path(lhs) | ||
230 | && is_simple_path(rhs) | ||
231 | && lhs_path == lhs_prefix | ||
232 | && rhs_path == rhs_prefix | ||
233 | { | ||
234 | (lhs.clone(), rhs.clone()) | ||
235 | } else { | ||
236 | (lhs.split_prefix(&lhs_prefix), rhs.split_prefix(&rhs_prefix)) | ||
237 | }; | ||
238 | recursive_merge(&lhs, &rhs, merge).map(|it| it.clone_for_update()) | ||
239 | } | ||
240 | |||
241 | /// Recursively "zips" together lhs and rhs. | ||
242 | fn recursive_merge( | ||
243 | lhs: &ast::UseTree, | ||
244 | rhs: &ast::UseTree, | ||
245 | merge: MergeBehavior, | ||
246 | ) -> Option<ast::UseTree> { | ||
247 | let mut use_trees = lhs | ||
248 | .use_tree_list() | ||
249 | .into_iter() | ||
250 | .flat_map(|list| list.use_trees()) | ||
251 | // we use Option here to early return from this function(this is not the same as a `filter` op) | ||
252 | .map(|tree| match merge.is_tree_allowed(&tree) { | ||
253 | true => Some(tree), | ||
254 | false => None, | ||
255 | }) | ||
256 | .collect::<Option<Vec<_>>>()?; | ||
257 | use_trees.sort_unstable_by(|a, b| path_cmp_for_sort(a.path(), b.path())); | ||
258 | for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) { | ||
259 | if !merge.is_tree_allowed(&rhs_t) { | ||
260 | return None; | ||
261 | } | ||
262 | let rhs_path = rhs_t.path(); | ||
263 | match use_trees.binary_search_by(|lhs_t| { | ||
264 | let (lhs_t, rhs_t) = match lhs_t | ||
265 | .path() | ||
266 | .zip(rhs_path.clone()) | ||
267 | .and_then(|(lhs, rhs)| common_prefix(&lhs, &rhs)) | ||
268 | { | ||
269 | Some((lhs_p, rhs_p)) => (lhs_t.split_prefix(&lhs_p), rhs_t.split_prefix(&rhs_p)), | ||
270 | None => (lhs_t.clone(), rhs_t.clone()), | ||
271 | }; | ||
272 | |||
273 | path_cmp_bin_search(lhs_t.path(), rhs_t.path()) | ||
274 | }) { | ||
275 | Ok(idx) => { | ||
276 | let lhs_t = &mut use_trees[idx]; | ||
277 | let lhs_path = lhs_t.path()?; | ||
278 | let rhs_path = rhs_path?; | ||
279 | let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?; | ||
280 | if lhs_prefix == lhs_path && rhs_prefix == rhs_path { | ||
281 | let tree_is_self = |tree: ast::UseTree| { | ||
282 | tree.path().as_ref().map(path_is_self).unwrap_or(false) | ||
283 | }; | ||
284 | // check if only one of the two trees has a tree list, and whether that then contains `self` or not. | ||
285 | // If this is the case we can skip this iteration since the path without the list is already included in the other one via `self` | ||
286 | let tree_contains_self = |tree: &ast::UseTree| { | ||
287 | tree.use_tree_list() | ||
288 | .map(|tree_list| tree_list.use_trees().any(tree_is_self)) | ||
289 | .unwrap_or(false) | ||
290 | }; | ||
291 | match (tree_contains_self(&lhs_t), tree_contains_self(&rhs_t)) { | ||
292 | (true, false) => continue, | ||
293 | (false, true) => { | ||
294 | *lhs_t = rhs_t; | ||
295 | continue; | ||
296 | } | ||
297 | _ => (), | ||
298 | } | ||
299 | |||
300 | // glob imports arent part of the use-tree lists so we need to special handle them here as well | ||
301 | // this special handling is only required for when we merge a module import into a glob import of said module | ||
302 | // see the `merge_self_glob` or `merge_mod_into_glob` tests | ||
303 | if lhs_t.star_token().is_some() || rhs_t.star_token().is_some() { | ||
304 | *lhs_t = make::use_tree( | ||
305 | make::path_unqualified(make::path_segment_self()), | ||
306 | None, | ||
307 | None, | ||
308 | false, | ||
309 | ); | ||
310 | use_trees.insert(idx, make::glob_use_tree()); | ||
311 | continue; | ||
312 | } | ||
313 | |||
314 | if lhs_t.use_tree_list().is_none() && rhs_t.use_tree_list().is_none() { | ||
315 | continue; | ||
316 | } | ||
317 | } | ||
318 | let lhs = lhs_t.split_prefix(&lhs_prefix); | ||
319 | let rhs = rhs_t.split_prefix(&rhs_prefix); | ||
320 | match recursive_merge(&lhs, &rhs, merge) { | ||
321 | Some(use_tree) => use_trees[idx] = use_tree, | ||
322 | None => return None, | ||
323 | } | ||
324 | } | ||
325 | Err(_) | ||
326 | if merge == MergeBehavior::Last | ||
327 | && use_trees.len() > 0 | ||
328 | && rhs_t.use_tree_list().is_some() => | ||
329 | { | ||
330 | return None | ||
331 | } | ||
332 | Err(idx) => { | ||
333 | use_trees.insert(idx, rhs_t); | ||
334 | } | ||
335 | } | ||
336 | } | ||
337 | Some(lhs.with_use_tree_list(make::use_tree_list(use_trees))) | ||
338 | } | ||
339 | |||
340 | /// Traverses both paths until they differ, returning the common prefix of both. | ||
341 | fn common_prefix(lhs: &ast::Path, rhs: &ast::Path) -> Option<(ast::Path, ast::Path)> { | ||
342 | let mut res = None; | ||
343 | let mut lhs_curr = first_path(&lhs); | ||
344 | let mut rhs_curr = first_path(&rhs); | ||
345 | loop { | ||
346 | match (lhs_curr.segment(), rhs_curr.segment()) { | ||
347 | (Some(lhs), Some(rhs)) if lhs.syntax().text() == rhs.syntax().text() => (), | ||
348 | _ => break res, | ||
349 | } | ||
350 | res = Some((lhs_curr.clone(), rhs_curr.clone())); | ||
351 | |||
352 | match lhs_curr.parent_path().zip(rhs_curr.parent_path()) { | ||
353 | Some((lhs, rhs)) => { | ||
354 | lhs_curr = lhs; | ||
355 | rhs_curr = rhs; | ||
356 | } | ||
357 | _ => break res, | ||
358 | } | ||
359 | } | ||
360 | } | ||
361 | |||
362 | fn is_simple_path(use_tree: &ast::UseTree) -> bool { | ||
363 | use_tree.use_tree_list().is_none() && use_tree.star_token().is_none() | ||
364 | } | ||
365 | |||
366 | fn path_is_self(path: &ast::Path) -> bool { | ||
367 | path.segment().and_then(|seg| seg.self_token()).is_some() && path.qualifier().is_none() | ||
368 | } | ||
369 | |||
370 | #[inline] | ||
371 | fn first_segment(path: &ast::Path) -> Option<ast::PathSegment> { | ||
372 | first_path(path).segment() | ||
373 | } | ||
374 | |||
375 | fn first_path(path: &ast::Path) -> ast::Path { | ||
376 | successors(Some(path.clone()), ast::Path::qualifier).last().unwrap() | ||
377 | } | ||
378 | |||
379 | fn segment_iter(path: &ast::Path) -> impl Iterator<Item = ast::PathSegment> + Clone { | ||
380 | // cant make use of SyntaxNode::siblings, because the returned Iterator is not clone | ||
381 | successors(first_segment(path), |p| p.parent_path().parent_path().and_then(|p| p.segment())) | ||
382 | } | ||
383 | |||
384 | fn path_len(path: ast::Path) -> usize { | ||
385 | segment_iter(&path).count() | ||
386 | } | ||
387 | |||
388 | /// Orders paths in the following way: | ||
389 | /// the sole self token comes first, after that come uppercase identifiers, then lowercase identifiers | ||
390 | // FIXME: rustfmt sorts lowercase idents before uppercase, in general we want to have the same ordering rustfmt has | ||
391 | // which is `self` and `super` first, then identifier imports with lowercase ones first, then glob imports and at last list imports. | ||
392 | // Example foo::{self, foo, baz, Baz, Qux, *, {Bar}} | ||
393 | fn path_cmp_for_sort(a: Option<ast::Path>, b: Option<ast::Path>) -> Ordering { | ||
394 | match (a, b) { | ||
395 | (None, None) => Ordering::Equal, | ||
396 | (None, Some(_)) => Ordering::Less, | ||
397 | (Some(_), None) => Ordering::Greater, | ||
398 | (Some(ref a), Some(ref b)) => match (path_is_self(a), path_is_self(b)) { | ||
399 | (true, true) => Ordering::Equal, | ||
400 | (true, false) => Ordering::Less, | ||
401 | (false, true) => Ordering::Greater, | ||
402 | (false, false) => path_cmp_short(a, b), | ||
403 | }, | ||
404 | } | ||
405 | } | ||
406 | |||
407 | /// Path comparison func for binary searching for merging. | ||
408 | fn path_cmp_bin_search(lhs: Option<ast::Path>, rhs: Option<ast::Path>) -> Ordering { | ||
409 | match (lhs.as_ref().and_then(first_segment), rhs.as_ref().and_then(first_segment)) { | ||
410 | (None, None) => Ordering::Equal, | ||
411 | (None, Some(_)) => Ordering::Less, | ||
412 | (Some(_), None) => Ordering::Greater, | ||
413 | (Some(ref a), Some(ref b)) => path_segment_cmp(a, b), | ||
414 | } | ||
415 | } | ||
416 | |||
417 | /// Short circuiting comparison, if both paths are equal until one of them ends they are considered | ||
418 | /// equal | ||
419 | fn path_cmp_short(a: &ast::Path, b: &ast::Path) -> Ordering { | ||
420 | let a = segment_iter(a); | ||
421 | let b = segment_iter(b); | ||
422 | // cmp_by would be useful for us here but that is currently unstable | ||
423 | // cmp doesnt work due the lifetimes on text's return type | ||
424 | a.zip(b) | ||
425 | .find_map(|(a, b)| match path_segment_cmp(&a, &b) { | ||
426 | Ordering::Equal => None, | ||
427 | ord => Some(ord), | ||
428 | }) | ||
429 | .unwrap_or(Ordering::Equal) | ||
430 | } | ||
431 | |||
432 | /// Compares to paths, if one ends earlier than the other the has_tl parameters decide which is | ||
433 | /// greater as a a path that has a tree list should be greater, while one that just ends without | ||
434 | /// a tree list should be considered less. | ||
435 | fn use_tree_path_cmp(a: &ast::Path, a_has_tl: bool, b: &ast::Path, b_has_tl: bool) -> Ordering { | ||
436 | let a_segments = segment_iter(a); | ||
437 | let b_segments = segment_iter(b); | ||
438 | // cmp_by would be useful for us here but that is currently unstable | ||
439 | // cmp doesnt work due the lifetimes on text's return type | ||
440 | a_segments | ||
441 | .zip_longest(b_segments) | ||
442 | .find_map(|zipped| match zipped { | ||
443 | EitherOrBoth::Both(ref a, ref b) => match path_segment_cmp(a, b) { | ||
444 | Ordering::Equal => None, | ||
445 | ord => Some(ord), | ||
446 | }, | ||
447 | EitherOrBoth::Left(_) if !b_has_tl => Some(Ordering::Greater), | ||
448 | EitherOrBoth::Left(_) => Some(Ordering::Less), | ||
449 | EitherOrBoth::Right(_) if !a_has_tl => Some(Ordering::Less), | ||
450 | EitherOrBoth::Right(_) => Some(Ordering::Greater), | ||
451 | }) | ||
452 | .unwrap_or(Ordering::Equal) | ||
453 | } | ||
454 | |||
455 | fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering { | ||
456 | let a = a.kind().and_then(|kind| match kind { | ||
457 | PathSegmentKind::Name(name_ref) => Some(name_ref), | ||
458 | _ => None, | ||
459 | }); | ||
460 | let b = b.kind().and_then(|kind| match kind { | ||
461 | PathSegmentKind::Name(name_ref) => Some(name_ref), | ||
462 | _ => None, | ||
463 | }); | ||
464 | a.as_ref().map(ast::NameRef::text).cmp(&b.as_ref().map(ast::NameRef::text)) | ||
465 | } | ||
466 | |||
467 | /// What type of merges are allowed. | ||
468 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] | ||
469 | pub enum MergeBehavior { | ||
470 | /// Merge everything together creating deeply nested imports. | ||
471 | Full, | ||
472 | /// Only merge the last import level, doesn't allow import nesting. | ||
473 | Last, | ||
474 | } | ||
475 | |||
476 | impl MergeBehavior { | ||
477 | #[inline] | ||
478 | fn is_tree_allowed(&self, tree: &ast::UseTree) -> bool { | ||
479 | match self { | ||
480 | MergeBehavior::Full => true, | ||
481 | // only simple single segment paths are allowed | ||
482 | MergeBehavior::Last => { | ||
483 | tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1) | ||
484 | } | ||
485 | } | ||
486 | } | ||
487 | } | 88 | } |
488 | 89 | ||
489 | #[derive(Eq, PartialEq, PartialOrd, Ord)] | 90 | #[derive(Eq, PartialEq, PartialOrd, Ord)] |
@@ -500,7 +101,7 @@ impl ImportGroup { | |||
500 | fn new(path: &ast::Path) -> ImportGroup { | 101 | fn new(path: &ast::Path) -> ImportGroup { |
501 | let default = ImportGroup::ExternCrate; | 102 | let default = ImportGroup::ExternCrate; |
502 | 103 | ||
503 | let first_segment = match first_segment(path) { | 104 | let first_segment = match path.first_segment() { |
504 | Some(it) => it, | 105 | Some(it) => it, |
505 | None => return default, | 106 | None => return default, |
506 | }; | 107 | }; |
@@ -520,32 +121,15 @@ impl ImportGroup { | |||
520 | } | 121 | } |
521 | } | 122 | } |
522 | 123 | ||
523 | #[derive(PartialEq, Eq)] | 124 | fn insert_use_( |
524 | enum AddBlankLine { | ||
525 | Before, | ||
526 | BeforeTwice, | ||
527 | Around, | ||
528 | After, | ||
529 | AfterTwice, | ||
530 | } | ||
531 | |||
532 | impl AddBlankLine { | ||
533 | fn has_before(&self) -> bool { | ||
534 | matches!(self, AddBlankLine::Before | AddBlankLine::BeforeTwice | AddBlankLine::Around) | ||
535 | } | ||
536 | fn has_after(&self) -> bool { | ||
537 | matches!(self, AddBlankLine::After | AddBlankLine::AfterTwice | AddBlankLine::Around) | ||
538 | } | ||
539 | } | ||
540 | |||
541 | fn find_insert_position( | ||
542 | scope: &ImportScope, | 125 | scope: &ImportScope, |
543 | insert_path: ast::Path, | 126 | insert_path: ast::Path, |
544 | group_imports: bool, | 127 | group_imports: bool, |
545 | ) -> (InsertPosition<SyntaxElement>, AddBlankLine) { | 128 | use_item: ast::Use, |
129 | ) { | ||
130 | let scope_syntax = scope.as_syntax_node(); | ||
546 | let group = ImportGroup::new(&insert_path); | 131 | let group = ImportGroup::new(&insert_path); |
547 | let path_node_iter = scope | 132 | let path_node_iter = scope_syntax |
548 | .as_syntax_node() | ||
549 | .children() | 133 | .children() |
550 | .filter_map(|node| ast::Use::cast(node.clone()).zip(Some(node))) | 134 | .filter_map(|node| ast::Use::cast(node.clone()).zip(Some(node))) |
551 | .flat_map(|(use_, node)| { | 135 | .flat_map(|(use_, node)| { |
@@ -557,9 +141,14 @@ fn find_insert_position( | |||
557 | 141 | ||
558 | if !group_imports { | 142 | if !group_imports { |
559 | if let Some((_, _, node)) = path_node_iter.last() { | 143 | if let Some((_, _, node)) = path_node_iter.last() { |
560 | return (InsertPosition::After(node.into()), AddBlankLine::Before); | 144 | cov_mark::hit!(insert_no_grouping_last); |
145 | ted::insert(ted::Position::after(node), use_item.syntax()); | ||
146 | } else { | ||
147 | cov_mark::hit!(insert_no_grouping_last2); | ||
148 | ted::insert(ted::Position::first_child_of(scope_syntax), make::tokens::blank_line()); | ||
149 | ted::insert(ted::Position::first_child_of(scope_syntax), use_item.syntax()); | ||
561 | } | 150 | } |
562 | return (InsertPosition::First, AddBlankLine::AfterTwice); | 151 | return; |
563 | } | 152 | } |
564 | 153 | ||
565 | // Iterator that discards anything thats not in the required grouping | 154 | // Iterator that discards anything thats not in the required grouping |
@@ -572,43 +161,91 @@ fn find_insert_position( | |||
572 | // track the last element we iterated over, if this is still None after the iteration then that means we never iterated in the first place | 161 | // track the last element we iterated over, if this is still None after the iteration then that means we never iterated in the first place |
573 | let mut last = None; | 162 | let mut last = None; |
574 | // find the element that would come directly after our new import | 163 | // find the element that would come directly after our new import |
575 | let post_insert = group_iter.inspect(|(.., node)| last = Some(node.clone())).find( | 164 | let post_insert: Option<(_, _, SyntaxNode)> = group_iter |
576 | |&(ref path, has_tl, _)| { | 165 | .inspect(|(.., node)| last = Some(node.clone())) |
166 | .find(|&(ref path, has_tl, _)| { | ||
577 | use_tree_path_cmp(&insert_path, false, path, has_tl) != Ordering::Greater | 167 | use_tree_path_cmp(&insert_path, false, path, has_tl) != Ordering::Greater |
578 | }, | 168 | }); |
579 | ); | ||
580 | 169 | ||
581 | match post_insert { | 170 | if let Some((.., node)) = post_insert { |
171 | cov_mark::hit!(insert_group); | ||
582 | // insert our import before that element | 172 | // insert our import before that element |
583 | Some((.., node)) => (InsertPosition::Before(node.into()), AddBlankLine::After), | 173 | return ted::insert(ted::Position::before(node), use_item.syntax()); |
174 | } | ||
175 | if let Some(node) = last { | ||
176 | cov_mark::hit!(insert_group_last); | ||
584 | // there is no element after our new import, so append it to the end of the group | 177 | // there is no element after our new import, so append it to the end of the group |
585 | None => match last { | 178 | return ted::insert(ted::Position::after(node), use_item.syntax()); |
586 | Some(node) => (InsertPosition::After(node.into()), AddBlankLine::Before), | 179 | } |
587 | // the group we were looking for actually doesnt exist, so insert | 180 | |
181 | // the group we were looking for actually doesn't exist, so insert | ||
182 | |||
183 | let mut last = None; | ||
184 | // find the group that comes after where we want to insert | ||
185 | let post_group = path_node_iter | ||
186 | .inspect(|(.., node)| last = Some(node.clone())) | ||
187 | .find(|(p, ..)| ImportGroup::new(p) > group); | ||
188 | if let Some((.., node)) = post_group { | ||
189 | cov_mark::hit!(insert_group_new_group); | ||
190 | ted::insert(ted::Position::before(&node), use_item.syntax()); | ||
191 | if let Some(node) = algo::non_trivia_sibling(node.into(), Direction::Prev) { | ||
192 | ted::insert(ted::Position::after(node), make::tokens::single_newline()); | ||
193 | } | ||
194 | return; | ||
195 | } | ||
196 | // there is no such group, so append after the last one | ||
197 | if let Some(node) = last { | ||
198 | cov_mark::hit!(insert_group_no_group); | ||
199 | ted::insert(ted::Position::after(&node), use_item.syntax()); | ||
200 | ted::insert(ted::Position::after(node), make::tokens::single_newline()); | ||
201 | return; | ||
202 | } | ||
203 | // there are no imports in this file at all | ||
204 | if let Some(last_inner_element) = scope_syntax | ||
205 | .children_with_tokens() | ||
206 | .filter(|child| match child { | ||
207 | NodeOrToken::Node(node) => is_inner_attribute(node.clone()), | ||
208 | NodeOrToken::Token(token) => is_inner_comment(token.clone()), | ||
209 | }) | ||
210 | .last() | ||
211 | { | ||
212 | cov_mark::hit!(insert_group_empty_inner_attr); | ||
213 | ted::insert(ted::Position::after(&last_inner_element), use_item.syntax()); | ||
214 | ted::insert(ted::Position::after(last_inner_element), make::tokens::single_newline()); | ||
215 | return; | ||
216 | } | ||
217 | match scope { | ||
218 | ImportScope::File(_) => { | ||
219 | cov_mark::hit!(insert_group_empty_file); | ||
220 | ted::insert(ted::Position::first_child_of(scope_syntax), make::tokens::blank_line()); | ||
221 | ted::insert(ted::Position::first_child_of(scope_syntax), use_item.syntax()) | ||
222 | } | ||
223 | // don't insert the imports before the item list's opening curly brace | ||
224 | ImportScope::Module(item_list) => match item_list.l_curly_token() { | ||
225 | Some(b) => { | ||
226 | cov_mark::hit!(insert_group_empty_module); | ||
227 | ted::insert(ted::Position::after(&b), make::tokens::single_newline()); | ||
228 | ted::insert(ted::Position::after(&b), use_item.syntax()); | ||
229 | } | ||
588 | None => { | 230 | None => { |
589 | // similar concept here to the `last` from above | 231 | // This should never happens, broken module syntax node |
590 | let mut last = None; | 232 | ted::insert( |
591 | // find the group that comes after where we want to insert | 233 | ted::Position::first_child_of(scope_syntax), |
592 | let post_group = path_node_iter | 234 | make::tokens::blank_line(), |
593 | .inspect(|(.., node)| last = Some(node.clone())) | 235 | ); |
594 | .find(|(p, ..)| ImportGroup::new(p) > group); | 236 | ted::insert(ted::Position::first_child_of(scope_syntax), use_item.syntax()); |
595 | match post_group { | ||
596 | Some((.., node)) => { | ||
597 | (InsertPosition::Before(node.into()), AddBlankLine::AfterTwice) | ||
598 | } | ||
599 | // there is no such group, so append after the last one | ||
600 | None => match last { | ||
601 | Some(node) => { | ||
602 | (InsertPosition::After(node.into()), AddBlankLine::BeforeTwice) | ||
603 | } | ||
604 | // there are no imports in this file at all | ||
605 | None => scope.insert_pos_after_last_inner_element(), | ||
606 | }, | ||
607 | } | ||
608 | } | 237 | } |
609 | }, | 238 | }, |
610 | } | 239 | } |
611 | } | 240 | } |
612 | 241 | ||
242 | fn is_inner_attribute(node: SyntaxNode) -> bool { | ||
243 | ast::Attr::cast(node).map(|attr| attr.kind()) == Some(ast::AttrKind::Inner) | ||
244 | } | ||
245 | |||
246 | fn is_inner_comment(token: SyntaxToken) -> bool { | ||
247 | ast::Comment::cast(token).and_then(|comment| comment.kind().doc) | ||
248 | == Some(ast::CommentPlacement::Inner) | ||
249 | } | ||
613 | #[cfg(test)] | 250 | #[cfg(test)] |
614 | mod tests; | 251 | mod tests; |
diff --git a/crates/ide_db/src/helpers/insert_use/tests.rs b/crates/ide_db/src/helpers/insert_use/tests.rs
index 3d151e629..048c213e2 100644
--- a/crates/ide_db/src/helpers/insert_use/tests.rs
+++ b/crates/ide_db/src/helpers/insert_use/tests.rs
@@ -5,6 +5,7 @@ use test_utils::assert_eq_text;
 
 #[test]
 fn insert_not_group() {
+    cov_mark::check!(insert_no_grouping_last);
     check(
         "use external_crate2::bar::A",
         r"
@@ -27,6 +28,21 @@ use external_crate2::bar::A;",
 }
 
 #[test]
+fn insert_not_group_empty() {
+    cov_mark::check!(insert_no_grouping_last2);
+    check(
+        "use external_crate2::bar::A",
+        r"",
+        r"use external_crate2::bar::A;
+
+",
+        None,
+        false,
+        false,
+    );
+}
+
+#[test]
 fn insert_existing() {
     check_full("std::fs", "use std::fs;", "use std::fs;")
 }
@@ -51,21 +67,21 @@ use std::bar::G;",
 
 #[test]
 fn insert_start_indent() {
-    cov_mark::check!(insert_use_indent_after);
     check_none(
         "std::bar::AA",
         r"
 use std::bar::B;
-use std::bar::D;",
+use std::bar::C;",
         r"
 use std::bar::AA;
 use std::bar::B;
-use std::bar::D;",
-    )
+use std::bar::C;",
+    );
 }
 
 #[test]
 fn insert_middle() {
+    cov_mark::check!(insert_group);
     check_none(
         "std::bar::EE",
         r"
@@ -102,6 +118,7 @@ fn insert_middle_indent() {
 
 #[test]
 fn insert_end() {
+    cov_mark::check!(insert_group_last);
     check_none(
         "std::bar::ZZ",
         r"
@@ -120,7 +137,6 @@ use std::bar::ZZ;",
 
 #[test]
 fn insert_end_indent() {
-    cov_mark::check!(insert_use_indent_before);
     check_none(
         "std::bar::ZZ",
         r"
@@ -201,6 +217,7 @@ fn insert_first_matching_group() {
 
 #[test]
 fn insert_missing_group_std() {
+    cov_mark::check!(insert_group_new_group);
     check_none(
         "std::fmt",
         r"
@@ -216,6 +233,7 @@ fn insert_missing_group_std() {
 
 #[test]
 fn insert_missing_group_self() {
+    cov_mark::check!(insert_group_no_group);
     check_none(
         "self::fmt",
         r"
@@ -242,6 +260,7 @@ fn main() {}",
 
 #[test]
 fn insert_empty_file() {
+    cov_mark::check!(insert_group_empty_file);
     // empty files will get two trailing newlines
     // this is due to the test case insert_no_imports above
     check_full(
@@ -255,7 +274,7 @@ fn insert_empty_file() {
 
 #[test]
 fn insert_empty_module() {
-    cov_mark::check!(insert_use_no_indent_after);
+    cov_mark::check!(insert_group_empty_module);
     check(
         "foo::bar",
         "mod x {}",
@@ -270,6 +289,7 @@ fn insert_empty_module() {
 
 #[test]
 fn insert_after_inner_attr() {
+    cov_mark::check!(insert_group_empty_inner_attr);
     check_full(
         "foo::bar",
         r"#![allow(unused_imports)]",
@@ -615,7 +635,7 @@ fn check(
     if module {
         syntax = syntax.descendants().find_map(ast::Module::cast).unwrap().syntax().clone();
     }
-    let file = super::ImportScope::from(syntax).unwrap();
+    let file = super::ImportScope::from(syntax.clone_for_update()).unwrap();
     let path = ast::SourceFile::parse(&format!("use {};", path))
         .tree()
         .syntax()
@@ -623,12 +643,8 @@ fn check(
         .find_map(ast::Path::cast)
         .unwrap();
 
-    let rewriter = insert_use(
-        &file,
-        path,
-        InsertUseConfig { merge: mb, prefix_kind: PrefixKind::Plain, group },
-    );
-    let result = rewriter.rewrite(file.as_syntax_node()).to_string();
+    insert_use(&file, path, InsertUseConfig { merge: mb, prefix_kind: PrefixKind::Plain, group });
+    let result = file.as_syntax_node().to_string();
     assert_eq_text!(ra_fixture_after, &result);
 }
 
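The updated `check` helper shows the new calling convention end to end: the scope must be a mutable tree (`clone_for_update`), `insert_use` edits it in place instead of returning a `SyntaxRewriter`, and the result is read back off the scope node. Condensed into a sketch (the `add_import` wrapper is hypothetical and the config values are arbitrary):

    use ide_db::helpers::insert_use::{insert_use, ImportScope, InsertUseConfig, PrefixKind};
    use syntax::{ast, AstNode};

    fn add_import(file: &ast::SourceFile, path: ast::Path) -> Option<String> {
        // `insert_use` now mutates the tree, so work on a `clone_for_update` copy.
        let scope = ImportScope::find_insert_use_container(file.syntax())?.clone_for_update();
        insert_use(
            &scope,
            path,
            InsertUseConfig { merge: None, prefix_kind: PrefixKind::Plain, group: true },
        );
        // The edited text is read back from the cloned scope node.
        Some(scope.as_syntax_node().to_string())
    }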
diff --git a/crates/ide_db/src/helpers/merge_imports.rs b/crates/ide_db/src/helpers/merge_imports.rs
new file mode 100644
index 000000000..3f5bbef7f
--- /dev/null
+++ b/crates/ide_db/src/helpers/merge_imports.rs
@@ -0,0 +1,309 @@
1 | //! Handle syntactic aspects of merging UseTrees. | ||
2 | use std::cmp::Ordering; | ||
3 | |||
4 | use itertools::{EitherOrBoth, Itertools}; | ||
5 | use syntax::ast::{ | ||
6 | self, edit::AstNodeEdit, make, AstNode, AttrsOwner, PathSegmentKind, VisibilityOwner, | ||
7 | }; | ||
8 | |||
9 | /// What type of merges are allowed. | ||
10 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] | ||
11 | pub enum MergeBehavior { | ||
12 | /// Merge everything together creating deeply nested imports. | ||
13 | Full, | ||
14 | /// Only merge the last import level, doesn't allow import nesting. | ||
15 | Last, | ||
16 | } | ||
17 | |||
18 | impl MergeBehavior { | ||
19 | #[inline] | ||
20 | fn is_tree_allowed(&self, tree: &ast::UseTree) -> bool { | ||
21 | match self { | ||
22 | MergeBehavior::Full => true, | ||
23 | // only simple single segment paths are allowed | ||
24 | MergeBehavior::Last => { | ||
25 | tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1) | ||
26 | } | ||
27 | } | ||
28 | } | ||
29 | } | ||
30 | |||
31 | pub fn try_merge_imports( | ||
32 | lhs: &ast::Use, | ||
33 | rhs: &ast::Use, | ||
34 | merge_behavior: MergeBehavior, | ||
35 | ) -> Option<ast::Use> { | ||
36 | // don't merge imports with different visibilities | ||
37 | if !eq_visibility(lhs.visibility(), rhs.visibility()) { | ||
38 | return None; | ||
39 | } | ||
40 | if !eq_attrs(lhs.attrs(), rhs.attrs()) { | ||
41 | return None; | ||
42 | } | ||
43 | |||
44 | let lhs_tree = lhs.use_tree()?; | ||
45 | let rhs_tree = rhs.use_tree()?; | ||
46 | let merged = try_merge_trees(&lhs_tree, &rhs_tree, merge_behavior)?; | ||
47 | Some(lhs.with_use_tree(merged).clone_for_update()) | ||
48 | } | ||
49 | |||
50 | pub fn try_merge_trees( | ||
51 | lhs: &ast::UseTree, | ||
52 | rhs: &ast::UseTree, | ||
53 | merge: MergeBehavior, | ||
54 | ) -> Option<ast::UseTree> { | ||
55 | let lhs_path = lhs.path()?; | ||
56 | let rhs_path = rhs.path()?; | ||
57 | |||
58 | let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?; | ||
59 | let (lhs, rhs) = if lhs.is_simple_path() | ||
60 | && rhs.is_simple_path() | ||
61 | && lhs_path == lhs_prefix | ||
62 | && rhs_path == rhs_prefix | ||
63 | { | ||
64 | (lhs.clone(), rhs.clone()) | ||
65 | } else { | ||
66 | (lhs.split_prefix(&lhs_prefix), rhs.split_prefix(&rhs_prefix)) | ||
67 | }; | ||
68 | recursive_merge(&lhs, &rhs, merge) | ||
69 | } | ||
70 | |||
71 | /// Recursively "zips" together lhs and rhs. | ||
72 | fn recursive_merge( | ||
73 | lhs: &ast::UseTree, | ||
74 | rhs: &ast::UseTree, | ||
75 | merge: MergeBehavior, | ||
76 | ) -> Option<ast::UseTree> { | ||
77 | let mut use_trees = lhs | ||
78 | .use_tree_list() | ||
79 | .into_iter() | ||
80 | .flat_map(|list| list.use_trees()) | ||
81 | // we use Option here to early return from this function(this is not the same as a `filter` op) | ||
82 | .map(|tree| match merge.is_tree_allowed(&tree) { | ||
83 | true => Some(tree), | ||
84 | false => None, | ||
85 | }) | ||
86 | .collect::<Option<Vec<_>>>()?; | ||
87 | use_trees.sort_unstable_by(|a, b| path_cmp_for_sort(a.path(), b.path())); | ||
88 | for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) { | ||
89 | if !merge.is_tree_allowed(&rhs_t) { | ||
90 | return None; | ||
91 | } | ||
92 | let rhs_path = rhs_t.path(); | ||
93 | match use_trees.binary_search_by(|lhs_t| { | ||
94 | let (lhs_t, rhs_t) = match lhs_t | ||
95 | .path() | ||
96 | .zip(rhs_path.clone()) | ||
97 | .and_then(|(lhs, rhs)| common_prefix(&lhs, &rhs)) | ||
98 | { | ||
99 | Some((lhs_p, rhs_p)) => (lhs_t.split_prefix(&lhs_p), rhs_t.split_prefix(&rhs_p)), | ||
100 | None => (lhs_t.clone(), rhs_t.clone()), | ||
101 | }; | ||
102 | |||
103 | path_cmp_bin_search(lhs_t.path(), rhs_t.path()) | ||
104 | }) { | ||
105 | Ok(idx) => { | ||
106 | let lhs_t = &mut use_trees[idx]; | ||
107 | let lhs_path = lhs_t.path()?; | ||
108 | let rhs_path = rhs_path?; | ||
109 | let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?; | ||
110 | if lhs_prefix == lhs_path && rhs_prefix == rhs_path { | ||
111 | let tree_is_self = |tree: ast::UseTree| { | ||
112 | tree.path().as_ref().map(path_is_self).unwrap_or(false) | ||
113 | }; | ||
114 | // check if only one of the two trees has a tree list, and whether that then contains `self` or not. | ||
115 | // If this is the case we can skip this iteration since the path without the list is already included in the other one via `self` | ||
116 | let tree_contains_self = |tree: &ast::UseTree| { | ||
117 | tree.use_tree_list() | ||
118 | .map(|tree_list| tree_list.use_trees().any(tree_is_self)) | ||
119 | .unwrap_or(false) | ||
120 | }; | ||
121 | match (tree_contains_self(&lhs_t), tree_contains_self(&rhs_t)) { | ||
122 | (true, false) => continue, | ||
123 | (false, true) => { | ||
124 | *lhs_t = rhs_t; | ||
125 | continue; | ||
126 | } | ||
127 | _ => (), | ||
128 | } | ||
129 | |||
130 | // glob imports arent part of the use-tree lists so we need to special handle them here as well | ||
131 | // this special handling is only required for when we merge a module import into a glob import of said module | ||
132 | // see the `merge_self_glob` or `merge_mod_into_glob` tests | ||
133 | if lhs_t.star_token().is_some() || rhs_t.star_token().is_some() { | ||
134 | *lhs_t = make::use_tree( | ||
135 | make::path_unqualified(make::path_segment_self()), | ||
136 | None, | ||
137 | None, | ||
138 | false, | ||
139 | ); | ||
140 | use_trees.insert(idx, make::glob_use_tree()); | ||
141 | continue; | ||
142 | } | ||
143 | |||
144 | if lhs_t.use_tree_list().is_none() && rhs_t.use_tree_list().is_none() { | ||
145 | continue; | ||
146 | } | ||
147 | } | ||
148 | let lhs = lhs_t.split_prefix(&lhs_prefix); | ||
149 | let rhs = rhs_t.split_prefix(&rhs_prefix); | ||
150 | match recursive_merge(&lhs, &rhs, merge) { | ||
151 | Some(use_tree) => use_trees[idx] = use_tree, | ||
152 | None => return None, | ||
153 | } | ||
154 | } | ||
155 | Err(_) | ||
156 | if merge == MergeBehavior::Last | ||
157 | && use_trees.len() > 0 | ||
158 | && rhs_t.use_tree_list().is_some() => | ||
159 | { | ||
160 | return None | ||
161 | } | ||
162 | Err(idx) => { | ||
163 | use_trees.insert(idx, rhs_t); | ||
164 | } | ||
165 | } | ||
166 | } | ||
167 | |||
168 | Some(if let Some(old) = lhs.use_tree_list() { | ||
169 | lhs.replace_descendant(old, make::use_tree_list(use_trees)).clone_for_update() | ||
170 | } else { | ||
171 | lhs.clone() | ||
172 | }) | ||
173 | } | ||
174 | |||
175 | /// Traverses both paths until they differ, returning the common prefix of both. | ||
176 | fn common_prefix(lhs: &ast::Path, rhs: &ast::Path) -> Option<(ast::Path, ast::Path)> { | ||
177 | let mut res = None; | ||
178 | let mut lhs_curr = lhs.first_qualifier_or_self(); | ||
179 | let mut rhs_curr = rhs.first_qualifier_or_self(); | ||
180 | loop { | ||
181 | match (lhs_curr.segment(), rhs_curr.segment()) { | ||
182 | (Some(lhs), Some(rhs)) if lhs.syntax().text() == rhs.syntax().text() => (), | ||
183 | _ => break res, | ||
184 | } | ||
185 | res = Some((lhs_curr.clone(), rhs_curr.clone())); | ||
186 | |||
187 | match lhs_curr.parent_path().zip(rhs_curr.parent_path()) { | ||
188 | Some((lhs, rhs)) => { | ||
189 | lhs_curr = lhs; | ||
190 | rhs_curr = rhs; | ||
191 | } | ||
192 | _ => break res, | ||
193 | } | ||
194 | } | ||
195 | } | ||
196 | |||
197 | /// Orders paths in the following way: | ||
198 | /// the sole self token comes first, after that come uppercase identifiers, then lowercase identifiers | ||
199 | // FIXME: rustfmt sorts lowercase idents before uppercase, in general we want to have the same ordering rustfmt has | ||
200 | // which is `self` and `super` first, then identifier imports with lowercase ones first, then glob imports and at last list imports. | ||
201 | // Example foo::{self, foo, baz, Baz, Qux, *, {Bar}} | ||
202 | fn path_cmp_for_sort(a: Option<ast::Path>, b: Option<ast::Path>) -> Ordering { | ||
203 | match (a, b) { | ||
204 | (None, None) => Ordering::Equal, | ||
205 | (None, Some(_)) => Ordering::Less, | ||
206 | (Some(_), None) => Ordering::Greater, | ||
207 | (Some(ref a), Some(ref b)) => match (path_is_self(a), path_is_self(b)) { | ||
208 | (true, true) => Ordering::Equal, | ||
209 | (true, false) => Ordering::Less, | ||
210 | (false, true) => Ordering::Greater, | ||
211 | (false, false) => path_cmp_short(a, b), | ||
212 | }, | ||
213 | } | ||
214 | } | ||
215 | |||
216 | /// Path comparison func for binary searching for merging. | ||
217 | fn path_cmp_bin_search(lhs: Option<ast::Path>, rhs: Option<ast::Path>) -> Ordering { | ||
218 | match ( | ||
219 | lhs.as_ref().and_then(ast::Path::first_segment), | ||
220 | rhs.as_ref().and_then(ast::Path::first_segment), | ||
221 | ) { | ||
222 | (None, None) => Ordering::Equal, | ||
223 | (None, Some(_)) => Ordering::Less, | ||
224 | (Some(_), None) => Ordering::Greater, | ||
225 | (Some(ref a), Some(ref b)) => path_segment_cmp(a, b), | ||
226 | } | ||
227 | } | ||
228 | |||
229 | /// Short circuiting comparison, if both paths are equal until one of them ends they are considered | ||
230 | /// equal | ||
231 | fn path_cmp_short(a: &ast::Path, b: &ast::Path) -> Ordering { | ||
232 | let a = a.segments(); | ||
233 | let b = b.segments(); | ||
234 | // cmp_by would be useful for us here but that is currently unstable | ||
235 | // cmp doesn't work due the lifetimes on text's return type | ||
236 | a.zip(b) | ||
237 | .find_map(|(a, b)| match path_segment_cmp(&a, &b) { | ||
238 | Ordering::Equal => None, | ||
239 | ord => Some(ord), | ||
240 | }) | ||
241 | .unwrap_or(Ordering::Equal) | ||
242 | } | ||
243 | |||
244 | /// Compares two paths, if one ends earlier than the other the has_tl parameters decide which is | ||
245 | /// greater as a a path that has a tree list should be greater, while one that just ends without | ||
246 | /// a tree list should be considered less. | ||
247 | pub(super) fn use_tree_path_cmp( | ||
248 | a: &ast::Path, | ||
249 | a_has_tl: bool, | ||
250 | b: &ast::Path, | ||
251 | b_has_tl: bool, | ||
252 | ) -> Ordering { | ||
253 | let a_segments = a.segments(); | ||
254 | let b_segments = b.segments(); | ||
255 | // cmp_by would be useful for us here but that is currently unstable | ||
256 | // cmp doesn't work due the lifetimes on text's return type | ||
257 | a_segments | ||
258 | .zip_longest(b_segments) | ||
259 | .find_map(|zipped| match zipped { | ||
260 | EitherOrBoth::Both(ref a, ref b) => match path_segment_cmp(a, b) { | ||
261 | Ordering::Equal => None, | ||
262 | ord => Some(ord), | ||
263 | }, | ||
264 | EitherOrBoth::Left(_) if !b_has_tl => Some(Ordering::Greater), | ||
265 | EitherOrBoth::Left(_) => Some(Ordering::Less), | ||
266 | EitherOrBoth::Right(_) if !a_has_tl => Some(Ordering::Less), | ||
267 | EitherOrBoth::Right(_) => Some(Ordering::Greater), | ||
268 | }) | ||
269 | .unwrap_or(Ordering::Equal) | ||
270 | } | ||
271 | |||
272 | fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering { | ||
273 | let a = a.kind().and_then(|kind| match kind { | ||
274 | PathSegmentKind::Name(name_ref) => Some(name_ref), | ||
275 | _ => None, | ||
276 | }); | ||
277 | let b = b.kind().and_then(|kind| match kind { | ||
278 | PathSegmentKind::Name(name_ref) => Some(name_ref), | ||
279 | _ => None, | ||
280 | }); | ||
281 | a.as_ref().map(ast::NameRef::text).cmp(&b.as_ref().map(ast::NameRef::text)) | ||
282 | } | ||
283 | |||
284 | fn eq_visibility(vis0: Option<ast::Visibility>, vis1: Option<ast::Visibility>) -> bool { | ||
285 | match (vis0, vis1) { | ||
286 | (None, None) => true, | ||
287 | // FIXME: Don't use the string representation to check for equality; | ||
288 | // spaces inside the node would break this comparison | ||
289 | (Some(vis0), Some(vis1)) => vis0.to_string() == vis1.to_string(), | ||
290 | _ => false, | ||
291 | } | ||
292 | } | ||
293 | |||
294 | fn eq_attrs( | ||
295 | attrs0: impl Iterator<Item = ast::Attr>, | ||
296 | attrs1: impl Iterator<Item = ast::Attr>, | ||
297 | ) -> bool { | ||
298 | let attrs0 = attrs0.map(|attr| attr.to_string()); | ||
299 | let attrs1 = attrs1.map(|attr| attr.to_string()); | ||
300 | attrs0.eq(attrs1) | ||
301 | } | ||
302 | |||
303 | fn path_is_self(path: &ast::Path) -> bool { | ||
304 | path.segment().and_then(|seg| seg.self_token()).is_some() && path.qualifier().is_none() | ||
305 | } | ||
306 | |||
307 | fn path_len(path: ast::Path) -> usize { | ||
308 | path.segments().count() | ||
309 | } | ||
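To make the tie-break in `use_tree_path_cmp` above concrete, here is a standalone sketch, not the rust-analyzer implementation: plain `&str` segments stand in for `ast::PathSegment`, and the `*_has_tl` flags mean "this path ends in a use-tree list".

```rust
use std::cmp::Ordering;

// Standalone sketch of the `use_tree_path_cmp` ordering rule, with plain string
// segments standing in for `ast::PathSegment` (a simplification for illustration).
fn use_tree_path_cmp(a: &[&str], a_has_tl: bool, b: &[&str], b_has_tl: bool) -> Ordering {
    let mut a_iter = a.iter();
    let mut b_iter = b.iter();
    loop {
        match (a_iter.next(), b_iter.next()) {
            // Both paths still have segments: compare them, keep going on a tie.
            (Some(x), Some(y)) => match x.cmp(y) {
                Ordering::Equal => {}
                ord => return ord,
            },
            // `a` is longer: it wins unless `b` carries a tree list.
            (Some(_), None) => return if b_has_tl { Ordering::Less } else { Ordering::Greater },
            // `b` is longer: the symmetric case.
            (None, Some(_)) => return if a_has_tl { Ordering::Greater } else { Ordering::Less },
            // Same length and all segments equal.
            (None, None) => return Ordering::Equal,
        }
    }
}

fn main() {
    // `foo::bar` vs `foo::bar::baz`, no tree lists: the shorter path sorts first.
    assert_eq!(
        use_tree_path_cmp(&["foo", "bar"], false, &["foo", "bar", "baz"], false),
        Ordering::Less
    );
    // `foo::bar::{..}` vs `foo::bar::baz`: the path carrying a tree list is greater.
    assert_eq!(
        use_tree_path_cmp(&["foo", "bar"], true, &["foo", "bar", "baz"], false),
        Ordering::Greater
    );
}
```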
diff --git a/crates/ide_db/src/helpers/rust_doc.rs b/crates/ide_db/src/helpers/rust_doc.rs new file mode 100644 index 000000000..e27e23867 --- /dev/null +++ b/crates/ide_db/src/helpers/rust_doc.rs | |||
@@ -0,0 +1,34 @@ | |||
1 | //! Rustdoc-specific doc comment handling | ||
2 | |||
3 | // stripped down version of https://github.com/rust-lang/rust/blob/392ba2ba1a7d6c542d2459fb8133bebf62a4a423/src/librustdoc/html/markdown.rs#L810-L933 | ||
4 | pub fn is_rust_fence(s: &str) -> bool { | ||
5 | let mut seen_rust_tags = false; | ||
6 | let mut seen_other_tags = false; | ||
7 | |||
8 | let tokens = s | ||
9 | .trim() | ||
10 | .split(|c| c == ',' || c == ' ' || c == '\t') | ||
11 | .map(str::trim) | ||
12 | .filter(|t| !t.is_empty()); | ||
13 | |||
14 | for token in tokens { | ||
15 | match token { | ||
16 | "should_panic" | "no_run" | "ignore" | "allow_fail" => { | ||
17 | seen_rust_tags = !seen_other_tags | ||
18 | } | ||
19 | "rust" => seen_rust_tags = true, | ||
20 | "test_harness" | "compile_fail" => seen_rust_tags = !seen_other_tags || seen_rust_tags, | ||
21 | x if x.starts_with("edition") => {} | ||
22 | x if x.starts_with('E') && x.len() == 5 => { | ||
23 | if x[1..].parse::<u32>().is_ok() { | ||
24 | seen_rust_tags = !seen_other_tags || seen_rust_tags; | ||
25 | } else { | ||
26 | seen_other_tags = true; | ||
27 | } | ||
28 | } | ||
29 | _ => seen_other_tags = true, | ||
30 | } | ||
31 | } | ||
32 | |||
33 | !seen_other_tags || seen_rust_tags | ||
34 | } | ||
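A few spot checks of the fence rules implemented above. The import path is an assumption: it presumes the new module ends up exposed as `ide_db::helpers::rust_doc` via the `helpers.rs` change elsewhere in this diff.

```rust
// Assumed import path; the module declaration itself lives in helpers.rs.
use ide_db::helpers::rust_doc::is_rust_fence;

fn main() {
    assert!(is_rust_fence(""));                     // a fence with no info string defaults to Rust
    assert!(is_rust_fence("rust,no_run"));          // explicit `rust` tag plus a Rust-only modifier
    assert!(is_rust_fence("edition2018"));          // edition tags are ignored, still Rust
    assert!(is_rust_fence("E0308"));                // five-character error codes count as Rust
    assert!(!is_rust_fence("text"));                // unknown tag: not a Rust fence
    assert!(!is_rust_fence("python,should_panic")); // an unknown tag wins over Rust-only modifiers
}
```

These match the function's closing expression: an unknown tag disqualifies the fence unless an explicit `rust` tag is also present.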
diff --git a/crates/ide_db/src/search.rs b/crates/ide_db/src/search.rs index b55e3851e..8f899ea56 100644 --- a/crates/ide_db/src/search.rs +++ b/crates/ide_db/src/search.rs | |||
@@ -7,7 +7,9 @@ | |||
7 | use std::{convert::TryInto, mem}; | 7 | use std::{convert::TryInto, mem}; |
8 | 8 | ||
9 | use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt}; | 9 | use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt}; |
10 | use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility}; | 10 | use hir::{ |
11 | DefWithBody, HasAttrs, HasSource, InFile, ModuleDef, ModuleSource, Semantics, Visibility, | ||
12 | }; | ||
11 | use once_cell::unsync::Lazy; | 13 | use once_cell::unsync::Lazy; |
12 | use rustc_hash::FxHashMap; | 14 | use rustc_hash::FxHashMap; |
13 | use syntax::{ast, match_ast, AstNode, TextRange, TextSize}; | 15 | use syntax::{ast, match_ast, AstNode, TextRange, TextSize}; |
@@ -295,7 +297,7 @@ impl Definition { | |||
295 | } | 297 | } |
296 | 298 | ||
297 | pub fn usages<'a>(&'a self, sema: &'a Semantics<RootDatabase>) -> FindUsages<'a> { | 299 | pub fn usages<'a>(&'a self, sema: &'a Semantics<RootDatabase>) -> FindUsages<'a> { |
298 | FindUsages { def: self, sema, scope: None } | 300 | FindUsages { def: self, sema, scope: None, include_self_kw_refs: false } |
299 | } | 301 | } |
300 | } | 302 | } |
301 | 303 | ||
@@ -303,9 +305,15 @@ pub struct FindUsages<'a> { | |||
303 | def: &'a Definition, | 305 | def: &'a Definition, |
304 | sema: &'a Semantics<'a, RootDatabase>, | 306 | sema: &'a Semantics<'a, RootDatabase>, |
305 | scope: Option<SearchScope>, | 307 | scope: Option<SearchScope>, |
308 | include_self_kw_refs: bool, | ||
306 | } | 309 | } |
307 | 310 | ||
308 | impl<'a> FindUsages<'a> { | 311 | impl<'a> FindUsages<'a> { |
312 | pub fn include_self_kw_refs(mut self, include: bool) -> FindUsages<'a> { | ||
313 | self.include_self_kw_refs = include; | ||
314 | self | ||
315 | } | ||
316 | |||
309 | pub fn in_scope(self, scope: SearchScope) -> FindUsages<'a> { | 317 | pub fn in_scope(self, scope: SearchScope) -> FindUsages<'a> { |
310 | self.set_scope(Some(scope)) | 318 | self.set_scope(Some(scope)) |
311 | } | 319 | } |
@@ -352,6 +360,8 @@ impl<'a> FindUsages<'a> { | |||
352 | }; | 360 | }; |
353 | 361 | ||
354 | let pat = name.as_str(); | 362 | let pat = name.as_str(); |
363 | let search_for_self = self.include_self_kw_refs; | ||
364 | |||
355 | for (file_id, search_range) in search_scope { | 365 | for (file_id, search_range) in search_scope { |
356 | let text = sema.db.file_text(file_id); | 366 | let text = sema.db.file_text(file_id); |
357 | let search_range = | 367 | let search_range = |
@@ -359,31 +369,47 @@ impl<'a> FindUsages<'a> { | |||
359 | 369 | ||
360 | let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); | 370 | let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); |
361 | 371 | ||
362 | for (idx, _) in text.match_indices(pat) { | 372 | let mut handle_match = |idx: usize| -> bool { |
363 | let offset: TextSize = idx.try_into().unwrap(); | 373 | let offset: TextSize = idx.try_into().unwrap(); |
364 | if !search_range.contains_inclusive(offset) { | 374 | if !search_range.contains_inclusive(offset) { |
365 | continue; | 375 | return false; |
366 | } | 376 | } |
367 | 377 | ||
368 | if let Some(name) = sema.find_node_at_offset_with_descend(&tree, offset) { | 378 | if let Some(name) = sema.find_node_at_offset_with_descend(&tree, offset) { |
369 | match name { | 379 | match name { |
370 | ast::NameLike::NameRef(name_ref) => { | 380 | ast::NameLike::NameRef(name_ref) => { |
371 | if self.found_name_ref(&name_ref, sink) { | 381 | if self.found_name_ref(&name_ref, sink) { |
372 | return; | 382 | return true; |
373 | } | 383 | } |
374 | } | 384 | } |
375 | ast::NameLike::Name(name) => { | 385 | ast::NameLike::Name(name) => { |
376 | if self.found_name(&name, sink) { | 386 | if self.found_name(&name, sink) { |
377 | return; | 387 | return true; |
378 | } | 388 | } |
379 | } | 389 | } |
380 | ast::NameLike::Lifetime(lifetime) => { | 390 | ast::NameLike::Lifetime(lifetime) => { |
381 | if self.found_lifetime(&lifetime, sink) { | 391 | if self.found_lifetime(&lifetime, sink) { |
382 | return; | 392 | return true; |
383 | } | 393 | } |
384 | } | 394 | } |
385 | } | 395 | } |
386 | } | 396 | } |
397 | |||
398 | return false; | ||
399 | }; | ||
400 | |||
401 | for (idx, _) in text.match_indices(pat) { | ||
402 | if handle_match(idx) { | ||
403 | return; | ||
404 | } | ||
405 | } | ||
406 | |||
407 | if search_for_self { | ||
408 | for (idx, _) in text.match_indices("Self") { | ||
409 | if handle_match(idx) { | ||
410 | return; | ||
411 | } | ||
412 | } | ||
387 | } | 413 | } |
388 | } | 414 | } |
389 | } | 415 | } |
@@ -422,6 +448,24 @@ impl<'a> FindUsages<'a> { | |||
422 | }; | 448 | }; |
423 | sink(file_id, reference) | 449 | sink(file_id, reference) |
424 | } | 450 | } |
451 | Some(NameRefClass::Definition(Definition::SelfType(impl_))) => { | ||
452 | let ty = impl_.self_ty(self.sema.db); | ||
453 | |||
454 | if let Some(adt) = ty.as_adt() { | ||
455 | if &Definition::ModuleDef(ModuleDef::Adt(adt)) == self.def { | ||
456 | let FileRange { file_id, range } = | ||
457 | self.sema.original_range(name_ref.syntax()); | ||
458 | let reference = FileReference { | ||
459 | range, | ||
460 | name: ast::NameLike::NameRef(name_ref.clone()), | ||
461 | access: None, | ||
462 | }; | ||
463 | return sink(file_id, reference); | ||
464 | } | ||
465 | } | ||
466 | |||
467 | false | ||
468 | } | ||
425 | Some(NameRefClass::FieldShorthand { local_ref: local, field_ref: field }) => { | 469 | Some(NameRefClass::FieldShorthand { local_ref: local, field_ref: field }) => { |
426 | let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); | 470 | let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); |
427 | let reference = match self.def { | 471 | let reference = match self.def { |
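The `handle_match` closure introduced above exists so the same resolution logic can run twice: once over the regular name pattern and, when `include_self_kw_refs` is set, once more over literal `Self` occurrences. Below is a minimal standalone sketch of that two-pass shape, using plain text offsets only and none of the real `FindUsages`/`Semantics` machinery.

```rust
// Standalone sketch of the two-pass search: one shared `handle_match` closure, run
// first over the name pattern and then, optionally, over literal `Self`.
fn search(text: &str, pat: &str, include_self_kw_refs: bool) -> Vec<usize> {
    let mut hits = Vec::new();
    let mut handle_match = |idx: usize| {
        // The real code resolves the name at `idx`; here we just record the offset.
        hits.push(idx);
    };

    for (idx, _) in text.match_indices(pat) {
        handle_match(idx);
    }
    if include_self_kw_refs {
        for (idx, _) in text.match_indices("Self") {
            handle_match(idx);
        }
    }
    hits
}

fn main() {
    let src = "impl Foo { fn new() -> Self { Self } } fn take(f: Foo) {}";
    // Searching for `Foo` alone finds the two literal mentions...
    assert_eq!(search(src, "Foo", false).len(), 2);
    // ...while the `Self` pass also picks up the two `Self` occurrences.
    assert_eq!(search(src, "Foo", true).len(), 4);
}
```

The extra pass only runs when the caller opts in via `include_self_kw_refs(true)`, so the common search path does no additional work.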