Diffstat (limited to 'crates/ide_assists')
5 files changed, 143 insertions, 137 deletions
diff --git a/crates/ide_assists/src/handlers/auto_import.rs b/crates/ide_assists/src/handlers/auto_import.rs
index 49aa70f74..a454a2af3 100644
--- a/crates/ide_assists/src/handlers/auto_import.rs
+++ b/crates/ide_assists/src/handlers/auto_import.rs
@@ -93,7 +93,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
 
     let range = ctx.sema.original_range(&syntax_under_caret).range;
     let group_label = group_label(import_assets.import_candidate());
-    let scope = ImportScope::find_insert_use_container(&syntax_under_caret, &ctx.sema)?;
+    let scope = ImportScope::find_insert_use_container_with_macros(&syntax_under_caret, &ctx.sema)?;
     for import in proposed_imports {
         acc.add_group(
             &group_label,
@@ -101,9 +101,11 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
             format!("Import `{}`", import.import_path),
             range,
             |builder| {
-                let rewriter =
-                    insert_use(&scope, mod_path_to_ast(&import.import_path), ctx.config.insert_use);
-                builder.rewrite(rewriter);
+                let scope = match scope.clone() {
+                    ImportScope::File(it) => ImportScope::File(builder.make_ast_mut(it)),
+                    ImportScope::Module(it) => ImportScope::Module(builder.make_ast_mut(it)),
+                };
+                insert_use(&scope, mod_path_to_ast(&import.import_path), ctx.config.insert_use);
             },
         );
     }
diff --git a/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs
index a8d6355bd..66f274fa7 100644
--- a/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -5,7 +5,7 @@ use hir::{Module, ModuleDef, Name, Variant};
 use ide_db::{
     defs::Definition,
     helpers::{
-        insert_use::{insert_use, ImportScope},
+        insert_use::{insert_use, ImportScope, InsertUseConfig},
         mod_path_to_ast,
     },
     search::FileReference,
@@ -13,9 +13,9 @@ use ide_db::{
 };
 use rustc_hash::FxHashSet;
 use syntax::{
-    algo::{find_node_at_offset, SyntaxRewriter},
-    ast::{self, edit::IndentLevel, make, AstNode, NameOwner, VisibilityOwner},
-    SourceFile, SyntaxElement, SyntaxNode, T,
+    algo::find_node_at_offset,
+    ast::{self, make, AstNode, NameOwner, VisibilityOwner},
+    ted, SyntaxNode, T,
 };
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -62,40 +62,50 @@ pub(crate) fn extract_struct_from_enum_variant(
             let mut visited_modules_set = FxHashSet::default();
             let current_module = enum_hir.module(ctx.db());
             visited_modules_set.insert(current_module);
-            let mut def_rewriter = None;
+            // record file references of the file the def resides in, we only want to swap to the edited file in the builder once
+            let mut def_file_references = None;
             for (file_id, references) in usages {
-                let mut rewriter = SyntaxRewriter::default();
-                let source_file = ctx.sema.parse(file_id);
-                for reference in references {
-                    update_reference(
-                        ctx,
-                        &mut rewriter,
-                        reference,
-                        &source_file,
-                        &enum_module_def,
-                        &variant_hir_name,
-                        &mut visited_modules_set,
-                    );
-                }
                 if file_id == ctx.frange.file_id {
-                    def_rewriter = Some(rewriter);
+                    def_file_references = Some(references);
                     continue;
                 }
                 builder.edit_file(file_id);
-                builder.rewrite(rewriter);
+                let source_file = builder.make_ast_mut(ctx.sema.parse(file_id));
+                let processed = process_references(
+                    ctx,
+                    &mut visited_modules_set,
+                    source_file.syntax(),
+                    &enum_module_def,
+                    &variant_hir_name,
+                    references,
+                );
+                processed.into_iter().for_each(|(path, node, import)| {
+                    apply_references(ctx.config.insert_use, path, node, import)
+                });
             }
-            let mut rewriter = def_rewriter.unwrap_or_default();
-            update_variant(&mut rewriter, &variant);
-            extract_struct_def(
-                &mut rewriter,
-                &enum_ast,
-                variant_name.clone(),
-                &field_list,
-                &variant.parent_enum().syntax().clone().into(),
-                enum_ast.visibility(),
-            );
             builder.edit_file(ctx.frange.file_id);
-            builder.rewrite(rewriter);
+            let source_file = builder.make_ast_mut(ctx.sema.parse(ctx.frange.file_id));
+            let variant = builder.make_ast_mut(variant.clone());
+            if let Some(references) = def_file_references {
+                let processed = process_references(
+                    ctx,
+                    &mut visited_modules_set,
+                    source_file.syntax(),
+                    &enum_module_def,
+                    &variant_hir_name,
+                    references,
+                );
+                processed.into_iter().for_each(|(path, node, import)| {
+                    apply_references(ctx.config.insert_use, path, node, import)
+                });
+            }
+
+            let def = create_struct_def(variant_name.clone(), &field_list, enum_ast.visibility());
+            let start_offset = &variant.parent_enum().syntax().clone();
+            ted::insert_raw(ted::Position::before(start_offset), def.syntax());
+            ted::insert_raw(ted::Position::before(start_offset), &make::tokens::blank_line());
+
+            update_variant(&variant);
         },
     )
 }
@@ -136,34 +146,11 @@ fn existing_definition(db: &RootDatabase, variant_name: &ast::Name, variant: &Va
         .any(|(name, _)| name.to_string() == variant_name.to_string())
 }
 
-fn insert_import(
-    ctx: &AssistContext,
-    rewriter: &mut SyntaxRewriter,
-    scope_node: &SyntaxNode,
-    module: &Module,
-    enum_module_def: &ModuleDef,
-    variant_hir_name: &Name,
-) -> Option<()> {
-    let db = ctx.db();
-    let mod_path =
-        module.find_use_path_prefixed(db, *enum_module_def, ctx.config.insert_use.prefix_kind);
-    if let Some(mut mod_path) = mod_path {
-        mod_path.pop_segment();
-        mod_path.push_segment(variant_hir_name.clone());
-        let scope = ImportScope::find_insert_use_container(scope_node, &ctx.sema)?;
-        *rewriter += insert_use(&scope, mod_path_to_ast(&mod_path), ctx.config.insert_use);
-    }
-    Some(())
-}
-
-fn extract_struct_def(
-    rewriter: &mut SyntaxRewriter,
-    enum_: &ast::Enum,
+fn create_struct_def(
     variant_name: ast::Name,
     field_list: &Either<ast::RecordFieldList, ast::TupleFieldList>,
-    start_offset: &SyntaxElement,
     visibility: Option<ast::Visibility>,
-) -> Option<()> {
+) -> ast::Struct {
     let pub_vis = Some(make::visibility_pub());
     let field_list = match field_list {
         Either::Left(field_list) => {
@@ -180,65 +167,90 @@ fn extract_struct_def(
         .into(),
     };
 
-    rewriter.insert_before(
-        start_offset,
-        make::struct_(visibility, variant_name, None, field_list).syntax(),
-    );
-    rewriter.insert_before(start_offset, &make::tokens::blank_line());
-
-    if let indent_level @ 1..=usize::MAX = IndentLevel::from_node(enum_.syntax()).0 as usize {
-        rewriter
-            .insert_before(start_offset, &make::tokens::whitespace(&" ".repeat(4 * indent_level)));
-    }
-    Some(())
+    make::struct_(visibility, variant_name, None, field_list).clone_for_update()
 }
 
-fn update_variant(rewriter: &mut SyntaxRewriter, variant: &ast::Variant) -> Option<()> {
+fn update_variant(variant: &ast::Variant) -> Option<()> {
     let name = variant.name()?;
     let tuple_field = make::tuple_field(None, make::ty(&name.text()));
     let replacement = make::variant(
         name,
         Some(ast::FieldList::TupleFieldList(make::tuple_field_list(iter::once(tuple_field)))),
-    );
-    rewriter.replace(variant.syntax(), replacement.syntax());
+    )
+    .clone_for_update();
+    ted::replace(variant.syntax(), replacement.syntax());
     Some(())
 }
 
-fn update_reference(
+fn apply_references(
+    insert_use_cfg: InsertUseConfig,
+    segment: ast::PathSegment,
+    node: SyntaxNode,
+    import: Option<(ImportScope, hir::ModPath)>,
+) {
+    if let Some((scope, path)) = import {
+        insert_use(&scope, mod_path_to_ast(&path), insert_use_cfg);
+    }
+    ted::insert_raw(
+        ted::Position::before(segment.syntax()),
+        make::path_from_text(&format!("{}", segment)).clone_for_update().syntax(),
+    );
+    ted::insert_raw(ted::Position::before(segment.syntax()), make::token(T!['(']));
+    ted::insert_raw(ted::Position::after(&node), make::token(T![')']));
+}
+
+fn process_references(
     ctx: &AssistContext,
-    rewriter: &mut SyntaxRewriter,
-    reference: FileReference,
-    source_file: &SourceFile,
+    visited_modules: &mut FxHashSet<Module>,
+    source_file: &SyntaxNode,
     enum_module_def: &ModuleDef,
     variant_hir_name: &Name,
-    visited_modules_set: &mut FxHashSet<Module>,
-) -> Option<()> {
+    refs: Vec<FileReference>,
+) -> Vec<(ast::PathSegment, SyntaxNode, Option<(ImportScope, hir::ModPath)>)> {
+    // we have to recollect here eagerly as we are about to edit the tree we need to calculate the changes
+    // and corresponding nodes up front
+    refs.into_iter()
+        .flat_map(|reference| {
+            let (segment, scope_node, module) =
+                reference_to_node(&ctx.sema, source_file, reference)?;
+            if !visited_modules.contains(&module) {
+                let mod_path = module.find_use_path_prefixed(
+                    ctx.sema.db,
+                    *enum_module_def,
+                    ctx.config.insert_use.prefix_kind,
+                );
+                if let Some(mut mod_path) = mod_path {
+                    mod_path.pop_segment();
+                    mod_path.push_segment(variant_hir_name.clone());
+                    let scope = ImportScope::find_insert_use_container(&scope_node)?;
+                    visited_modules.insert(module);
+                    return Some((segment, scope_node, Some((scope, mod_path))));
+                }
+            }
+            Some((segment, scope_node, None))
+        })
+        .collect()
+}
+
+fn reference_to_node(
+    sema: &hir::Semantics<RootDatabase>,
+    source_file: &SyntaxNode,
+    reference: FileReference,
+) -> Option<(ast::PathSegment, SyntaxNode, hir::Module)> {
     let offset = reference.range.start();
-    let (segment, expr) = if let Some(path_expr) =
-        find_node_at_offset::<ast::PathExpr>(source_file.syntax(), offset)
-    {
+    if let Some(path_expr) = find_node_at_offset::<ast::PathExpr>(source_file, offset) {
         // tuple variant
-        (path_expr.path()?.segment()?, path_expr.syntax().parent()?)
-    } else if let Some(record_expr) =
-        find_node_at_offset::<ast::RecordExpr>(source_file.syntax(), offset)
-    {
+        Some((path_expr.path()?.segment()?, path_expr.syntax().parent()?))
+    } else if let Some(record_expr) = find_node_at_offset::<ast::RecordExpr>(source_file, offset) {
         // record variant
-        (record_expr.path()?.segment()?, record_expr.syntax().clone())
+        Some((record_expr.path()?.segment()?, record_expr.syntax().clone()))
     } else {
-        return None;
-    };
-
-    let module = ctx.sema.scope(&expr).module()?;
-    if !visited_modules_set.contains(&module) {
-        if insert_import(ctx, rewriter, &expr, &module, enum_module_def, variant_hir_name).is_some()
-        {
-            visited_modules_set.insert(module);
-        }
+        None
     }
-    rewriter.insert_after(segment.syntax(), &make::token(T!['(']));
-    rewriter.insert_after(segment.syntax(), segment.syntax());
-    rewriter.insert_after(&expr, &make::token(T![')']));
-    Some(())
+    .and_then(|(segment, expr)| {
+        let module = sema.scope(&expr).module()?;
+        Some((segment, expr, module))
+    })
 }
 
 #[cfg(test)]
@@ -345,7 +357,7 @@ mod my_mod {
 
         pub struct MyField(pub u8, pub u8);
 
         pub enum MyEnum {
             MyField(MyField),
         }
     }
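The restructuring in extract_struct_from_enum_variant.rs above splits reference handling into a read-only collection pass (process_references / reference_to_node) and a mutation pass (apply_references); as the new comment notes, the nodes to edit have to be gathered eagerly, because once ted starts mutating the tree in place the previously computed positions can no longer be used for lookups. A minimal self-contained sketch of that collect-then-mutate discipline, with a toy Tree type invented for the example (not a rust-analyzer API):

// Toy illustration of the collect-then-mutate split used above: find every
// target while the structure is still untouched, then apply all edits.
// `Tree` and its methods are stand-ins invented for this sketch.
struct Tree {
    nodes: Vec<String>,
}

impl Tree {
    // Pass 1 (read-only): record which nodes need rewriting.
    fn process_references(&self, needle: &str) -> Vec<usize> {
        self.nodes
            .iter()
            .enumerate()
            .filter(|(_, text)| text.contains(needle))
            .map(|(idx, _)| idx)
            .collect()
    }

    // Pass 2 (mutating): apply the edits computed in pass 1.
    fn apply_references(&mut self, targets: &[usize], from: &str, to: &str) {
        for &idx in targets {
            let rewritten = self.nodes[idx].replace(from, to);
            self.nodes[idx] = rewritten;
        }
    }
}

fn main() {
    let mut tree = Tree {
        nodes: vec!["MyEnum::MyField(1, 2)".to_string(), "unrelated()".to_string()],
    };
    // Collect first, while nothing has been edited yet...
    let targets = tree.process_references("MyEnum::MyField");
    // ...then mutate, wrapping the payload in the newly extracted struct.
    tree.apply_references(&targets, "(1, 2)", "(MyField(1, 2))");
    assert_eq!(tree.nodes[0], "MyEnum::MyField(MyField(1, 2))");
}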
diff --git a/crates/ide_assists/src/handlers/reorder_fields.rs b/crates/ide_assists/src/handlers/reorder_fields.rs
index 1a95135ca..e90bbdbcf 100644
--- a/crates/ide_assists/src/handlers/reorder_fields.rs
+++ b/crates/ide_assists/src/handlers/reorder_fields.rs
@@ -83,11 +83,9 @@ fn replace<T: AstNode + PartialEq>(
     fields: impl Iterator<Item = T>,
     sorted_fields: impl IntoIterator<Item = T>,
 ) {
-    fields.zip(sorted_fields).filter(|(field, sorted)| field != sorted).for_each(
-        |(field, sorted_field)| {
-            ted::replace(field.syntax(), sorted_field.syntax().clone_for_update());
-        },
-    );
+    fields.zip(sorted_fields).for_each(|(field, sorted_field)| {
+        ted::replace(field.syntax(), sorted_field.syntax().clone_for_update())
+    });
 }
 
 fn compute_fields_ranks(path: &ast::Path, ctx: &AssistContext) -> Option<FxHashMap<String, usize>> {
diff --git a/crates/ide_assists/src/handlers/reorder_impl.rs b/crates/ide_assists/src/handlers/reorder_impl.rs
index f976e73ad..72d889248 100644
--- a/crates/ide_assists/src/handlers/reorder_impl.rs
+++ b/crates/ide_assists/src/handlers/reorder_impl.rs
@@ -4,9 +4,8 @@ use rustc_hash::FxHashMap;
 use hir::{PathResolution, Semantics};
 use ide_db::RootDatabase;
 use syntax::{
-    algo,
     ast::{self, NameOwner},
-    AstNode,
+    ted, AstNode,
 };
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -75,13 +74,16 @@ pub(crate) fn reorder_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
     }
 
     let target = items.syntax().text_range();
-    acc.add(AssistId("reorder_impl", AssistKind::RefactorRewrite), "Sort methods", target, |edit| {
-        let mut rewriter = algo::SyntaxRewriter::default();
-        for (old, new) in methods.iter().zip(&sorted) {
-            rewriter.replace(old.syntax(), new.syntax());
-        }
-        edit.rewrite(rewriter);
-    })
+    acc.add(
+        AssistId("reorder_impl", AssistKind::RefactorRewrite),
+        "Sort methods",
+        target,
+        |builder| {
+            methods.into_iter().zip(sorted).for_each(|(old, new)| {
+                ted::replace(builder.make_ast_mut(old).syntax(), new.clone_for_update().syntax())
+            });
+        },
+    )
 }
 
 fn compute_method_ranks(path: &ast::Path, ctx: &AssistContext) -> Option<FxHashMap<String, usize>> {
diff --git a/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs
index 36d2e0331..99ba79860 100644
--- a/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs
@@ -1,5 +1,5 @@
 use ide_db::helpers::insert_use::{insert_use, ImportScope};
-use syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SyntaxNode};
+use syntax::{ast, match_ast, ted, AstNode, SyntaxNode};
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
 
@@ -31,7 +31,7 @@ pub(crate) fn replace_qualified_name_with_use(
     }
 
     let target = path.syntax().text_range();
-    let scope = ImportScope::find_insert_use_container(path.syntax(), &ctx.sema)?;
+    let scope = ImportScope::find_insert_use_container_with_macros(path.syntax(), &ctx.sema)?;
     let syntax = scope.as_syntax_node();
     acc.add(
         AssistId("replace_qualified_name_with_use", AssistKind::RefactorRewrite),
@@ -40,18 +40,17 @@ pub(crate) fn replace_qualified_name_with_use(
         |builder| {
             // Now that we've brought the name into scope, re-qualify all paths that could be
             // affected (that is, all paths inside the node we added the `use` to).
-            let mut rewriter = SyntaxRewriter::default();
-            shorten_paths(&mut rewriter, syntax.clone(), &path);
+            let syntax = builder.make_mut(syntax.clone());
             if let Some(ref import_scope) = ImportScope::from(syntax.clone()) {
-                rewriter += insert_use(import_scope, path, ctx.config.insert_use);
-                builder.rewrite(rewriter);
+                shorten_paths(&syntax, &path.clone_for_update());
+                insert_use(import_scope, path, ctx.config.insert_use);
             }
         },
     )
 }
 
 /// Adds replacements to `re` that shorten `path` in all descendants of `node`.
-fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path: &ast::Path) {
+fn shorten_paths(node: &SyntaxNode, path: &ast::Path) {
     for child in node.children() {
         match_ast! {
             match child {
@@ -60,34 +59,26 @@ fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path:
                 ast::Use(_it) => continue,
                 // Don't descend into submodules, they don't have the same `use` items in scope.
                 ast::Module(_it) => continue,
-
-                ast::Path(p) => {
-                    match maybe_replace_path(rewriter, p.clone(), path.clone()) {
-                        Some(()) => {},
-                        None => shorten_paths(rewriter, p.syntax().clone(), path),
-                    }
+                ast::Path(p) => if maybe_replace_path(p.clone(), path.clone()).is_none() {
+                    shorten_paths(p.syntax(), path);
                 },
-                _ => shorten_paths(rewriter, child, path),
+                _ => shorten_paths(&child, path),
             }
         }
     }
 }
 
-fn maybe_replace_path(
-    rewriter: &mut SyntaxRewriter<'static>,
-    path: ast::Path,
-    target: ast::Path,
-) -> Option<()> {
+fn maybe_replace_path(path: ast::Path, target: ast::Path) -> Option<()> {
     if !path_eq(path.clone(), target) {
         return None;
     }
 
     // Shorten `path`, leaving only its last segment.
     if let Some(parent) = path.qualifier() {
-        rewriter.delete(parent.syntax());
+        ted::remove(parent.syntax());
     }
     if let Some(double_colon) = path.coloncolon_token() {
-        rewriter.delete(&double_colon);
+        ted::remove(&double_colon);
     }
 
     Some(())
@@ -150,6 +141,7 @@ Debug
 ",
         );
     }
+
     #[test]
     fn test_replace_add_use_no_anchor_with_item_below() {
         check_assist(
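In replace_qualified_name_with_use.rs above, shorten_paths now edits the tree directly: it walks every descendant of the node that received the new use item, skips use items and submodules (which have their own scopes), and for any path equal to the newly imported one removes the qualifier and leading :: so only the last segment remains. A self-contained toy version of that traversal (the Item type is invented for this sketch, not a rust-analyzer type):

// Toy model of the shorten_paths traversal: trim qualified paths that match
// `target` down to their last segment, but do not descend into submodules,
// which keep their own `use` scope. The `Item` type is invented for this sketch.
enum Item {
    Path(String),
    Module(Vec<Item>), // not descended into, mirroring ast::Module above
    Block(Vec<Item>),  // descended into, like any other node
}

fn shorten_paths(items: &mut [Item], target: &str) {
    for item in items {
        match item {
            // A path equal to the freshly imported one keeps only its last segment.
            Item::Path(p) if p.as_str() == target => {
                let last = p.rsplit("::").next().unwrap_or_default().to_string();
                *p = last;
            }
            Item::Path(_) => {}
            // Submodules keep their fully qualified paths.
            Item::Module(_) => {}
            Item::Block(children) => shorten_paths(children, target),
        }
    }
}

fn main() {
    let mut items = vec![
        Item::Path("std::fmt::Debug".to_string()),
        Item::Block(vec![Item::Path("std::fmt::Debug".to_string())]),
        Item::Module(vec![Item::Path("std::fmt::Debug".to_string())]),
    ];
    shorten_paths(&mut items, "std::fmt::Debug");
    // The top-level path (and the one inside the block) is shortened; the one
    // inside the submodule is left alone.
    match &items[0] {
        Item::Path(p) => assert_eq!(p, "Debug"),
        _ => unreachable!(),
    }
}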