Diffstat (limited to 'crates')
 -rw-r--r--  crates/ra_assists/src/assist_ctx.rs              |   6
 -rw-r--r--  crates/ra_assists/src/ast_transform.rs           |  12
 -rw-r--r--  crates/ra_assists/src/handlers/merge_imports.rs  | 103
 -rw-r--r--  crates/ra_hir_expand/src/eager.rs                |  17
 -rw-r--r--  crates/ra_ide/src/expand_macro.rs                |  12
 -rw-r--r--  crates/ra_syntax/src/algo.rs                     | 138
 -rw-r--r--  crates/ra_syntax/src/ast/edit.rs                 | 103
 7 files changed, 270 insertions, 121 deletions
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 62182cf03..c3e653299 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -11,6 +11,7 @@ use ra_syntax::{
 use ra_text_edit::TextEditBuilder;
 
 use crate::{AssistAction, AssistId, AssistLabel, GroupLabel, ResolvedAssist};
+use algo::SyntaxRewriter;
 
 #[derive(Clone, Debug)]
 pub(crate) struct Assist(pub(crate) Vec<AssistInfo>);
@@ -234,6 +235,11 @@ impl ActionBuilder {
     pub(crate) fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
         algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
     }
+    pub(crate) fn rewrite(&mut self, rewriter: SyntaxRewriter) {
+        let node = rewriter.rewrite_root().unwrap();
+        let new = rewriter.rewrite(&node);
+        algo::diff(&node, &new).into_text_edit(&mut self.edit)
+    }
 
     fn build(self) -> AssistAction {
         AssistAction {
diff --git a/crates/ra_assists/src/ast_transform.rs b/crates/ra_assists/src/ast_transform.rs
index 45558c448..52b4c82db 100644
--- a/crates/ra_assists/src/ast_transform.rs
+++ b/crates/ra_assists/src/ast_transform.rs
@@ -3,7 +3,10 @@ use rustc_hash::FxHashMap;
 
 use hir::{PathResolution, SemanticsScope};
 use ra_ide_db::RootDatabase;
-use ra_syntax::ast::{self, AstNode};
+use ra_syntax::{
+    algo::SyntaxRewriter,
+    ast::{self, AstNode},
+};
 
 pub trait AstTransform<'a> {
     fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode>;
@@ -153,15 +156,14 @@ impl<'a> QualifyPaths<'a> {
 }
 
 pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N {
-    let syntax = node.syntax();
-    let result = ra_syntax::algo::replace_descendants(syntax, |element| match element {
+    SyntaxRewriter::from_fn(|element| match element {
         ra_syntax::SyntaxElement::Node(n) => {
             let replacement = transformer.get_substitution(&n)?;
             Some(replacement.into())
         }
         _ => None,
-    });
-    N::cast(result).unwrap()
+    })
+    .rewrite_ast(&node)
 }
 
 impl<'a> AstTransform<'a> for QualifyPaths<'a> {
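Aside (illustrative, not part of the commit): `SyntaxRewriter::from_fn` builds a rewriter from a callback instead of an explicit replacement map, and `rewrite_ast` re-casts the rewritten root back to the typed node. A minimal sketch of the same callback style, where `shorten_path` is a hypothetical helper standing in for whatever decides the replacement:

use ra_syntax::{algo::SyntaxRewriter, ast, AstNode, SyntaxElement, SyntaxNode};

// Replace every path for which `shorten_path` produces a substitute.
fn shorten_paths(
    root: &SyntaxNode,
    shorten_path: &dyn Fn(&ast::Path) -> Option<ast::Path>,
) -> SyntaxNode {
    SyntaxRewriter::from_fn(|element| match element {
        SyntaxElement::Node(node) => {
            let path = ast::Path::cast(node.clone())?;
            // Returning None leaves the element untouched.
            Some(shorten_path(&path)?.syntax().clone().into())
        }
        _ => None,
    })
    .rewrite(root)
}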
diff --git a/crates/ra_assists/src/handlers/merge_imports.rs b/crates/ra_assists/src/handlers/merge_imports.rs
index 89bc975bd..9c57d1e30 100644
--- a/crates/ra_assists/src/handlers/merge_imports.rs
+++ b/crates/ra_assists/src/handlers/merge_imports.rs
@@ -1,9 +1,9 @@
 use std::iter::successors;
 
 use ra_syntax::{
-    algo::neighbor,
+    algo::{neighbor, SyntaxRewriter},
     ast::{self, edit::AstNodeEdit, make},
-    AstNode, AstToken, Direction, InsertPosition, SyntaxElement, TextRange, T,
+    AstNode, Direction, InsertPosition, SyntaxElement, T,
 };
 
 use crate::{Assist, AssistCtx, AssistId};
@@ -22,9 +22,10 @@ use crate::{Assist, AssistCtx, AssistId};
 // ```
 pub(crate) fn merge_imports(ctx: AssistCtx) -> Option<Assist> {
     let tree: ast::UseTree = ctx.find_node_at_offset()?;
-    let (new_tree, to_delete) = if let Some(use_item) =
-        tree.syntax().parent().and_then(ast::UseItem::cast)
-    {
+    let mut rewriter = SyntaxRewriter::default();
+    let mut offset = ctx.frange.range.start();
+
+    if let Some(use_item) = tree.syntax().parent().and_then(ast::UseItem::cast) {
         let (merged, to_delete) = next_prev()
             .filter_map(|dir| neighbor(&use_item, dir))
             .filter_map(|it| Some((it.clone(), it.use_tree()?)))
@@ -32,42 +33,28 @@ pub(crate) fn merge_imports(ctx: AssistCtx) -> Option<Assist> {
                 Some((try_merge_trees(&tree, &use_tree)?, use_item.clone()))
             })?;
 
-        let mut range = to_delete.syntax().text_range();
-        let next_ws = to_delete
-            .syntax()
-            .next_sibling_or_token()
-            .and_then(|it| it.into_token())
-            .and_then(ast::Whitespace::cast);
-        if let Some(ws) = next_ws {
-            range = range.extend_to(&ws.syntax().text_range())
+        rewriter.replace_ast(&tree, &merged);
+        rewriter += to_delete.remove();
+
+        if to_delete.syntax().text_range().end() < offset {
+            offset -= to_delete.syntax().text_range().len();
         }
-        (merged, range)
     } else {
         let (merged, to_delete) = next_prev()
             .filter_map(|dir| neighbor(&tree, dir))
             .find_map(|use_tree| Some((try_merge_trees(&tree, &use_tree)?, use_tree.clone())))?;
 
-        let mut range = to_delete.syntax().text_range();
-        if let Some((dir, nb)) = next_prev().find_map(|dir| Some((dir, neighbor(&to_delete, dir)?)))
-        {
-            let nb_range = nb.syntax().text_range();
-            if dir == Direction::Prev {
-                range = TextRange::from_to(nb_range.end(), range.end());
-            } else {
-                range = TextRange::from_to(range.start(), nb_range.start());
-            }
+        rewriter.replace_ast(&tree, &merged);
+        rewriter += to_delete.remove();
+
+        if to_delete.syntax().text_range().end() < offset {
+            offset -= to_delete.syntax().text_range().len();
         }
-        (merged, range)
     };
 
-    let mut offset = ctx.frange.range.start();
     ctx.add_assist(AssistId("merge_imports"), "Merge imports", |edit| {
-        edit.replace_ast(tree, new_tree);
-        edit.delete(to_delete);
-
-        if to_delete.end() <= offset {
-            offset -= to_delete.len();
-        }
+        edit.rewrite(rewriter);
+        // FIXME: we only need because our diff is imprecise
         edit.set_cursor(offset);
     })
 }
@@ -156,7 +143,7 @@ use std::fmt::Debug;
 use std::fmt<|>::Display;
 ",
             r"
-use std::fmt<|>::{Display, Debug};
+use std::fmt:<|>:{Display, Debug};
 ",
         );
     }
@@ -178,7 +165,57 @@ use std::{fmt<|>::{Debug, Display}};
 use std::{fmt::Debug, fmt<|>::Display};
 ",
             r"
-use std::{fmt<|>::{Display, Debug}};
+use std::{fmt::<|>{Display, Debug}};
+",
+        );
+    }
+
+    #[test]
+    fn removes_just_enough_whitespace() {
+        check_assist(
+            merge_imports,
+            r"
+use foo<|>::bar;
+use foo::baz;
+
+/// Doc comment
+",
+            r"
+use foo<|>::{bar, baz};
+
+/// Doc comment
+",
+        );
+    }
+
+    #[test]
+    fn works_with_trailing_comma() {
+        check_assist(
+            merge_imports,
+            r"
+use {
+    foo<|>::bar,
+    foo::baz,
+};
+",
+            r"
+use {
+    foo<|>::{bar, baz},
+};
+",
+        );
+        check_assist(
+            merge_imports,
+            r"
+use {
+    foo::baz,
+    foo<|>::bar,
+};
+",
+            r"
+use {
+    foo::{bar<|>, baz},
+};
+",
+        );
+    }
 ",
         );
     }
diff --git a/crates/ra_hir_expand/src/eager.rs b/crates/ra_hir_expand/src/eager.rs
index 4cbce4df5..687d40294 100644
--- a/crates/ra_hir_expand/src/eager.rs
+++ b/crates/ra_hir_expand/src/eager.rs
@@ -26,8 +26,8 @@ use crate::{
 };
 
 use ra_parser::FragmentKind;
-use ra_syntax::{algo::replace_descendants, SyntaxElement, SyntaxNode};
-use std::{collections::HashMap, sync::Arc};
+use ra_syntax::{algo::SyntaxRewriter, SyntaxNode};
+use std::sync::Arc;
 
 pub fn expand_eager_macro(
     db: &dyn AstDatabase,
@@ -95,10 +95,10 @@ fn eager_macro_recur(
     curr: InFile<SyntaxNode>,
     macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
 ) -> Option<SyntaxNode> {
-    let mut original = curr.value.clone();
+    let original = curr.value.clone();
 
     let children = curr.value.descendants().filter_map(ast::MacroCall::cast);
-    let mut replaces: HashMap<SyntaxElement, SyntaxElement> = HashMap::default();
+    let mut rewriter = SyntaxRewriter::default();
 
     // Collect replacement
     for child in children {
@@ -119,12 +119,9 @@ fn eager_macro_recur(
             }
         };
 
-        replaces.insert(child.syntax().clone().into(), insert.into());
+        rewriter.replace(child.syntax(), &insert);
     }
 
-    if !replaces.is_empty() {
-        original = replace_descendants(&original, |n| replaces.get(n).cloned());
-    }
-
-    Some(original)
+    let res = rewriter.rewrite(&original);
+    Some(res)
 }
diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs
index e58526f31..f536ba3e7 100644
--- a/crates/ra_ide/src/expand_macro.rs
+++ b/crates/ra_ide/src/expand_macro.rs
@@ -3,10 +3,9 @@
 use hir::Semantics;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
-    algo::{find_node_at_offset, replace_descendants},
-    ast, AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, WalkEvent, T,
+    algo::{find_node_at_offset, SyntaxRewriter},
+    ast, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, WalkEvent, T,
 };
-use rustc_hash::FxHashMap;
 
 use crate::FilePosition;
 
@@ -37,7 +36,7 @@ fn expand_macro_recur(
     let mut expanded = sema.expand(macro_call)?;
 
     let children = expanded.descendants().filter_map(ast::MacroCall::cast);
-    let mut replaces: FxHashMap<SyntaxElement, SyntaxElement> = FxHashMap::default();
+    let mut rewriter = SyntaxRewriter::default();
 
     for child in children.into_iter() {
         if let Some(new_node) = expand_macro_recur(sema, &child) {
@@ -47,12 +46,13 @@ fn expand_macro_recur(
             if expanded == *child.syntax() {
                 expanded = new_node;
             } else {
-                replaces.insert(child.syntax().clone().into(), new_node.into());
+                rewriter.replace(child.syntax(), &new_node)
             }
         }
     }
 
-    Some(replace_descendants(&expanded, |n| replaces.get(n).cloned()))
+    let res = rewriter.rewrite(&expanded);
+    Some(res)
 }
 
 // FIXME: It would also be cool to share logic here and in the mbe tests,
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index ffdbdc767..4d463a3ef 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -1,6 +1,9 @@
 //! FIXME: write short doc here
 
-use std::ops::RangeInclusive;
+use std::{
+    fmt,
+    ops::{self, RangeInclusive},
+};
 
 use itertools::Itertools;
 use ra_text_edit::TextEditBuilder;
@@ -222,44 +225,121 @@ fn _replace_children(
     with_children(parent, new_children)
 }
 
-/// Replaces descendants in the node, according to the mapping.
-///
-/// This is a type-unsafe low-level editing API, if you need to use it, prefer
-/// to create a type-safe abstraction on top of it instead.
-pub fn replace_descendants(
-    parent: &SyntaxNode,
-    map: impl Fn(&SyntaxElement) -> Option<SyntaxElement>,
-) -> SyntaxNode {
-    _replace_descendants(parent, &map)
+#[derive(Default)]
+pub struct SyntaxRewriter<'a> {
+    f: Option<Box<dyn Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a>>,
+    //FIXME: add debug_assertions that all elements are in fact from the same file.
+    replacements: FxHashMap<SyntaxElement, Replacement>,
 }
 
-fn _replace_descendants(
-    parent: &SyntaxNode,
-    map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>,
-) -> SyntaxNode {
-    // FIXME: this could be made much faster.
-    let new_children = parent.children_with_tokens().map(|it| go(map, it)).collect::<Vec<_>>();
-    return with_children(parent, new_children);
+impl fmt::Debug for SyntaxRewriter<'_> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SyntaxRewriter").field("replacements", &self.replacements).finish()
+    }
+}
 
-    fn go(
-        map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>,
-        element: SyntaxElement,
-    ) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
-        if let Some(replacement) = map(&element) {
+impl<'a> SyntaxRewriter<'a> {
+    pub fn from_fn(f: impl Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a) -> SyntaxRewriter<'a> {
+        SyntaxRewriter { f: Some(Box::new(f)), replacements: FxHashMap::default() }
+    }
+    pub fn delete<T: Clone + Into<SyntaxElement>>(&mut self, what: &T) {
+        let what = what.clone().into();
+        let replacement = Replacement::Delete;
+        self.replacements.insert(what, replacement);
+    }
+    pub fn replace<T: Clone + Into<SyntaxElement>>(&mut self, what: &T, with: &T) {
+        let what = what.clone().into();
+        let replacement = Replacement::Single(with.clone().into());
+        self.replacements.insert(what, replacement);
+    }
+    pub fn replace_ast<T: AstNode>(&mut self, what: &T, with: &T) {
+        self.replace(what.syntax(), with.syntax())
+    }
+
+    pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode {
+        if self.f.is_none() && self.replacements.is_empty() {
+            return node.clone();
+        }
+        self.rewrite_children(node)
+    }
+
+    pub fn rewrite_ast<N: AstNode>(self, node: &N) -> N {
+        N::cast(self.rewrite(node.syntax())).unwrap()
+    }
+
+    pub fn rewrite_root(&self) -> Option<SyntaxNode> {
+        assert!(self.f.is_none());
+        self.replacements
+            .keys()
+            .map(|element| match element {
+                SyntaxElement::Node(it) => it.clone(),
+                SyntaxElement::Token(it) => it.parent(),
+            })
+            .fold1(|a, b| least_common_ancestor(&a, &b).unwrap())
+    }
+
+    fn replacement(&self, element: &SyntaxElement) -> Option<Replacement> {
+        if let Some(f) = &self.f {
+            assert!(self.replacements.is_empty());
+            return f(element).map(Replacement::Single);
+        }
+        self.replacements.get(element).cloned()
+    }
+
+    fn rewrite_children(&self, node: &SyntaxNode) -> SyntaxNode {
+        // FIXME: this could be made much faster.
+        let new_children =
+            node.children_with_tokens().flat_map(|it| self.rewrite_self(&it)).collect::<Vec<_>>();
+        with_children(node, new_children)
+    }
+
+    fn rewrite_self(
+        &self,
+        element: &SyntaxElement,
+    ) -> Option<NodeOrToken<rowan::GreenNode, rowan::GreenToken>> {
+        if let Some(replacement) = self.replacement(&element) {
             return match replacement {
-                NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()),
-                NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
+                Replacement::Single(NodeOrToken::Node(it)) => {
+                    Some(NodeOrToken::Node(it.green().clone()))
+                }
+                Replacement::Single(NodeOrToken::Token(it)) => {
+                    Some(NodeOrToken::Token(it.green().clone()))
+                }
+                Replacement::Delete => None,
             };
         }
-        match element {
+        let res = match element {
             NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
-            NodeOrToken::Node(it) => {
-                NodeOrToken::Node(_replace_descendants(&it, map).green().clone())
-            }
-        }
+            NodeOrToken::Node(it) => NodeOrToken::Node(self.rewrite_children(it).green().clone()),
+        };
+        Some(res)
+    }
+}
+
+impl<'a> ops::AddAssign for SyntaxRewriter<'_> {
+    fn add_assign(&mut self, rhs: SyntaxRewriter) {
+        assert!(rhs.f.is_none());
+        self.replacements.extend(rhs.replacements)
     }
 }
 
+#[derive(Clone, Debug)]
+enum Replacement {
+    Delete,
+    Single(SyntaxElement),
+}
+
+/// Replaces descendants in the node, according to the mapping.
+///
+/// This is a type-unsafe low-level editing API, if you need to use it, prefer
+/// to create a type-safe abstraction on top of it instead.
+pub fn _replace_descendants(
+    parent: &SyntaxNode,
+    map: impl Fn(&SyntaxElement) -> Option<SyntaxElement>,
+) -> SyntaxNode {
+    SyntaxRewriter::from_fn(map).rewrite(parent)
+}
+
 fn with_children(
     parent: &SyntaxNode,
     new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>,
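For orientation (illustrative, not part of the commit): with the map-based half of the new API, edits are first recorded on the rewriter and the tree is only rebuilt once `rewrite` is called on a root. A minimal sketch using just the methods defined above:

use ra_syntax::{algo::SyntaxRewriter, ast, AstNode, SourceFile, SyntaxNode};

// Return a copy of the file with every attribute node deleted.
fn drop_all_attrs(file: &SourceFile) -> SyntaxNode {
    let mut rewriter = SyntaxRewriter::default();
    for attr in file.syntax().descendants().filter_map(ast::Attr::cast) {
        // Only records the deletion; nothing is mutated here.
        rewriter.delete(attr.syntax());
    }
    // Rebuild the tree once, applying all recorded edits.
    rewriter.rewrite(file.syntax())
}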
diff --git a/crates/ra_syntax/src/ast/edit.rs b/crates/ra_syntax/src/ast/edit.rs
index 324327162..f74c9f9c6 100644
--- a/crates/ra_syntax/src/ast/edit.rs
+++ b/crates/ra_syntax/src/ast/edit.rs
@@ -4,7 +4,6 @@
 use std::{iter, ops::RangeInclusive};
 
 use arrayvec::ArrayVec;
-use rustc_hash::FxHashMap;
 
 use crate::{
     algo,
@@ -17,6 +16,7 @@ use crate::{
     SyntaxKind::{ATTR, COMMENT, WHITESPACE},
     SyntaxNode, SyntaxToken, T,
 };
+use algo::{neighbor, SyntaxRewriter};
 
 impl ast::BinExpr {
     #[must_use]
@@ -255,6 +255,28 @@ impl ast::UseItem {
         }
         self.clone()
     }
+
+    pub fn remove(&self) -> SyntaxRewriter<'static> {
+        let mut res = SyntaxRewriter::default();
+        res.delete(self.syntax());
+        let next_ws = self
+            .syntax()
+            .next_sibling_or_token()
+            .and_then(|it| it.into_token())
+            .and_then(ast::Whitespace::cast);
+        if let Some(next_ws) = next_ws {
+            let ws_text = next_ws.syntax().text();
+            if ws_text.starts_with('\n') {
+                let rest = &ws_text[1..];
+                if rest.is_empty() {
+                    res.delete(next_ws.syntax())
+                } else {
+                    res.replace(next_ws.syntax(), &make::tokens::whitespace(rest));
+                }
+            }
+        }
+        res
+    }
 }
 
 impl ast::UseTree {
@@ -293,6 +315,22 @@ impl ast::UseTree {
             Some(res)
         }
     }
+
+    pub fn remove(&self) -> SyntaxRewriter<'static> {
+        let mut res = SyntaxRewriter::default();
+        res.delete(self.syntax());
+        for &dir in [Direction::Next, Direction::Prev].iter() {
+            if let Some(nb) = neighbor(self, dir) {
+                self.syntax()
+                    .siblings_with_tokens(dir)
+                    .skip(1)
+                    .take_while(|it| it.as_node() != Some(nb.syntax()))
+                    .for_each(|el| res.delete(&el));
+                return res;
+            }
+        }
+        res
+    }
 }
 
 #[must_use]
@@ -343,28 +381,24 @@ impl IndentLevel {
     }
 
     fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode {
-        let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node
-            .descendants_with_tokens()
+        let mut rewriter = SyntaxRewriter::default();
+        node.descendants_with_tokens()
             .filter_map(|el| el.into_token())
             .filter_map(ast::Whitespace::cast)
             .filter(|ws| {
                 let text = ws.syntax().text();
                 text.contains('\n')
             })
-            .map(|ws| {
-                (
-                    ws.syntax().clone().into(),
-                    make::tokens::whitespace(&format!(
-                        "{}{:width$}",
-                        ws.syntax().text(),
-                        "",
-                        width = self.0 as usize * 4
-                    ))
-                    .into(),
-                )
-            })
-            .collect();
-        algo::replace_descendants(&node, |n| replacements.get(n).cloned())
+            .for_each(|ws| {
+                let new_ws = make::tokens::whitespace(&format!(
+                    "{}{:width$}",
+                    ws.syntax().text(),
+                    "",
+                    width = self.0 as usize * 4
+                ));
+                rewriter.replace(ws.syntax(), &new_ws)
+            });
+        rewriter.rewrite(&node)
     }
 
     pub fn decrease_indent<N: AstNode>(self, node: N) -> N {
@@ -372,27 +406,21 @@ impl IndentLevel {
     }
 
     fn _decrease_indent(self, node: SyntaxNode) -> SyntaxNode {
-        let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node
-            .descendants_with_tokens()
+        let mut rewriter = SyntaxRewriter::default();
+        node.descendants_with_tokens()
             .filter_map(|el| el.into_token())
             .filter_map(ast::Whitespace::cast)
             .filter(|ws| {
                 let text = ws.syntax().text();
                 text.contains('\n')
             })
-            .map(|ws| {
-                (
-                    ws.syntax().clone().into(),
-                    make::tokens::whitespace(
-                        &ws.syntax()
-                            .text()
-                            .replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"),
-                    )
-                    .into(),
-                )
-            })
-            .collect();
-        algo::replace_descendants(&node, |n| replacements.get(n).cloned())
+            .for_each(|ws| {
+                let new_ws = make::tokens::whitespace(
+                    &ws.syntax().text().replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"),
+                );
+                rewriter.replace(ws.syntax(), &new_ws)
+            });
+        rewriter.rewrite(&node)
     }
 }
 
@@ -442,12 +470,11 @@ pub trait AstNodeEdit: AstNode + Sized {
         &self,
         replacement_map: impl IntoIterator<Item = (D, D)>,
     ) -> Self {
-        let map = replacement_map
-            .into_iter()
-            .map(|(from, to)| (from.syntax().clone().into(), to.syntax().clone().into()))
-            .collect::<FxHashMap<SyntaxElement, _>>();
-        let new_syntax = algo::replace_descendants(self.syntax(), |n| map.get(n).cloned());
-        Self::cast(new_syntax).unwrap()
+        let mut rewriter = SyntaxRewriter::default();
+        for (from, to) in replacement_map {
+            rewriter.replace(from.syntax(), to.syntax())
+        }
+        rewriter.rewrite_ast(self)
     }
 }
 
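Taken together (illustrative, not part of the commit): `UseItem::remove` and `UseTree::remove` return small rewriters of their own, and the `AddAssign` impl from algo.rs merges them into an accumulated rewriter, which is how merge_imports now composes its edits before handing them to `ActionBuilder::rewrite`. A minimal sketch of that composition:

use ra_syntax::{algo::SyntaxRewriter, ast, AstNode, SourceFile, SyntaxNode};

// Delete every `use` item, including the trailing newline each one owns.
fn strip_use_items(file: &SourceFile) -> SyntaxNode {
    let mut rewriter = SyntaxRewriter::default();
    for use_item in file.syntax().descendants().filter_map(ast::UseItem::cast) {
        // `remove()` yields a rewriter for this item; `+=` folds it in.
        rewriter += use_item.remove();
    }
    rewriter.rewrite(file.syntax())
}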