Diffstat (limited to 'crates/ra_syntax')
-rw-r--r--  crates/ra_syntax/Cargo.toml             |   2
-rw-r--r--  crates/ra_syntax/src/algo.rs            | 142
-rw-r--r--  crates/ra_syntax/src/ast/edit.rs        | 109
-rw-r--r--  crates/ra_syntax/src/ast/extensions.rs  |  13
-rw-r--r--  crates/ra_syntax/src/ast/make.rs        |  17
5 files changed, 211 insertions(+), 72 deletions(-)
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml
index 8efc6b368..6fccc2303 100644
--- a/crates/ra_syntax/Cargo.toml
+++ b/crates/ra_syntax/Cargo.toml
@@ -11,7 +11,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer"
 doctest = false
 
 [dependencies]
-itertools = "0.8.2"
+itertools = "0.9.0"
 rowan = "0.9.1"
 rustc_lexer = "0.1.0"
 rustc-hash = "1.1.0"
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index 344cf0fbe..4d463a3ef 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -1,6 +1,9 @@
 //! FIXME: write short doc here
 
-use std::ops::RangeInclusive;
+use std::{
+    fmt,
+    ops::{self, RangeInclusive},
+};
 
 use itertools::Itertools;
 use ra_text_edit::TextEditBuilder;
@@ -95,6 +98,10 @@ impl TreeDiff {
             builder.replace(from.text_range(), to.to_string())
         }
     }
+
+    pub fn is_empty(&self) -> bool {
+        self.replacements.is_empty()
+    }
 }
 
 /// Finds minimal the diff, which, applied to `from`, will result in `to`.
@@ -218,44 +225,121 @@ fn _replace_children(
     with_children(parent, new_children)
 }
 
-/// Replaces descendants in the node, according to the mapping.
-///
-/// This is a type-unsafe low-level editing API, if you need to use it, prefer
-/// to create a type-safe abstraction on top of it instead.
-pub fn replace_descendants(
-    parent: &SyntaxNode,
-    map: impl Fn(&SyntaxElement) -> Option<SyntaxElement>,
-) -> SyntaxNode {
-    _replace_descendants(parent, &map)
+#[derive(Default)]
+pub struct SyntaxRewriter<'a> {
+    f: Option<Box<dyn Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a>>,
+    //FIXME: add debug_assertions that all elements are in fact from the same file.
+    replacements: FxHashMap<SyntaxElement, Replacement>,
 }
 
-fn _replace_descendants(
-    parent: &SyntaxNode,
-    map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>,
-) -> SyntaxNode {
-    // FIXME: this could be made much faster.
-    let new_children = parent.children_with_tokens().map(|it| go(map, it)).collect::<Vec<_>>();
-    return with_children(parent, new_children);
+impl fmt::Debug for SyntaxRewriter<'_> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SyntaxRewriter").field("replacements", &self.replacements).finish()
+    }
+}
 
-    fn go(
-        map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>,
-        element: SyntaxElement,
-    ) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
-        if let Some(replacement) = map(&element) {
+impl<'a> SyntaxRewriter<'a> {
+    pub fn from_fn(f: impl Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a) -> SyntaxRewriter<'a> {
+        SyntaxRewriter { f: Some(Box::new(f)), replacements: FxHashMap::default() }
+    }
+    pub fn delete<T: Clone + Into<SyntaxElement>>(&mut self, what: &T) {
+        let what = what.clone().into();
+        let replacement = Replacement::Delete;
+        self.replacements.insert(what, replacement);
+    }
+    pub fn replace<T: Clone + Into<SyntaxElement>>(&mut self, what: &T, with: &T) {
+        let what = what.clone().into();
+        let replacement = Replacement::Single(with.clone().into());
+        self.replacements.insert(what, replacement);
+    }
+    pub fn replace_ast<T: AstNode>(&mut self, what: &T, with: &T) {
+        self.replace(what.syntax(), with.syntax())
+    }
+
+    pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode {
+        if self.f.is_none() && self.replacements.is_empty() {
+            return node.clone();
+        }
+        self.rewrite_children(node)
+    }
+
+    pub fn rewrite_ast<N: AstNode>(self, node: &N) -> N {
+        N::cast(self.rewrite(node.syntax())).unwrap()
+    }
+
+    pub fn rewrite_root(&self) -> Option<SyntaxNode> {
+        assert!(self.f.is_none());
+        self.replacements
+            .keys()
+            .map(|element| match element {
+                SyntaxElement::Node(it) => it.clone(),
+                SyntaxElement::Token(it) => it.parent(),
+            })
+            .fold1(|a, b| least_common_ancestor(&a, &b).unwrap())
+    }
+
+    fn replacement(&self, element: &SyntaxElement) -> Option<Replacement> {
+        if let Some(f) = &self.f {
+            assert!(self.replacements.is_empty());
+            return f(element).map(Replacement::Single);
+        }
+        self.replacements.get(element).cloned()
+    }
+
+    fn rewrite_children(&self, node: &SyntaxNode) -> SyntaxNode {
+        // FIXME: this could be made much faster.
+        let new_children =
+            node.children_with_tokens().flat_map(|it| self.rewrite_self(&it)).collect::<Vec<_>>();
+        with_children(node, new_children)
+    }
+
+    fn rewrite_self(
+        &self,
+        element: &SyntaxElement,
+    ) -> Option<NodeOrToken<rowan::GreenNode, rowan::GreenToken>> {
+        if let Some(replacement) = self.replacement(&element) {
             return match replacement {
-                NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()),
-                NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
+                Replacement::Single(NodeOrToken::Node(it)) => {
+                    Some(NodeOrToken::Node(it.green().clone()))
+                }
+                Replacement::Single(NodeOrToken::Token(it)) => {
+                    Some(NodeOrToken::Token(it.green().clone()))
+                }
+                Replacement::Delete => None,
             };
         }
-        match element {
+        let res = match element {
             NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
-            NodeOrToken::Node(it) => {
-                NodeOrToken::Node(_replace_descendants(&it, map).green().clone())
-            }
-        }
+            NodeOrToken::Node(it) => NodeOrToken::Node(self.rewrite_children(it).green().clone()),
+        };
+        Some(res)
     }
 }
 
+impl<'a> ops::AddAssign for SyntaxRewriter<'_> {
+    fn add_assign(&mut self, rhs: SyntaxRewriter) {
+        assert!(rhs.f.is_none());
+        self.replacements.extend(rhs.replacements)
+    }
+}
+
+#[derive(Clone, Debug)]
+enum Replacement {
+    Delete,
+    Single(SyntaxElement),
+}
+
+/// Replaces descendants in the node, according to the mapping.
+///
+/// This is a type-unsafe low-level editing API, if you need to use it, prefer
+/// to create a type-safe abstraction on top of it instead.
+pub fn _replace_descendants(
+    parent: &SyntaxNode,
+    map: impl Fn(&SyntaxElement) -> Option<SyntaxElement>,
+) -> SyntaxNode {
+    SyntaxRewriter::from_fn(map).rewrite(parent)
+}
+
 fn with_children(
     parent: &SyntaxNode,
     new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>,
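[Annotation, not part of the diff] A minimal usage sketch of the new SyntaxRewriter API introduced above, assuming the caller already holds the relevant SyntaxNodes; the function name is illustrative only:

    use ra_syntax::{algo::SyntaxRewriter, SyntaxNode};

    // Plan a set of edits, then materialize them in one pass.
    fn replace_node_sketch(root: &SyntaxNode, old: &SyntaxNode, new: &SyntaxNode) -> SyntaxNode {
        let mut rewriter = SyntaxRewriter::default();
        // Nothing is mutated here; the replacement is only recorded.
        rewriter.replace(old, new);
        // `rewrite` walks `root` and produces a fresh tree with the edit applied.
        rewriter.rewrite(root)
    }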
diff --git a/crates/ra_syntax/src/ast/edit.rs b/crates/ra_syntax/src/ast/edit.rs
index 68dae008f..f74c9f9c6 100644
--- a/crates/ra_syntax/src/ast/edit.rs
+++ b/crates/ra_syntax/src/ast/edit.rs
@@ -4,7 +4,6 @@
 use std::{iter, ops::RangeInclusive};
 
 use arrayvec::ArrayVec;
-use rustc_hash::FxHashMap;
 
 use crate::{
     algo,
@@ -17,6 +16,7 @@ use crate::{
     SyntaxKind::{ATTR, COMMENT, WHITESPACE},
     SyntaxNode, SyntaxToken, T,
 };
+use algo::{neighbor, SyntaxRewriter};
 
 impl ast::BinExpr {
     #[must_use]
@@ -255,6 +255,28 @@ impl ast::UseItem {
         }
         self.clone()
     }
+
+    pub fn remove(&self) -> SyntaxRewriter<'static> {
+        let mut res = SyntaxRewriter::default();
+        res.delete(self.syntax());
+        let next_ws = self
+            .syntax()
+            .next_sibling_or_token()
+            .and_then(|it| it.into_token())
+            .and_then(ast::Whitespace::cast);
+        if let Some(next_ws) = next_ws {
+            let ws_text = next_ws.syntax().text();
+            if ws_text.starts_with('\n') {
+                let rest = &ws_text[1..];
+                if rest.is_empty() {
+                    res.delete(next_ws.syntax())
+                } else {
+                    res.replace(next_ws.syntax(), &make::tokens::whitespace(rest));
+                }
+            }
+        }
+        res
+    }
 }
 
 impl ast::UseTree {
@@ -293,14 +315,30 @@ impl ast::UseTree {
             Some(res)
         }
     }
+
+    pub fn remove(&self) -> SyntaxRewriter<'static> {
+        let mut res = SyntaxRewriter::default();
+        res.delete(self.syntax());
+        for &dir in [Direction::Next, Direction::Prev].iter() {
+            if let Some(nb) = neighbor(self, dir) {
+                self.syntax()
+                    .siblings_with_tokens(dir)
+                    .skip(1)
+                    .take_while(|it| it.as_node() != Some(nb.syntax()))
+                    .for_each(|el| res.delete(&el));
+                return res;
+            }
+        }
+        res
+    }
 }
 
 #[must_use]
-pub fn strip_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N {
-    N::cast(strip_attrs_and_docs_inner(node.syntax().clone())).unwrap()
+pub fn remove_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N {
+    N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap()
 }
 
-fn strip_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode {
+fn remove_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode {
     while let Some(start) =
         node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT)
     {
@@ -343,28 +381,24 @@ impl IndentLevel {
     }
 
     fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode {
-        let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node
-            .descendants_with_tokens()
+        let mut rewriter = SyntaxRewriter::default();
+        node.descendants_with_tokens()
             .filter_map(|el| el.into_token())
             .filter_map(ast::Whitespace::cast)
             .filter(|ws| {
                 let text = ws.syntax().text();
                 text.contains('\n')
             })
-            .map(|ws| {
-                (
-                    ws.syntax().clone().into(),
-                    make::tokens::whitespace(&format!(
-                        "{}{:width$}",
-                        ws.syntax().text(),
-                        "",
-                        width = self.0 as usize * 4
-                    ))
-                    .into(),
-                )
-            })
-            .collect();
-        algo::replace_descendants(&node, |n| replacements.get(n).cloned())
+            .for_each(|ws| {
+                let new_ws = make::tokens::whitespace(&format!(
+                    "{}{:width$}",
+                    ws.syntax().text(),
+                    "",
+                    width = self.0 as usize * 4
+                ));
+                rewriter.replace(ws.syntax(), &new_ws)
+            });
+        rewriter.rewrite(&node)
     }
 
     pub fn decrease_indent<N: AstNode>(self, node: N) -> N {
@@ -372,27 +406,21 @@ impl IndentLevel {
     }
 
     fn _decrease_indent(self, node: SyntaxNode) -> SyntaxNode {
-        let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node
-            .descendants_with_tokens()
+        let mut rewriter = SyntaxRewriter::default();
+        node.descendants_with_tokens()
             .filter_map(|el| el.into_token())
             .filter_map(ast::Whitespace::cast)
             .filter(|ws| {
                 let text = ws.syntax().text();
                 text.contains('\n')
             })
-            .map(|ws| {
-                (
-                    ws.syntax().clone().into(),
-                    make::tokens::whitespace(
-                        &ws.syntax()
-                            .text()
-                            .replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"),
-                    )
-                    .into(),
-                )
-            })
-            .collect();
-        algo::replace_descendants(&node, |n| replacements.get(n).cloned())
+            .for_each(|ws| {
+                let new_ws = make::tokens::whitespace(
+                    &ws.syntax().text().replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"),
+                );
+                rewriter.replace(ws.syntax(), &new_ws)
+            });
+        rewriter.rewrite(&node)
     }
 }
 
@@ -442,12 +470,11 @@ pub trait AstNodeEdit: AstNode + Sized {
         &self,
         replacement_map: impl IntoIterator<Item = (D, D)>,
     ) -> Self {
-        let map = replacement_map
-            .into_iter()
-            .map(|(from, to)| (from.syntax().clone().into(), to.syntax().clone().into()))
-            .collect::<FxHashMap<SyntaxElement, _>>();
-        let new_syntax = algo::replace_descendants(self.syntax(), |n| map.get(n).cloned());
-        Self::cast(new_syntax).unwrap()
+        let mut rewriter = SyntaxRewriter::default();
+        for (from, to) in replacement_map {
+            rewriter.replace(from.syntax(), to.syntax())
+        }
+        rewriter.rewrite_ast(self)
     }
 }
 
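[Annotation, not part of the diff] A sketch only: the new remove() helpers on ast::UseItem and ast::UseTree return a SyntaxRewriter, which callers can merge via the AddAssign impl and apply at the common ancestor found by rewrite_root. The function name and scenario are hypothetical:

    use ra_syntax::{ast, SyntaxNode};

    // Remove two use items in a single rewrite pass.
    fn remove_two_uses_sketch(a: &ast::UseItem, b: &ast::UseItem) -> Option<SyntaxNode> {
        let mut rewriter = a.remove();
        rewriter += b.remove(); // `+=` merges the planned replacements
        let root = rewriter.rewrite_root()?; // least common ancestor of all edited elements
        Some(rewriter.rewrite(&root))
    }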
diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs
index c3ae8f90e..392731dac 100644
--- a/crates/ra_syntax/src/ast/extensions.rs
+++ b/crates/ra_syntax/src/ast/extensions.rs
@@ -4,7 +4,7 @@
 use itertools::Itertools;
 
 use crate::{
-    ast::{self, child_opt, children, AstNode, AttrInput, SyntaxNode},
+    ast::{self, child_opt, children, AstNode, AttrInput, NameOwner, SyntaxNode},
     SmolStr, SyntaxElement,
     SyntaxKind::*,
     SyntaxToken, T,
@@ -514,3 +514,14 @@ impl ast::Visibility {
         self.syntax().children_with_tokens().any(|it| it.kind() == T![super])
     }
 }
+
+impl ast::MacroCall {
+    pub fn is_macro_rules(&self) -> Option<ast::Name> {
+        let name_ref = self.path()?.segment()?.name_ref()?;
+        if name_ref.text() == "macro_rules" {
+            self.name()
+        } else {
+            None
+        }
+    }
+}
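[Annotation, not part of the diff] A sketch of how the new ast::MacroCall::is_macro_rules helper might be used to pick up the name of a macro_rules! definition; the wrapper function is hypothetical and ast::Name::text() is assumed for extracting the identifier:

    use ra_syntax::ast;

    // Returns the defined macro's name only when the call is `macro_rules! name { ... }`.
    fn macro_rules_name_sketch(call: &ast::MacroCall) -> Option<String> {
        call.is_macro_rules().map(|name| name.text().to_string())
    }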
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs
index 9f6f1cc53..1145b69e8 100644
--- a/crates/ra_syntax/src/ast/make.rs
+++ b/crates/ra_syntax/src/ast/make.rs
@@ -87,6 +87,9 @@ pub fn block_from_expr(e: ast::Expr) -> ast::Block {
 pub fn expr_unit() -> ast::Expr {
     expr_from_text("()")
 }
+pub fn expr_empty_block() -> ast::Expr {
+    expr_from_text("{}")
+}
 pub fn expr_unimplemented() -> ast::Expr {
     expr_from_text("unimplemented!()")
 }
@@ -136,6 +139,20 @@ pub fn placeholder_pat() -> ast::PlaceholderPat {
     }
 }
 
+/// Creates a tuple of patterns from an interator of patterns.
+///
+/// Invariant: `pats` must be length > 1
+///
+/// FIXME handle `pats` length == 1
+pub fn tuple_pat(pats: impl IntoIterator<Item = ast::Pat>) -> ast::TuplePat {
+    let pats_str = pats.into_iter().map(|p| p.to_string()).join(", ");
+    return from_text(&format!("({})", pats_str));
+
+    fn from_text(text: &str) -> ast::TuplePat {
+        ast_from_text(&format!("fn f({}: ())", text))
+    }
+}
+
 pub fn tuple_struct_pat(
     path: ast::Path,
     pats: impl IntoIterator<Item = ast::Pat>,
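[Annotation, not part of the diff] A sketch exercising the new make::expr_empty_block and make::tuple_pat constructors, respecting the documented invariant that tuple_pat needs more than one pattern; the From<PlaceholderPat> for Pat conversion is assumed from the generated AST impls:

    use ra_syntax::ast::{self, make};

    fn build_nodes_sketch() -> (ast::Expr, ast::TuplePat) {
        let block = make::expr_empty_block(); // parses to the expression `{}`
        let pats: Vec<ast::Pat> =
            vec![make::placeholder_pat().into(), make::placeholder_pat().into()];
        let tuple = make::tuple_pat(pats); // parses to the pattern `(_, _)`
        (block, tuple)
    }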