Diffstat (limited to 'crates/ra_syntax')
 -rw-r--r--  crates/ra_syntax/Cargo.toml                                              2
 -rw-r--r--  crates/ra_syntax/src/algo.rs                                           127
 -rw-r--r--  crates/ra_syntax/src/ast/edit.rs                                       109
 -rw-r--r--  crates/ra_syntax/src/ast/make.rs                                         3
 -rw-r--r--  crates/ra_syntax/test_data/parser/err/0010_unsafe_lambda_block.txt     30
 -rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rs    1
 -rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.txt   45
 7 files changed, 232 insertions, 85 deletions
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml
index 8efc6b368..6fccc2303 100644
--- a/crates/ra_syntax/Cargo.toml
+++ b/crates/ra_syntax/Cargo.toml
@@ -11,7 +11,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer"
 doctest = false
 
 [dependencies]
-itertools = "0.8.2"
+itertools = "0.9.0"
 rowan = "0.9.1"
 rustc_lexer = "0.1.0"
 rustc-hash = "1.1.0"
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index ffdbdc767..191123c8e 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -1,6 +1,9 @@
 //! FIXME: write short doc here
 
-use std::ops::RangeInclusive;
+use std::{
+    fmt,
+    ops::{self, RangeInclusive},
+};
 
 use itertools::Itertools;
 use ra_text_edit::TextEditBuilder;
@@ -222,44 +225,110 @@ fn _replace_children(
     with_children(parent, new_children)
 }
 
-/// Replaces descendants in the node, according to the mapping.
-///
-/// This is a type-unsafe low-level editing API, if you need to use it, prefer
-/// to create a type-safe abstraction on top of it instead.
-pub fn replace_descendants(
-    parent: &SyntaxNode,
-    map: impl Fn(&SyntaxElement) -> Option<SyntaxElement>,
-) -> SyntaxNode {
-    _replace_descendants(parent, &map)
+#[derive(Default)]
+pub struct SyntaxRewriter<'a> {
+    f: Option<Box<dyn Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a>>,
+    //FIXME: add debug_assertions that all elements are in fact from the same file.
+    replacements: FxHashMap<SyntaxElement, Replacement>,
 }
 
-fn _replace_descendants(
-    parent: &SyntaxNode,
-    map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>,
-) -> SyntaxNode {
-    // FIXME: this could be made much faster.
-    let new_children = parent.children_with_tokens().map(|it| go(map, it)).collect::<Vec<_>>();
-    return with_children(parent, new_children);
+impl fmt::Debug for SyntaxRewriter<'_> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SyntaxRewriter").field("replacements", &self.replacements).finish()
+    }
+}
 
-    fn go(
-        map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>,
-        element: SyntaxElement,
-    ) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
-        if let Some(replacement) = map(&element) {
+impl<'a> SyntaxRewriter<'a> {
+    pub fn from_fn(f: impl Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a) -> SyntaxRewriter<'a> {
+        SyntaxRewriter { f: Some(Box::new(f)), replacements: FxHashMap::default() }
+    }
+    pub fn delete<T: Clone + Into<SyntaxElement>>(&mut self, what: &T) {
+        let what = what.clone().into();
+        let replacement = Replacement::Delete;
+        self.replacements.insert(what, replacement);
+    }
+    pub fn replace<T: Clone + Into<SyntaxElement>>(&mut self, what: &T, with: &T) {
+        let what = what.clone().into();
+        let replacement = Replacement::Single(with.clone().into());
+        self.replacements.insert(what, replacement);
+    }
+    pub fn replace_ast<T: AstNode>(&mut self, what: &T, with: &T) {
+        self.replace(what.syntax(), with.syntax())
+    }
+
+    pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode {
+        if self.f.is_none() && self.replacements.is_empty() {
+            return node.clone();
+        }
+        self.rewrite_children(node)
+    }
+
+    pub fn rewrite_ast<N: AstNode>(self, node: &N) -> N {
+        N::cast(self.rewrite(node.syntax())).unwrap()
+    }
+
+    pub fn rewrite_root(&self) -> Option<SyntaxNode> {
+        assert!(self.f.is_none());
+        self.replacements
+            .keys()
+            .map(|element| match element {
+                SyntaxElement::Node(it) => it.clone(),
+                SyntaxElement::Token(it) => it.parent(),
+            })
+            .fold1(|a, b| least_common_ancestor(&a, &b).unwrap())
+    }
+
+    fn replacement(&self, element: &SyntaxElement) -> Option<Replacement> {
+        if let Some(f) = &self.f {
+            assert!(self.replacements.is_empty());
+            return f(element).map(Replacement::Single);
+        }
+        self.replacements.get(element).cloned()
+    }
+
+    fn rewrite_children(&self, node: &SyntaxNode) -> SyntaxNode {
+        // FIXME: this could be made much faster.
+        let new_children =
+            node.children_with_tokens().flat_map(|it| self.rewrite_self(&it)).collect::<Vec<_>>();
+        with_children(node, new_children)
+    }
+
+    fn rewrite_self(
+        &self,
+        element: &SyntaxElement,
+    ) -> Option<NodeOrToken<rowan::GreenNode, rowan::GreenToken>> {
+        if let Some(replacement) = self.replacement(&element) {
             return match replacement {
-                NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()),
-                NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
+                Replacement::Single(NodeOrToken::Node(it)) => {
+                    Some(NodeOrToken::Node(it.green().clone()))
+                }
+                Replacement::Single(NodeOrToken::Token(it)) => {
+                    Some(NodeOrToken::Token(it.green().clone()))
+                }
+                Replacement::Delete => None,
             };
         }
-        match element {
+        let res = match element {
             NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
-            NodeOrToken::Node(it) => {
-                NodeOrToken::Node(_replace_descendants(&it, map).green().clone())
-            }
-        }
+            NodeOrToken::Node(it) => NodeOrToken::Node(self.rewrite_children(it).green().clone()),
+        };
+        Some(res)
     }
 }
 
+impl<'a> ops::AddAssign for SyntaxRewriter<'_> {
+    fn add_assign(&mut self, rhs: SyntaxRewriter) {
+        assert!(rhs.f.is_none());
+        self.replacements.extend(rhs.replacements)
+    }
+}
+
+#[derive(Clone, Debug)]
+enum Replacement {
+    Delete,
+    Single(SyntaxElement),
+}
+
 fn with_children(
     parent: &SyntaxNode,
     new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>,
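For orientation, here is a minimal usage sketch of the SyntaxRewriter API introduced above; the caller function, the input selection, and the use of TupleExpr are illustrative assumptions, not part of this change:

    // Hypothetical caller code: queue replacements, then rewrite the tree in one pass.
    use ra_syntax::{algo::SyntaxRewriter, ast::{self, make}, AstNode, SourceFile, SyntaxNode};

    fn replace_units_with_unimplemented(file: &SourceFile) -> SyntaxNode {
        let mut rewriter = SyntaxRewriter::default();
        // Record a replacement for every `()` tuple expression in the file.
        for unit in file.syntax().descendants().filter_map(ast::TupleExpr::cast) {
            rewriter.replace(unit.syntax(), make::expr_unimplemented().syntax());
        }
        // All queued edits are applied during a single rebuild of the green tree.
        rewriter.rewrite(file.syntax())
    }
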
diff --git a/crates/ra_syntax/src/ast/edit.rs b/crates/ra_syntax/src/ast/edit.rs
index 68dae008f..f74c9f9c6 100644
--- a/crates/ra_syntax/src/ast/edit.rs
+++ b/crates/ra_syntax/src/ast/edit.rs
@@ -4,7 +4,6 @@
 use std::{iter, ops::RangeInclusive};
 
 use arrayvec::ArrayVec;
-use rustc_hash::FxHashMap;
 
 use crate::{
     algo,
@@ -17,6 +16,7 @@ use crate::{
     SyntaxKind::{ATTR, COMMENT, WHITESPACE},
     SyntaxNode, SyntaxToken, T,
 };
+use algo::{neighbor, SyntaxRewriter};
 
 impl ast::BinExpr {
     #[must_use]
@@ -255,6 +255,28 @@ impl ast::UseItem {
         }
         self.clone()
     }
+
+    pub fn remove(&self) -> SyntaxRewriter<'static> {
+        let mut res = SyntaxRewriter::default();
+        res.delete(self.syntax());
+        let next_ws = self
+            .syntax()
+            .next_sibling_or_token()
+            .and_then(|it| it.into_token())
+            .and_then(ast::Whitespace::cast);
+        if let Some(next_ws) = next_ws {
+            let ws_text = next_ws.syntax().text();
+            if ws_text.starts_with('\n') {
+                let rest = &ws_text[1..];
+                if rest.is_empty() {
+                    res.delete(next_ws.syntax())
+                } else {
+                    res.replace(next_ws.syntax(), &make::tokens::whitespace(rest));
+                }
+            }
+        }
+        res
+    }
 }
 
 impl ast::UseTree {
@@ -293,14 +315,30 @@ impl ast::UseTree {
             Some(res)
         }
     }
+
+    pub fn remove(&self) -> SyntaxRewriter<'static> {
+        let mut res = SyntaxRewriter::default();
+        res.delete(self.syntax());
+        for &dir in [Direction::Next, Direction::Prev].iter() {
+            if let Some(nb) = neighbor(self, dir) {
+                self.syntax()
+                    .siblings_with_tokens(dir)
+                    .skip(1)
+                    .take_while(|it| it.as_node() != Some(nb.syntax()))
+                    .for_each(|el| res.delete(&el));
+                return res;
+            }
+        }
+        res
+    }
 }
 
 #[must_use]
-pub fn strip_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N {
-    N::cast(strip_attrs_and_docs_inner(node.syntax().clone())).unwrap()
+pub fn remove_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N {
+    N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap()
 }
 
-fn strip_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode {
+fn remove_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode {
     while let Some(start) =
         node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT)
     {
@@ -343,28 +381,24 @@ impl IndentLevel {
     }
 
     fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode {
-        let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node
-            .descendants_with_tokens()
+        let mut rewriter = SyntaxRewriter::default();
+        node.descendants_with_tokens()
             .filter_map(|el| el.into_token())
             .filter_map(ast::Whitespace::cast)
             .filter(|ws| {
                 let text = ws.syntax().text();
                 text.contains('\n')
             })
-            .map(|ws| {
-                (
-                    ws.syntax().clone().into(),
-                    make::tokens::whitespace(&format!(
-                        "{}{:width$}",
-                        ws.syntax().text(),
-                        "",
-                        width = self.0 as usize * 4
-                    ))
-                    .into(),
-                )
-            })
-            .collect();
-        algo::replace_descendants(&node, |n| replacements.get(n).cloned())
+            .for_each(|ws| {
+                let new_ws = make::tokens::whitespace(&format!(
+                    "{}{:width$}",
+                    ws.syntax().text(),
+                    "",
+                    width = self.0 as usize * 4
+                ));
+                rewriter.replace(ws.syntax(), &new_ws)
+            });
+        rewriter.rewrite(&node)
     }
 
     pub fn decrease_indent<N: AstNode>(self, node: N) -> N {
@@ -372,27 +406,21 @@ impl IndentLevel {
     }
 
     fn _decrease_indent(self, node: SyntaxNode) -> SyntaxNode {
-        let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node
-            .descendants_with_tokens()
+        let mut rewriter = SyntaxRewriter::default();
+        node.descendants_with_tokens()
             .filter_map(|el| el.into_token())
             .filter_map(ast::Whitespace::cast)
             .filter(|ws| {
                 let text = ws.syntax().text();
                 text.contains('\n')
             })
-            .map(|ws| {
-                (
-                    ws.syntax().clone().into(),
-                    make::tokens::whitespace(
-                        &ws.syntax()
-                            .text()
-                            .replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"),
-                    )
-                    .into(),
-                )
-            })
-            .collect();
-        algo::replace_descendants(&node, |n| replacements.get(n).cloned())
+            .for_each(|ws| {
+                let new_ws = make::tokens::whitespace(
+                    &ws.syntax().text().replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"),
+                );
+                rewriter.replace(ws.syntax(), &new_ws)
+            });
+        rewriter.rewrite(&node)
     }
 }
 
@@ -442,12 +470,11 @@ pub trait AstNodeEdit: AstNode + Sized {
         &self,
         replacement_map: impl IntoIterator<Item = (D, D)>,
     ) -> Self {
-        let map = replacement_map
-            .into_iter()
-            .map(|(from, to)| (from.syntax().clone().into(), to.syntax().clone().into()))
-            .collect::<FxHashMap<SyntaxElement, _>>();
-        let new_syntax = algo::replace_descendants(self.syntax(), |n| map.get(n).cloned());
-        Self::cast(new_syntax).unwrap()
+        let mut rewriter = SyntaxRewriter::default();
+        for (from, to) in replacement_map {
+            rewriter.replace(from.syntax(), to.syntax())
+        }
+        rewriter.rewrite_ast(self)
     }
 }
 
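Because the new remove() methods above return a SyntaxRewriter instead of applying the edit directly, several edits can be merged with += and applied together. A sketch of assumed caller code (the helper name and flow are illustrative; both items are assumed to come from the same file):

    use ra_syntax::{ast, SyntaxNode};

    // Hypothetical helper: delete two use items and apply both edits in one pass.
    fn remove_two_use_items(a: &ast::UseItem, b: &ast::UseItem) -> Option<SyntaxNode> {
        let mut rewriter = a.remove();
        rewriter += b.remove();
        // rewrite_root() finds the smallest node covering every recorded edit.
        let root = rewriter.rewrite_root()?;
        Some(rewriter.rewrite(&root))
    }
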
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs
index 9257ccd1a..1145b69e8 100644
--- a/crates/ra_syntax/src/ast/make.rs
+++ b/crates/ra_syntax/src/ast/make.rs
@@ -87,6 +87,9 @@ pub fn block_from_expr(e: ast::Expr) -> ast::Block {
 pub fn expr_unit() -> ast::Expr {
     expr_from_text("()")
 }
+pub fn expr_empty_block() -> ast::Expr {
+    expr_from_text("{}")
+}
 pub fn expr_unimplemented() -> ast::Expr {
     expr_from_text("unimplemented!()")
 }
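A quick sketch of what the new expr_empty_block constructor produces (the demo function and assertion are illustrative, not part of this change):

    use ra_syntax::ast::{make, AstNode};

    fn empty_block_demo() {
        // expr_empty_block() parses the literal text "{}" into an ast::Expr.
        let block = make::expr_empty_block();
        assert_eq!(block.syntax().text().to_string(), "{}");
    }
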
diff --git a/crates/ra_syntax/test_data/parser/err/0010_unsafe_lambda_block.txt b/crates/ra_syntax/test_data/parser/err/0010_unsafe_lambda_block.txt
index e0edf6a2d..0ffbd25aa 100644
--- a/crates/ra_syntax/test_data/parser/err/0010_unsafe_lambda_block.txt
+++ b/crates/ra_syntax/test_data/parser/err/0010_unsafe_lambda_block.txt
@@ -12,8 +12,8 @@ SOURCE_FILE@[0; 42)
       BLOCK@[10; 41)
         L_CURLY@[10; 11) "{"
         WHITESPACE@[11; 16) "\n    "
-        EXPR_STMT@[16; 39)
-          LAMBDA_EXPR@[16; 38)
+        EXPR_STMT@[16; 24)
+          LAMBDA_EXPR@[16; 24)
           PARAM_LIST@[16; 18)
             PIPE@[16; 17) "|"
             PIPE@[17; 18) "|"
@@ -24,20 +24,22 @@ SOURCE_FILE@[0; 42)
               TUPLE_TYPE@[22; 24)
                 L_PAREN@[22; 23) "("
                 R_PAREN@[23; 24) ")"
-            WHITESPACE@[24; 25) " "
-            BLOCK_EXPR@[25; 38)
-              UNSAFE_KW@[25; 31) "unsafe"
-              WHITESPACE@[31; 32) " "
-              BLOCK@[32; 38)
-                L_CURLY@[32; 33) "{"
-                WHITESPACE@[33; 34) " "
-                TUPLE_EXPR@[34; 36)
-                  L_PAREN@[34; 35) "("
-                  R_PAREN@[35; 36) ")"
-                WHITESPACE@[36; 37) " "
-                R_CURLY@[37; 38) "}"
+        WHITESPACE@[24; 25) " "
+        EXPR_STMT@[25; 39)
+          BLOCK_EXPR@[25; 38)
+            UNSAFE_KW@[25; 31) "unsafe"
+            WHITESPACE@[31; 32) " "
+            BLOCK@[32; 38)
+              L_CURLY@[32; 33) "{"
+              WHITESPACE@[33; 34) " "
+              TUPLE_EXPR@[34; 36)
+                L_PAREN@[34; 35) "("
+                R_PAREN@[35; 36) ")"
+              WHITESPACE@[36; 37) " "
+              R_CURLY@[37; 38) "}"
           SEMI@[38; 39) ";"
       WHITESPACE@[39; 40) "\n"
       R_CURLY@[40; 41) "}"
   WHITESPACE@[41; 42) "\n"
 error [24; 24): expected `{`
+error [24; 24): expected SEMI
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rs b/crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rs
new file mode 100644
index 000000000..061118d3a
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rs
@@ -0,0 +1 @@
+fn main() { || -> i32 { 92 }(); }
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.txt b/crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.txt
new file mode 100644
index 000000000..ba8779094
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.txt
@@ -0,0 +1,45 @@
+SOURCE_FILE@[0; 34)
+  FN_DEF@[0; 33)
+    FN_KW@[0; 2) "fn"
+    WHITESPACE@[2; 3) " "
+    NAME@[3; 7)
+      IDENT@[3; 7) "main"
+    PARAM_LIST@[7; 9)
+      L_PAREN@[7; 8) "("
+      R_PAREN@[8; 9) ")"
+    WHITESPACE@[9; 10) " "
+    BLOCK_EXPR@[10; 33)
+      BLOCK@[10; 33)
+        L_CURLY@[10; 11) "{"
+        WHITESPACE@[11; 12) " "
+        EXPR_STMT@[12; 31)
+          CALL_EXPR@[12; 30)
+            LAMBDA_EXPR@[12; 28)
+              PARAM_LIST@[12; 14)
+                PIPE@[12; 13) "|"
+                PIPE@[13; 14) "|"
+              WHITESPACE@[14; 15) " "
+              RET_TYPE@[15; 21)
+                THIN_ARROW@[15; 17) "->"
+                WHITESPACE@[17; 18) " "
+                PATH_TYPE@[18; 21)
+                  PATH@[18; 21)
+                    PATH_SEGMENT@[18; 21)
+                      NAME_REF@[18; 21)
+                        IDENT@[18; 21) "i32"
+              WHITESPACE@[21; 22) " "
+              BLOCK_EXPR@[22; 28)
+                BLOCK@[22; 28)
+                  L_CURLY@[22; 23) "{"
+                  WHITESPACE@[23; 24) " "
+                  LITERAL@[24; 26)
+                    INT_NUMBER@[24; 26) "92"
+                  WHITESPACE@[26; 27) " "
+                  R_CURLY@[27; 28) "}"
+            ARG_LIST@[28; 30)
+              L_PAREN@[28; 29) "("
+              R_PAREN@[29; 30) ")"
+          SEMI@[30; 31) ";"
+        WHITESPACE@[31; 32) " "
+        R_CURLY@[32; 33) "}"
+  WHITESPACE@[33; 34) "\n"