author    | Aleksey Kladov <[email protected]> | 2021-01-30 15:19:21 +0000
committer | Aleksey Kladov <[email protected]> | 2021-03-16 13:10:49 +0000
commit    | f5a81ec4683613bd62624811733345d627f2127b (patch)
tree      | 54490888591ddc005d510695787308b78739ef05 /crates/syntax
parent    | 62ec04bbd53ba50e21a7b8f23d46958d322640eb (diff)
Upgrade rowan
Notably, the new rowan comes with support for mutable syntax trees.
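
For readers skimming the diff, here is a minimal sketch of the workflow the mutable trees enable, using the `clone_for_update` and `ted` APIs added further down. The parsing setup and the specific edit are illustrative only, not part of this commit:

```rust
use syntax::{ast::{self, make, AstNode}, ted, SourceFile};

fn demo() {
    // Parse an immutable tree, then make a mutable copy of it.
    let file: SourceFile = SourceFile::parse("fn f() {}").tree().clone_for_update();
    let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();
    // Edit in place: append a trailing space token to the `fn` node.
    ted::append_child(func.syntax().clone(), make::tokens::single_space());
    assert_eq!(file.syntax().to_string(), "fn f() {} ");
}
```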
Diffstat (limited to 'crates/syntax')
-rw-r--r-- | crates/syntax/Cargo.toml               |   2
-rw-r--r-- | crates/syntax/src/algo.rs              |  35
-rw-r--r-- | crates/syntax/src/ast.rs               |   7
-rw-r--r-- | crates/syntax/src/ast/edit_in_place.rs | 105
-rw-r--r-- | crates/syntax/src/ast/make.rs          |  14
-rw-r--r-- | crates/syntax/src/ast/node_ext.rs      |   4
-rw-r--r-- | crates/syntax/src/lib.rs               |   1
-rw-r--r-- | crates/syntax/src/parsing/reparsing.rs |   6
-rw-r--r-- | crates/syntax/src/ted.rs               |  78
9 files changed, 226 insertions, 26 deletions
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index c0fd894b0..74cafaa8d 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
 [dependencies]
 cov-mark = { version = "1.1", features = ["thread-local"] }
 itertools = "0.10.0"
-rowan = "0.12.2"
+rowan = "0.13.0-pre.2"
 rustc_lexer = { version = "710.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index b13252eec..82ebf9037 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -4,7 +4,6 @@ use std::{
     fmt,
     hash::BuildHasherDefault,
     ops::{self, RangeInclusive},
-    ptr,
 };
 
 use indexmap::IndexMap;
@@ -27,7 +26,7 @@ pub fn ancestors_at_offset(
     offset: TextSize,
 ) -> impl Iterator<Item = SyntaxNode> {
     node.token_at_offset(offset)
-        .map(|token| token.parent().ancestors())
+        .map(|token| token.ancestors())
         .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
 }
 
@@ -171,7 +170,7 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
         && lhs.text_range().len() == rhs.text_range().len()
         && match (&lhs, &rhs) {
             (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
-                ptr::eq(lhs.green(), rhs.green()) || lhs.text() == rhs.text()
+                lhs == rhs || lhs.text() == rhs.text()
             }
             (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
             _ => false,
@@ -280,9 +279,10 @@ fn _insert_children(
         to_green_element(element)
     });
 
-    let mut old_children = parent.green().children().map(|it| match it {
-        NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()),
-        NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()),
+    let parent_green = parent.green();
+    let mut old_children = parent_green.children().map(|it| match it {
+        NodeOrToken::Token(it) => NodeOrToken::Token(it.to_owned()),
+        NodeOrToken::Node(it) => NodeOrToken::Node(it.to_owned()),
     });
 
     let new_children = match &position {
@@ -319,9 +319,10 @@ fn _replace_children(
 ) -> SyntaxNode {
     let start = position_of_child(parent, to_delete.start().clone());
     let end = position_of_child(parent, to_delete.end().clone());
-    let mut old_children = parent.green().children().map(|it| match it {
-        NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()),
-        NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()),
+    let parent_green = parent.green();
+    let mut old_children = parent_green.children().map(|it| match it {
+        NodeOrToken::Token(it) => NodeOrToken::Token(it.to_owned()),
+        NodeOrToken::Node(it) => NodeOrToken::Node(it.to_owned()),
     });
 
     let before = old_children.by_ref().take(start).collect::<Vec<_>>();
@@ -487,9 +488,9 @@ impl<'a> SyntaxRewriter<'a> {
     /// Returns `None` when there are no replacements.
     pub fn rewrite_root(&self) -> Option<SyntaxNode> {
         let _p = profile::span("rewrite_root");
-        fn element_to_node_or_parent(element: &SyntaxElement) -> SyntaxNode {
+        fn element_to_node_or_parent(element: &SyntaxElement) -> Option<SyntaxNode> {
             match element {
-                SyntaxElement::Node(it) => it.clone(),
+                SyntaxElement::Node(it) => Some(it.clone()),
                 SyntaxElement::Token(it) => it.parent(),
             }
         }
@@ -497,9 +498,9 @@ impl<'a> SyntaxRewriter<'a> {
         assert!(self.f.is_none());
         self.replacements
             .keys()
-            .map(element_to_node_or_parent)
-            .chain(self.insertions.keys().map(|pos| match pos {
-                InsertPos::FirstChildOf(it) => it.clone(),
+            .filter_map(element_to_node_or_parent)
+            .chain(self.insertions.keys().filter_map(|pos| match pos {
+                InsertPos::FirstChildOf(it) => Some(it.clone()),
                 InsertPos::After(it) => element_to_node_or_parent(it),
             }))
             // If we only have one replacement/insertion, we must return its parent node, since `rewrite` does
@@ -552,7 +553,7 @@ impl<'a> SyntaxRewriter<'a> {
             };
         } else {
             match element {
-                NodeOrToken::Token(it) => acc.push(NodeOrToken::Token(it.green().clone())),
+                NodeOrToken::Token(it) => acc.push(NodeOrToken::Token(it.green().to_owned())),
                 NodeOrToken::Node(it) => {
                     acc.push(NodeOrToken::Node(self.rewrite_children(it)));
                 }
@@ -567,7 +568,7 @@ impl<'a> SyntaxRewriter<'a> {
 fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
     match element {
         NodeOrToken::Node(it) => NodeOrToken::Node(it.green().to_owned()),
-        NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
+        NodeOrToken::Token(it) => NodeOrToken::Token(it.green().to_owned()),
     }
 }
 
@@ -625,7 +626,7 @@ fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
 fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
     match element {
         NodeOrToken::Node(it) => it.green().to_owned().into(),
-        NodeOrToken::Token(it) => it.green().clone().into(),
+        NodeOrToken::Token(it) => it.green().to_owned().into(),
     }
 }
 
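
As a usage note on the `ancestors_at_offset` tweak above: tokens now walk their ancestor chain directly, so callers keep the same shape. A hypothetical caller (the names and the `ast::Item` target are illustrative, not from this commit):

```rust
use syntax::{algo, ast, AstNode, SourceFile, TextSize};

fn smallest_enclosing_item(file: &SourceFile, offset: TextSize) -> Option<ast::Item> {
    // Nodes covering `offset`, smallest first, as produced by `ancestors_at_offset`.
    algo::ancestors_at_offset(file.syntax(), offset).find_map(ast::Item::cast)
}
```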
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index b3a24d39d..19261686c 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -6,6 +6,7 @@ mod token_ext;
 mod node_ext;
 mod expr_ext;
 pub mod edit;
+pub mod edit_in_place;
 pub mod make;
 
 use std::marker::PhantomData;
@@ -40,6 +41,12 @@ pub trait AstNode {
         Self: Sized;
 
     fn syntax(&self) -> &SyntaxNode;
+    fn clone_for_update(&self) -> Self
+    where
+        Self: Sized,
+    {
+        Self::cast(self.syntax().clone_for_update()).unwrap()
+    }
 }
 
 /// Like `AstNode`, but wraps tokens rather than interior nodes.
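
The new `clone_for_update` default method lets any typed AST node be copied into a mutable tree without dropping down to raw `SyntaxNode`s. A small sketch (the lookup is illustrative):

```rust
use syntax::{ast, AstNode, SourceFile};

fn editable_copy_of_first_fn(file: &SourceFile) -> Option<ast::Fn> {
    let func = file.syntax().descendants().find_map(ast::Fn::cast)?;
    // Same node shape, but backed by a mutable tree that in-place edits can modify.
    Some(func.clone_for_update())
}
```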
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
new file mode 100644
index 000000000..06cde591d
--- /dev/null
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -0,0 +1,105 @@
+//! Structural editing for ast.
+
+use std::iter::empty;
+
+use ast::{edit::AstNodeEdit, make, GenericParamsOwner, WhereClause};
+use parser::T;
+
+use crate::{
+    ast,
+    ted::{self, Position},
+    AstNode, Direction, SyntaxKind,
+};
+
+use super::NameOwner;
+
+pub trait GenericParamsOwnerEdit: ast::GenericParamsOwner + AstNodeEdit {
+    fn get_or_create_where_clause(&self) -> ast::WhereClause;
+}
+
+impl GenericParamsOwnerEdit for ast::Fn {
+    fn get_or_create_where_clause(&self) -> WhereClause {
+        if self.where_clause().is_none() {
+            let position = if let Some(ty) = self.ret_type() {
+                Position::after(ty.syntax().clone())
+            } else if let Some(param_list) = self.param_list() {
+                Position::after(param_list.syntax().clone())
+            } else {
+                Position::last_child_of(self.syntax().clone())
+            };
+            create_where_clause(position)
+        }
+        self.where_clause().unwrap()
+    }
+}
+
+impl GenericParamsOwnerEdit for ast::Impl {
+    fn get_or_create_where_clause(&self) -> WhereClause {
+        if self.where_clause().is_none() {
+            let position = if let Some(ty) = self.self_ty() {
+                Position::after(ty.syntax().clone())
+            } else {
+                Position::last_child_of(self.syntax().clone())
+            };
+            create_where_clause(position)
+        }
+        self.where_clause().unwrap()
+    }
+}
+impl GenericParamsOwnerEdit for ast::Struct {
+    fn get_or_create_where_clause(&self) -> WhereClause {
+        if self.where_clause().is_none() {
+            let tfl = self.field_list().and_then(|fl| match fl {
+                ast::FieldList::RecordFieldList(_) => None,
+                ast::FieldList::TupleFieldList(it) => Some(it),
+            });
+            let position = if let Some(tfl) = tfl {
+                Position::after(tfl.syntax().clone())
+            } else if let Some(gpl) = self.generic_param_list() {
+                Position::after(gpl.syntax().clone())
+            } else if let Some(name) = self.name() {
+                Position::after(name.syntax().clone())
+            } else {
+                Position::last_child_of(self.syntax().clone())
+            };
+            create_where_clause(position)
+        }
+        self.where_clause().unwrap()
+    }
+}
+
+fn create_where_clause(position: Position) {
+    let elements = vec![
+        make::tokens::single_space().into(),
+        make::where_clause(empty()).clone_for_update().syntax().clone().into(),
+    ];
+    ted::insert_all(position, elements);
+}
+
+impl ast::WhereClause {
+    pub fn add_predicate(&self, predicate: ast::WherePred) {
+        if let Some(pred) = self.predicates().last() {
+            if !pred.syntax().siblings_with_tokens(Direction::Next).any(|it| it.kind() == T![,]) {
+                ted::append_child(self.syntax().clone(), make::token(T![,]));
+            }
+        }
+        if self.syntax().children_with_tokens().last().map(|it| it.kind())
+            != Some(SyntaxKind::WHITESPACE)
+        {
+            ted::append_child(self.syntax().clone(), make::tokens::single_space());
+        }
+        ted::append_child(self.syntax().clone(), predicate.syntax().clone())
+    }
+}
+
+impl ast::TypeBoundList {
+    pub fn remove(&self) {
+        if let Some(colon) =
+            self.syntax().siblings_with_tokens(Direction::Prev).find(|it| it.kind() == T![:])
+        {
+            ted::remove_all(colon..=self.syntax().clone().into())
+        } else {
+            ted::remove(self.syntax().clone())
+        }
+    }
+}
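
A sketch of how these in-place editors are meant to compose. It assumes `func` comes from a `clone_for_update` tree and that the `WherePred` has already been built elsewhere (for example via `make`, mirroring the `create_where_clause` pattern above), which is not shown here:

```rust
use syntax::ast::{self, edit_in_place::GenericParamsOwnerEdit};

fn add_bound(func: &ast::Fn, predicate: ast::WherePred) {
    // Inserts ` where` after the return type / parameter list if it is missing.
    let where_clause = func.get_or_create_where_clause();
    // Appends the predicate, adding the separating comma and space as needed.
    where_clause.add_predicate(predicate);
}
```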
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 05a6b0b25..810c8d4c8 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -174,6 +174,11 @@ pub fn block_expr(
 pub fn expr_unit() -> ast::Expr {
     expr_from_text("()")
 }
+pub fn expr_literal(text: &str) -> ast::Literal {
+    assert_eq!(text.trim(), text);
+    ast_from_text(&format!("fn f() {{ let _ = {}; }}", text))
+}
+
 pub fn expr_empty_block() -> ast::Expr {
     expr_from_text("{}")
 }
@@ -390,6 +395,7 @@ pub fn token(kind: SyntaxKind) -> SyntaxToken {
     tokens::SOURCE_FILE
         .tree()
         .syntax()
+        .clone_for_update()
         .descendants_with_tokens()
         .filter_map(|it| it.into_token())
         .find(|it| it.kind() == kind)
@@ -544,6 +550,7 @@ pub mod tokens {
         SOURCE_FILE
             .tree()
             .syntax()
+            .clone_for_update()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
             .find(|it| it.kind() == WHITESPACE && it.text() == " ")
@@ -569,13 +576,16 @@
     }
 
     pub fn single_newline() -> SyntaxToken {
-        SOURCE_FILE
+        let res = SOURCE_FILE
             .tree()
             .syntax()
+            .clone_for_update()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
             .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
-            .unwrap()
+            .unwrap();
+        res.detach();
+        res
     }
 
     pub fn blank_line() -> SyntaxToken {
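
For reference, the new `expr_literal` constructor follows the usual `make` pattern of carving the node out of a parsed template, and the `clone_for_update` / `detach` dance in the token helpers exists so the returned tokens can later be spliced into other mutable trees. A trivial usage sketch (the value is illustrative):

```rust
use syntax::ast::{self, make};

fn forty_two() -> ast::Literal {
    // Internally parsed out of the template `fn f() { let _ = 42; }`.
    make::expr_literal("42")
}
```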
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 52ac97c84..0b0d39a75 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -34,7 +34,9 @@ impl ast::NameRef {
 }
 
 fn text_of_first_token(node: &SyntaxNode) -> &str {
-    node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
+    let t =
+        node.green().children().next().and_then(|it| it.into_token()).unwrap().text().to_string();
+    Box::leak(Box::new(t))
 }
 
 pub enum Macro {
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index 09e212e8c..2a5c61171 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -38,6 +38,7 @@ pub mod ast;
 #[doc(hidden)]
 pub mod fuzz;
 pub mod utils;
+pub mod ted;
 
 use std::{marker::PhantomData, sync::Arc};
 
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 3d637bf91..4ad50ab72 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -124,11 +124,7 @@ fn is_contextual_kw(text: &str) -> bool {
 fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
     let node = node.covering_element(range);
 
-    let mut ancestors = match node {
-        NodeOrToken::Token(it) => it.parent().ancestors(),
-        NodeOrToken::Node(it) => it.ancestors(),
-    };
-    ancestors.find_map(|node| {
+    node.ancestors().find_map(|node| {
         let first_child = node.first_child_or_token().map(|it| it.kind());
         let parent = node.parent().map(|it| it.kind());
         Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r))
diff --git a/crates/syntax/src/ted.rs b/crates/syntax/src/ted.rs
new file mode 100644
index 000000000..8d6175ed9
--- /dev/null
+++ b/crates/syntax/src/ted.rs
@@ -0,0 +1,78 @@
+//! Primitive tree editor, ed for trees
+#![allow(unused)]
+use std::ops::RangeInclusive;
+
+use crate::{SyntaxElement, SyntaxNode};
+
+#[derive(Debug)]
+pub struct Position {
+    repr: PositionRepr,
+}
+
+#[derive(Debug)]
+enum PositionRepr {
+    FirstChild(SyntaxNode),
+    After(SyntaxElement),
+}
+
+impl Position {
+    pub fn after(elem: impl Into<SyntaxElement>) -> Position {
+        let repr = PositionRepr::After(elem.into());
+        Position { repr }
+    }
+    pub fn before(elem: impl Into<SyntaxElement>) -> Position {
+        let elem = elem.into();
+        let repr = match elem.prev_sibling_or_token() {
+            Some(it) => PositionRepr::After(it),
+            None => PositionRepr::FirstChild(elem.parent().unwrap()),
+        };
+        Position { repr }
+    }
+    pub fn first_child_of(node: impl Into<SyntaxNode>) -> Position {
+        let repr = PositionRepr::FirstChild(node.into());
+        Position { repr }
+    }
+    pub fn last_child_of(node: impl Into<SyntaxNode>) -> Position {
+        let node = node.into();
+        let repr = match node.last_child_or_token() {
+            Some(it) => PositionRepr::After(it),
+            None => PositionRepr::FirstChild(node),
+        };
+        Position { repr }
+    }
+}
+
+pub fn insert(position: Position, elem: impl Into<SyntaxElement>) {
+    insert_all(position, vec![elem.into()])
+}
+pub fn insert_all(position: Position, elements: Vec<SyntaxElement>) {
+    let (parent, index) = match position.repr {
+        PositionRepr::FirstChild(parent) => (parent, 0),
+        PositionRepr::After(child) => (child.parent().unwrap(), child.index() + 1),
+    };
+    parent.splice_children(index..index, elements);
+}
+
+pub fn remove(elem: impl Into<SyntaxElement>) {
+    let elem = elem.into();
+    remove_all(elem.clone()..=elem)
+}
+pub fn remove_all(range: RangeInclusive<SyntaxElement>) {
+    replace_all(range, Vec::new())
+}
+
+pub fn replace(old: impl Into<SyntaxElement>, new: impl Into<SyntaxElement>) {
+    let old = old.into();
+    replace_all(old.clone()..=old, vec![new.into()])
+}
+pub fn replace_all(range: RangeInclusive<SyntaxElement>, new: Vec<SyntaxElement>) {
+    let start = range.start().index();
+    let end = range.end().index();
+    let parent = range.start().parent().unwrap();
+    parent.splice_children(start..end + 1, new)
+}
+
+pub fn append_child(node: impl Into<SyntaxNode>, child: impl Into<SyntaxElement>) {
+    let position = Position::last_child_of(node);
+    insert(position, child)
+}
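
Taken together, `Position` plus the free functions give a small `ed`-like surface over mutable trees. A sketch of typical calls, assuming `node` belongs to a tree obtained via `clone_for_update` (the helper names are illustrative):

```rust
use syntax::{ast::make, ted, SyntaxNode};

fn add_trailing_space(node: &SyntaxNode) {
    // Insert a single space token as the last child of `node`.
    ted::insert(ted::Position::last_child_of(node.clone()), make::tokens::single_space());
}

fn drop_first_child(node: &SyntaxNode) {
    // Remove the first child node or token, if any.
    if let Some(first) = node.first_child_or_token() {
        ted::remove(first);
    }
}
```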