Diffstat (limited to 'crates/syntax')
-rw-r--r-- | crates/syntax/Cargo.toml | 2
-rw-r--r-- | crates/syntax/src/algo.rs | 133
-rw-r--r-- | crates/syntax/src/ast.rs | 6
-rw-r--r-- | crates/syntax/src/ast/edit.rs | 493
-rw-r--r-- | crates/syntax/src/ast/edit_in_place.rs | 182
-rw-r--r-- | crates/syntax/src/ast/make.rs | 12
-rw-r--r-- | crates/syntax/src/ast/token_ext.rs | 81
-rw-r--r-- | crates/syntax/src/parsing.rs | 7
-rw-r--r-- | crates/syntax/src/parsing/text_tree_sink.rs | 4
-rw-r--r-- | crates/syntax/test_data/parser/ok/0045_block_attrs.rast | 6
10 files changed, 292 insertions, 634 deletions
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index c0bc59918..747f0b9eb 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false | |||
13 | [dependencies] | 13 | [dependencies] |
14 | cov-mark = { version = "1.1", features = ["thread-local"] } | 14 | cov-mark = { version = "1.1", features = ["thread-local"] } |
15 | itertools = "0.10.0" | 15 | itertools = "0.10.0" |
16 | rowan = "=0.13.0-pre.5" | 16 | rowan = "=0.13.0-pre.6" |
17 | rustc_lexer = { version = "716.0.0", package = "rustc-ap-rustc_lexer" } | 17 | rustc_lexer = { version = "716.0.0", package = "rustc-ap-rustc_lexer" } |
18 | rustc-hash = "1.1.0" | 18 | rustc-hash = "1.1.0" |
19 | arrayvec = "0.7" | 19 | arrayvec = "0.7" |
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 3f9b84ab9..241713c48 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -1,6 +1,6 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use std::{fmt, hash::BuildHasherDefault, ops::RangeInclusive}; | 3 | use std::{hash::BuildHasherDefault, ops::RangeInclusive}; |
4 | 4 | ||
5 | use indexmap::IndexMap; | 5 | use indexmap::IndexMap; |
6 | use itertools::Itertools; | 6 | use itertools::Itertools; |
@@ -330,137 +330,6 @@ fn _replace_children( | |||
330 | with_children(parent, new_children) | 330 | with_children(parent, new_children) |
331 | } | 331 | } |
332 | 332 | ||
333 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
334 | enum InsertPos { | ||
335 | FirstChildOf(SyntaxNode), | ||
336 | After(SyntaxElement), | ||
337 | } | ||
338 | |||
339 | #[derive(Default)] | ||
340 | pub struct SyntaxRewriter<'a> { | ||
341 | //FIXME: add debug_assertions that all elements are in fact from the same file. | ||
342 | replacements: FxHashMap<SyntaxElement, Replacement>, | ||
343 | insertions: IndexMap<InsertPos, Vec<SyntaxElement>>, | ||
344 | _pd: std::marker::PhantomData<&'a ()>, | ||
345 | } | ||
346 | |||
347 | impl fmt::Debug for SyntaxRewriter<'_> { | ||
348 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
349 | f.debug_struct("SyntaxRewriter") | ||
350 | .field("replacements", &self.replacements) | ||
351 | .field("insertions", &self.insertions) | ||
352 | .finish() | ||
353 | } | ||
354 | } | ||
355 | |||
356 | impl SyntaxRewriter<'_> { | ||
357 | pub fn replace<T: Clone + Into<SyntaxElement>>(&mut self, what: &T, with: &T) { | ||
358 | let what = what.clone().into(); | ||
359 | let replacement = Replacement::Single(with.clone().into()); | ||
360 | self.replacements.insert(what, replacement); | ||
361 | } | ||
362 | |||
363 | pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode { | ||
364 | let _p = profile::span("rewrite"); | ||
365 | |||
366 | if self.replacements.is_empty() && self.insertions.is_empty() { | ||
367 | return node.clone(); | ||
368 | } | ||
369 | let green = self.rewrite_children(node); | ||
370 | with_green(node, green) | ||
371 | } | ||
372 | |||
373 | pub fn rewrite_ast<N: AstNode>(self, node: &N) -> N { | ||
374 | N::cast(self.rewrite(node.syntax())).unwrap() | ||
375 | } | ||
376 | |||
377 | /// Returns a node that encompasses all replacements to be done by this rewriter. | ||
378 | /// | ||
379 | /// Passing the returned node to `rewrite` will apply all replacements queued up in `self`. | ||
380 | /// | ||
381 | /// Returns `None` when there are no replacements. | ||
382 | pub fn rewrite_root(&self) -> Option<SyntaxNode> { | ||
383 | let _p = profile::span("rewrite_root"); | ||
384 | fn element_to_node_or_parent(element: &SyntaxElement) -> Option<SyntaxNode> { | ||
385 | match element { | ||
386 | SyntaxElement::Node(it) => Some(it.clone()), | ||
387 | SyntaxElement::Token(it) => it.parent(), | ||
388 | } | ||
389 | } | ||
390 | |||
391 | self.replacements | ||
392 | .keys() | ||
393 | .filter_map(element_to_node_or_parent) | ||
394 | .chain(self.insertions.keys().filter_map(|pos| match pos { | ||
395 | InsertPos::FirstChildOf(it) => Some(it.clone()), | ||
396 | InsertPos::After(it) => element_to_node_or_parent(it), | ||
397 | })) | ||
398 | // If we only have one replacement/insertion, we must return its parent node, since `rewrite` does | ||
399 | // not replace the node passed to it. | ||
400 | .map(|it| it.parent().unwrap_or(it)) | ||
401 | .fold1(|a, b| least_common_ancestor(&a, &b).unwrap()) | ||
402 | } | ||
403 | |||
404 | fn replacement(&self, element: &SyntaxElement) -> Option<Replacement> { | ||
405 | self.replacements.get(element).cloned() | ||
406 | } | ||
407 | |||
408 | fn insertions(&self, pos: &InsertPos) -> Option<impl Iterator<Item = SyntaxElement> + '_> { | ||
409 | self.insertions.get(pos).map(|insertions| insertions.iter().cloned()) | ||
410 | } | ||
411 | |||
412 | fn rewrite_children(&self, node: &SyntaxNode) -> rowan::GreenNode { | ||
413 | let _p = profile::span("rewrite_children"); | ||
414 | |||
415 | // FIXME: this could be made much faster. | ||
416 | let mut new_children = Vec::new(); | ||
417 | if let Some(elements) = self.insertions(&InsertPos::FirstChildOf(node.clone())) { | ||
418 | new_children.extend(elements.map(element_to_green)); | ||
419 | } | ||
420 | for child in node.children_with_tokens() { | ||
421 | self.rewrite_self(&mut new_children, &child); | ||
422 | } | ||
423 | |||
424 | rowan::GreenNode::new(rowan::SyntaxKind(node.kind() as u16), new_children) | ||
425 | } | ||
426 | |||
427 | fn rewrite_self( | ||
428 | &self, | ||
429 | acc: &mut Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, | ||
430 | element: &SyntaxElement, | ||
431 | ) { | ||
432 | let _p = profile::span("rewrite_self"); | ||
433 | |||
434 | if let Some(replacement) = self.replacement(&element) { | ||
435 | match replacement { | ||
436 | Replacement::Single(element) => acc.push(element_to_green(element)), | ||
437 | }; | ||
438 | } else { | ||
439 | match element { | ||
440 | NodeOrToken::Token(it) => acc.push(NodeOrToken::Token(it.green().to_owned())), | ||
441 | NodeOrToken::Node(it) => { | ||
442 | acc.push(NodeOrToken::Node(self.rewrite_children(it))); | ||
443 | } | ||
444 | } | ||
445 | } | ||
446 | if let Some(elements) = self.insertions(&InsertPos::After(element.clone())) { | ||
447 | acc.extend(elements.map(element_to_green)); | ||
448 | } | ||
449 | } | ||
450 | } | ||
451 | |||
452 | fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> { | ||
453 | match element { | ||
454 | NodeOrToken::Node(it) => NodeOrToken::Node(it.green().into_owned()), | ||
455 | NodeOrToken::Token(it) => NodeOrToken::Token(it.green().to_owned()), | ||
456 | } | ||
457 | } | ||
458 | |||
459 | #[derive(Clone, Debug)] | ||
460 | enum Replacement { | ||
461 | Single(SyntaxElement), | ||
462 | } | ||
463 | |||
464 | fn with_children( | 333 | fn with_children( |
465 | parent: &SyntaxNode, | 334 | parent: &SyntaxNode, |
466 | new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, | 335 | new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, |
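Note: `SyntaxRewriter` is deleted above without a direct in-file replacement; edits now go through mutable trees (`clone_for_update`) and the `ted` module used elsewhere in this diff. A minimal sketch of that replacement style, assuming `ted` and `SourceFile` are reachable from the crate root as they are for in-tree callers:

```rust
use syntax::{
    ast::{self, make},
    ted, AstNode, SourceFile,
};

fn main() {
    let parse = SourceFile::parse("fn foo() {}");
    // Mutable copy of the tree; the original parse result stays untouched.
    let file = parse.tree().clone_for_update();
    if let Some(name) = file.syntax().descendants().find_map(ast::Name::cast) {
        let new_name = make::name("bar").clone_for_update();
        // In-place replacement: the ted counterpart of the removed
        // `SyntaxRewriter::replace` + `rewrite` pair.
        ted::replace(name.syntax(), new_name.syntax());
    }
    assert_eq!(file.syntax().to_string(), "fn bar() {}");
}
```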
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 7f472d4db..a8071b51d 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -47,6 +47,12 @@ pub trait AstNode { | |||
47 | { | 47 | { |
48 | Self::cast(self.syntax().clone_for_update()).unwrap() | 48 | Self::cast(self.syntax().clone_for_update()).unwrap() |
49 | } | 49 | } |
50 | fn clone_subtree(&self) -> Self | ||
51 | where | ||
52 | Self: Sized, | ||
53 | { | ||
54 | Self::cast(self.syntax().clone_subtree()).unwrap() | ||
55 | } | ||
50 | } | 56 | } |
51 | 57 | ||
52 | /// Like `AstNode`, but wraps tokens rather than interior nodes. | 58 | /// Like `AstNode`, but wraps tokens rather than interior nodes. |
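The new `clone_subtree` (enabled by the rowan bump in `Cargo.toml` above) re-roots a node into a fresh tree, complementing the existing `clone_for_update`. A small illustrative sketch; the parse text is an assumption, not from the diff:

```rust
use syntax::{ast, AstNode, SourceFile};

fn main() {
    let file = SourceFile::parse("mod m { fn f() {} }").tree();
    let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();

    // Inside the original tree the function sits at a non-zero offset and
    // has `mod m { ... }` above it.
    assert!(func.syntax().parent().is_some());

    // `clone_subtree` makes the node the root of a new tree: same text,
    // but offsets restart at 0 and there is no parent to walk up to.
    let detached = func.clone_subtree();
    assert_eq!(detached.syntax().text_range().start(), 0.into());
    assert!(detached.syntax().parent().is_none());
}
```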
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs
index cbc75f922..61952377f 100644
--- a/crates/syntax/src/ast/edit.rs
+++ b/crates/syntax/src/ast/edit.rs
@@ -2,20 +2,16 @@ | |||
2 | //! immutable, all function here return a fresh copy of the tree, instead of | 2 | //! immutable, all function here return a fresh copy of the tree, instead of |
3 | //! doing an in-place modification. | 3 | //! doing an in-place modification. |
4 | use std::{ | 4 | use std::{ |
5 | array, fmt, iter, | 5 | fmt, iter, |
6 | ops::{self, RangeInclusive}, | 6 | ops::{self, RangeInclusive}, |
7 | }; | 7 | }; |
8 | 8 | ||
9 | use arrayvec::ArrayVec; | 9 | use arrayvec::ArrayVec; |
10 | 10 | ||
11 | use crate::{ | 11 | use crate::{ |
12 | algo::{self, SyntaxRewriter}, | 12 | algo, |
13 | ast::{ | 13 | ast::{self, make, AstNode}, |
14 | self, | 14 | ted, AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxKind, |
15 | make::{self, tokens}, | ||
16 | AstNode, GenericParamsOwner, NameOwner, TypeBoundsOwner, | ||
17 | }, | ||
18 | AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind, | ||
19 | SyntaxKind::{ATTR, COMMENT, WHITESPACE}, | 15 | SyntaxKind::{ATTR, COMMENT, WHITESPACE}, |
20 | SyntaxNode, SyntaxToken, T, | 16 | SyntaxNode, SyntaxToken, T, |
21 | }; | 17 | }; |
@@ -29,250 +25,6 @@ impl ast::BinExpr { | |||
29 | } | 25 | } |
30 | } | 26 | } |
31 | 27 | ||
32 | impl ast::Fn { | ||
33 | #[must_use] | ||
34 | pub fn with_body(&self, body: ast::BlockExpr) -> ast::Fn { | ||
35 | let mut to_insert: ArrayVec<SyntaxElement, 2> = ArrayVec::new(); | ||
36 | let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() { | ||
37 | old_body.syntax().clone().into() | ||
38 | } else if let Some(semi) = self.semicolon_token() { | ||
39 | to_insert.push(make::tokens::single_space().into()); | ||
40 | semi.into() | ||
41 | } else { | ||
42 | to_insert.push(make::tokens::single_space().into()); | ||
43 | to_insert.push(body.syntax().clone().into()); | ||
44 | return self.insert_children(InsertPosition::Last, to_insert); | ||
45 | }; | ||
46 | to_insert.push(body.syntax().clone().into()); | ||
47 | self.replace_children(single_node(old_body_or_semi), to_insert) | ||
48 | } | ||
49 | |||
50 | #[must_use] | ||
51 | pub fn with_generic_param_list(&self, generic_args: ast::GenericParamList) -> ast::Fn { | ||
52 | if let Some(old) = self.generic_param_list() { | ||
53 | return self.replace_descendant(old, generic_args); | ||
54 | } | ||
55 | |||
56 | let anchor = self.name().expect("The function must have a name").syntax().clone(); | ||
57 | |||
58 | let to_insert = [generic_args.syntax().clone().into()]; | ||
59 | self.insert_children(InsertPosition::After(anchor.into()), array::IntoIter::new(to_insert)) | ||
60 | } | ||
61 | } | ||
62 | |||
63 | fn make_multiline<N>(node: N) -> N | ||
64 | where | ||
65 | N: AstNode + Clone, | ||
66 | { | ||
67 | let l_curly = match node.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) { | ||
68 | Some(it) => it, | ||
69 | None => return node, | ||
70 | }; | ||
71 | let sibling = match l_curly.next_sibling_or_token() { | ||
72 | Some(it) => it, | ||
73 | None => return node, | ||
74 | }; | ||
75 | let existing_ws = match sibling.as_token() { | ||
76 | None => None, | ||
77 | Some(tok) if tok.kind() != WHITESPACE => None, | ||
78 | Some(ws) => { | ||
79 | if ws.text().contains('\n') { | ||
80 | return node; | ||
81 | } | ||
82 | Some(ws.clone()) | ||
83 | } | ||
84 | }; | ||
85 | |||
86 | let indent = leading_indent(node.syntax()).unwrap_or_default(); | ||
87 | let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); | ||
88 | let to_insert = iter::once(ws.ws().into()); | ||
89 | match existing_ws { | ||
90 | None => node.insert_children(InsertPosition::After(l_curly), to_insert), | ||
91 | Some(ws) => node.replace_children(single_node(ws), to_insert), | ||
92 | } | ||
93 | } | ||
94 | |||
95 | impl ast::Impl { | ||
96 | #[must_use] | ||
97 | pub fn with_assoc_item_list(&self, items: ast::AssocItemList) -> ast::Impl { | ||
98 | let mut to_insert: ArrayVec<SyntaxElement, 2> = ArrayVec::new(); | ||
99 | if let Some(old_items) = self.assoc_item_list() { | ||
100 | let to_replace: SyntaxElement = old_items.syntax().clone().into(); | ||
101 | to_insert.push(items.syntax().clone().into()); | ||
102 | self.replace_children(single_node(to_replace), to_insert) | ||
103 | } else { | ||
104 | to_insert.push(make::tokens::single_space().into()); | ||
105 | to_insert.push(items.syntax().clone().into()); | ||
106 | self.insert_children(InsertPosition::Last, to_insert) | ||
107 | } | ||
108 | } | ||
109 | } | ||
110 | |||
111 | impl ast::AssocItemList { | ||
112 | #[must_use] | ||
113 | pub fn append_items( | ||
114 | &self, | ||
115 | items: impl IntoIterator<Item = ast::AssocItem>, | ||
116 | ) -> ast::AssocItemList { | ||
117 | let mut res = self.clone(); | ||
118 | if !self.syntax().text().contains_char('\n') { | ||
119 | res = make_multiline(res); | ||
120 | } | ||
121 | items.into_iter().for_each(|it| res = res.append_item(it)); | ||
122 | res.fixup_trailing_whitespace().unwrap_or(res) | ||
123 | } | ||
124 | |||
125 | #[must_use] | ||
126 | pub fn append_item(&self, item: ast::AssocItem) -> ast::AssocItemList { | ||
127 | let (indent, position, whitespace) = match self.assoc_items().last() { | ||
128 | Some(it) => ( | ||
129 | leading_indent(it.syntax()).unwrap_or_default().to_string(), | ||
130 | InsertPosition::After(it.syntax().clone().into()), | ||
131 | "\n\n", | ||
132 | ), | ||
133 | None => match self.l_curly_token() { | ||
134 | Some(it) => ( | ||
135 | " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(), | ||
136 | InsertPosition::After(it.into()), | ||
137 | "\n", | ||
138 | ), | ||
139 | None => return self.clone(), | ||
140 | }, | ||
141 | }; | ||
142 | let ws = tokens::WsBuilder::new(&format!("{}{}", whitespace, indent)); | ||
143 | let to_insert: ArrayVec<SyntaxElement, 2> = | ||
144 | [ws.ws().into(), item.syntax().clone().into()].into(); | ||
145 | self.insert_children(position, to_insert) | ||
146 | } | ||
147 | |||
148 | /// Remove extra whitespace between last item and closing curly brace. | ||
149 | fn fixup_trailing_whitespace(&self) -> Option<ast::AssocItemList> { | ||
150 | let first_token_after_items = | ||
151 | self.assoc_items().last()?.syntax().next_sibling_or_token()?; | ||
152 | let last_token_before_curly = self.r_curly_token()?.prev_sibling_or_token()?; | ||
153 | if last_token_before_curly != first_token_after_items { | ||
154 | // there is something more between last item and | ||
155 | // right curly than just whitespace - bail out | ||
156 | return None; | ||
157 | } | ||
158 | let whitespace = | ||
159 | last_token_before_curly.clone().into_token().and_then(ast::Whitespace::cast)?; | ||
160 | let text = whitespace.syntax().text(); | ||
161 | let newline = text.rfind('\n')?; | ||
162 | let keep = tokens::WsBuilder::new(&text[newline..]); | ||
163 | Some(self.replace_children( | ||
164 | first_token_after_items..=last_token_before_curly, | ||
165 | std::iter::once(keep.ws().into()), | ||
166 | )) | ||
167 | } | ||
168 | } | ||
169 | |||
170 | impl ast::RecordExprFieldList { | ||
171 | #[must_use] | ||
172 | pub fn append_field(&self, field: &ast::RecordExprField) -> ast::RecordExprFieldList { | ||
173 | self.insert_field(InsertPosition::Last, field) | ||
174 | } | ||
175 | |||
176 | #[must_use] | ||
177 | pub fn insert_field( | ||
178 | &self, | ||
179 | position: InsertPosition<&'_ ast::RecordExprField>, | ||
180 | field: &ast::RecordExprField, | ||
181 | ) -> ast::RecordExprFieldList { | ||
182 | let is_multiline = self.syntax().text().contains_char('\n'); | ||
183 | let ws; | ||
184 | let space = if is_multiline { | ||
185 | ws = tokens::WsBuilder::new(&format!( | ||
186 | "\n{} ", | ||
187 | leading_indent(self.syntax()).unwrap_or_default() | ||
188 | )); | ||
189 | ws.ws() | ||
190 | } else { | ||
191 | tokens::single_space() | ||
192 | }; | ||
193 | |||
194 | let mut to_insert: ArrayVec<SyntaxElement, 4> = ArrayVec::new(); | ||
195 | to_insert.push(space.into()); | ||
196 | to_insert.push(field.syntax().clone().into()); | ||
197 | to_insert.push(make::token(T![,]).into()); | ||
198 | |||
199 | macro_rules! after_l_curly { | ||
200 | () => {{ | ||
201 | let anchor = match self.l_curly_token() { | ||
202 | Some(it) => it.into(), | ||
203 | None => return self.clone(), | ||
204 | }; | ||
205 | InsertPosition::After(anchor) | ||
206 | }}; | ||
207 | } | ||
208 | |||
209 | macro_rules! after_field { | ||
210 | ($anchor:expr) => { | ||
211 | if let Some(comma) = $anchor | ||
212 | .syntax() | ||
213 | .siblings_with_tokens(Direction::Next) | ||
214 | .find(|it| it.kind() == T![,]) | ||
215 | { | ||
216 | InsertPosition::After(comma) | ||
217 | } else { | ||
218 | to_insert.insert(0, make::token(T![,]).into()); | ||
219 | InsertPosition::After($anchor.syntax().clone().into()) | ||
220 | } | ||
221 | }; | ||
222 | } | ||
223 | |||
224 | let position = match position { | ||
225 | InsertPosition::First => after_l_curly!(), | ||
226 | InsertPosition::Last => { | ||
227 | if !is_multiline { | ||
228 | // don't insert comma before curly | ||
229 | to_insert.pop(); | ||
230 | } | ||
231 | match self.fields().last() { | ||
232 | Some(it) => after_field!(it), | ||
233 | None => after_l_curly!(), | ||
234 | } | ||
235 | } | ||
236 | InsertPosition::Before(anchor) => { | ||
237 | InsertPosition::Before(anchor.syntax().clone().into()) | ||
238 | } | ||
239 | InsertPosition::After(anchor) => after_field!(anchor), | ||
240 | }; | ||
241 | |||
242 | self.insert_children(position, to_insert) | ||
243 | } | ||
244 | } | ||
245 | |||
246 | impl ast::TypeAlias { | ||
247 | #[must_use] | ||
248 | pub fn remove_bounds(&self) -> ast::TypeAlias { | ||
249 | let colon = match self.colon_token() { | ||
250 | Some(it) => it, | ||
251 | None => return self.clone(), | ||
252 | }; | ||
253 | let end = match self.type_bound_list() { | ||
254 | Some(it) => it.syntax().clone().into(), | ||
255 | None => colon.clone().into(), | ||
256 | }; | ||
257 | self.replace_children(colon.into()..=end, iter::empty()) | ||
258 | } | ||
259 | } | ||
260 | |||
261 | impl ast::TypeParam { | ||
262 | #[must_use] | ||
263 | pub fn remove_bounds(&self) -> ast::TypeParam { | ||
264 | let colon = match self.colon_token() { | ||
265 | Some(it) => it, | ||
266 | None => return self.clone(), | ||
267 | }; | ||
268 | let end = match self.type_bound_list() { | ||
269 | Some(it) => it.syntax().clone().into(), | ||
270 | None => colon.clone().into(), | ||
271 | }; | ||
272 | self.replace_children(colon.into()..=end, iter::empty()) | ||
273 | } | ||
274 | } | ||
275 | |||
276 | impl ast::Path { | 28 | impl ast::Path { |
277 | #[must_use] | 29 | #[must_use] |
278 | pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path { | 30 | pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path { |
@@ -313,33 +65,7 @@ impl ast::PathSegment { | |||
313 | } | 65 | } |
314 | } | 66 | } |
315 | 67 | ||
316 | impl ast::Use { | ||
317 | #[must_use] | ||
318 | pub fn with_use_tree(&self, use_tree: ast::UseTree) -> ast::Use { | ||
319 | if let Some(old) = self.use_tree() { | ||
320 | return self.replace_descendant(old, use_tree); | ||
321 | } | ||
322 | self.clone() | ||
323 | } | ||
324 | } | ||
325 | |||
326 | impl ast::UseTree { | 68 | impl ast::UseTree { |
327 | #[must_use] | ||
328 | pub fn with_path(&self, path: ast::Path) -> ast::UseTree { | ||
329 | if let Some(old) = self.path() { | ||
330 | return self.replace_descendant(old, path); | ||
331 | } | ||
332 | self.clone() | ||
333 | } | ||
334 | |||
335 | #[must_use] | ||
336 | pub fn with_use_tree_list(&self, use_tree_list: ast::UseTreeList) -> ast::UseTree { | ||
337 | if let Some(old) = self.use_tree_list() { | ||
338 | return self.replace_descendant(old, use_tree_list); | ||
339 | } | ||
340 | self.clone() | ||
341 | } | ||
342 | |||
343 | /// Splits off the given prefix, making it the path component of the use tree, appending the rest of the path to all UseTreeList items. | 69 | /// Splits off the given prefix, making it the path component of the use tree, appending the rest of the path to all UseTreeList items. |
344 | #[must_use] | 70 | #[must_use] |
345 | pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree { | 71 | pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree { |
@@ -376,134 +102,6 @@ impl ast::UseTree { | |||
376 | } | 102 | } |
377 | } | 103 | } |
378 | 104 | ||
379 | impl ast::MatchArmList { | ||
380 | #[must_use] | ||
381 | pub fn append_arms(&self, items: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList { | ||
382 | let mut res = self.clone(); | ||
383 | res = res.strip_if_only_whitespace(); | ||
384 | if !res.syntax().text().contains_char('\n') { | ||
385 | res = make_multiline(res); | ||
386 | } | ||
387 | items.into_iter().for_each(|it| res = res.append_arm(it)); | ||
388 | res | ||
389 | } | ||
390 | |||
391 | fn strip_if_only_whitespace(&self) -> ast::MatchArmList { | ||
392 | let mut iter = self.syntax().children_with_tokens().skip_while(|it| it.kind() != T!['{']); | ||
393 | iter.next(); // Eat the curly | ||
394 | let mut inner = iter.take_while(|it| it.kind() != T!['}']); | ||
395 | if !inner.clone().all(|it| it.kind() == WHITESPACE) { | ||
396 | return self.clone(); | ||
397 | } | ||
398 | let start = match inner.next() { | ||
399 | Some(s) => s, | ||
400 | None => return self.clone(), | ||
401 | }; | ||
402 | let end = match inner.last() { | ||
403 | Some(s) => s, | ||
404 | None => start.clone(), | ||
405 | }; | ||
406 | self.replace_children(start..=end, &mut iter::empty()) | ||
407 | } | ||
408 | |||
409 | #[must_use] | ||
410 | pub fn remove_placeholder(&self) -> ast::MatchArmList { | ||
411 | let placeholder = | ||
412 | self.arms().find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_)))); | ||
413 | if let Some(placeholder) = placeholder { | ||
414 | self.remove_arm(&placeholder) | ||
415 | } else { | ||
416 | self.clone() | ||
417 | } | ||
418 | } | ||
419 | |||
420 | #[must_use] | ||
421 | fn remove_arm(&self, arm: &ast::MatchArm) -> ast::MatchArmList { | ||
422 | let start = arm.syntax().clone(); | ||
423 | let end = if let Some(comma) = start | ||
424 | .siblings_with_tokens(Direction::Next) | ||
425 | .skip(1) | ||
426 | .find(|it| !it.kind().is_trivia()) | ||
427 | .filter(|it| it.kind() == T![,]) | ||
428 | { | ||
429 | comma | ||
430 | } else { | ||
431 | start.clone().into() | ||
432 | }; | ||
433 | self.replace_children(start.into()..=end, None) | ||
434 | } | ||
435 | |||
436 | #[must_use] | ||
437 | pub fn append_arm(&self, item: ast::MatchArm) -> ast::MatchArmList { | ||
438 | let r_curly = match self.syntax().children_with_tokens().find(|it| it.kind() == T!['}']) { | ||
439 | Some(t) => t, | ||
440 | None => return self.clone(), | ||
441 | }; | ||
442 | let position = InsertPosition::Before(r_curly); | ||
443 | let arm_ws = tokens::WsBuilder::new(" "); | ||
444 | let match_indent = &leading_indent(self.syntax()).unwrap_or_default(); | ||
445 | let match_ws = tokens::WsBuilder::new(&format!("\n{}", match_indent)); | ||
446 | let to_insert: ArrayVec<SyntaxElement, 3> = | ||
447 | [arm_ws.ws().into(), item.syntax().clone().into(), match_ws.ws().into()].into(); | ||
448 | self.insert_children(position, to_insert) | ||
449 | } | ||
450 | } | ||
451 | |||
452 | impl ast::GenericParamList { | ||
453 | #[must_use] | ||
454 | pub fn append_params( | ||
455 | &self, | ||
456 | params: impl IntoIterator<Item = ast::GenericParam>, | ||
457 | ) -> ast::GenericParamList { | ||
458 | let mut res = self.clone(); | ||
459 | params.into_iter().for_each(|it| res = res.append_param(it)); | ||
460 | res | ||
461 | } | ||
462 | |||
463 | #[must_use] | ||
464 | pub fn append_param(&self, item: ast::GenericParam) -> ast::GenericParamList { | ||
465 | let space = tokens::single_space(); | ||
466 | |||
467 | let mut to_insert: ArrayVec<SyntaxElement, 4> = ArrayVec::new(); | ||
468 | if self.generic_params().next().is_some() { | ||
469 | to_insert.push(space.into()); | ||
470 | } | ||
471 | to_insert.push(item.syntax().clone().into()); | ||
472 | |||
473 | macro_rules! after_l_angle { | ||
474 | () => {{ | ||
475 | let anchor = match self.l_angle_token() { | ||
476 | Some(it) => it.into(), | ||
477 | None => return self.clone(), | ||
478 | }; | ||
479 | InsertPosition::After(anchor) | ||
480 | }}; | ||
481 | } | ||
482 | |||
483 | macro_rules! after_field { | ||
484 | ($anchor:expr) => { | ||
485 | if let Some(comma) = $anchor | ||
486 | .syntax() | ||
487 | .siblings_with_tokens(Direction::Next) | ||
488 | .find(|it| it.kind() == T![,]) | ||
489 | { | ||
490 | InsertPosition::After(comma) | ||
491 | } else { | ||
492 | to_insert.insert(0, make::token(T![,]).into()); | ||
493 | InsertPosition::After($anchor.syntax().clone().into()) | ||
494 | } | ||
495 | }; | ||
496 | } | ||
497 | |||
498 | let position = match self.generic_params().last() { | ||
499 | Some(it) => after_field!(it), | ||
500 | None => after_l_angle!(), | ||
501 | }; | ||
502 | |||
503 | self.insert_children(position, to_insert) | ||
504 | } | ||
505 | } | ||
506 | |||
507 | #[must_use] | 105 | #[must_use] |
508 | pub fn remove_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N { | 106 | pub fn remove_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N { |
509 | N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap() | 107 | N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap() |
@@ -554,6 +152,12 @@ impl ops::Add<u8> for IndentLevel { | |||
554 | } | 152 | } |
555 | 153 | ||
556 | impl IndentLevel { | 154 | impl IndentLevel { |
155 | pub fn single() -> IndentLevel { | ||
156 | IndentLevel(0) | ||
157 | } | ||
158 | pub fn is_zero(&self) -> bool { | ||
159 | self.0 == 0 | ||
160 | } | ||
557 | pub fn from_element(element: &SyntaxElement) -> IndentLevel { | 161 | pub fn from_element(element: &SyntaxElement) -> IndentLevel { |
558 | match element { | 162 | match element { |
559 | rowan::NodeOrToken::Node(it) => IndentLevel::from_node(it), | 163 | rowan::NodeOrToken::Node(it) => IndentLevel::from_node(it), |
@@ -588,54 +192,40 @@ impl IndentLevel { | |||
588 | /// ``` | 192 | /// ``` |
589 | /// if you indent the block, the `{` token would stay put. | 193 | /// if you indent the block, the `{` token would stay put. |
590 | fn increase_indent(self, node: SyntaxNode) -> SyntaxNode { | 194 | fn increase_indent(self, node: SyntaxNode) -> SyntaxNode { |
591 | let mut rewriter = SyntaxRewriter::default(); | 195 | let res = node.clone_subtree().clone_for_update(); |
592 | node.descendants_with_tokens() | 196 | let tokens = res.preorder_with_tokens().filter_map(|event| match event { |
593 | .filter_map(|el| el.into_token()) | 197 | rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), |
594 | .filter_map(ast::Whitespace::cast) | 198 | _ => None, |
595 | .filter(|ws| { | 199 | }); |
596 | let text = ws.syntax().text(); | 200 | for token in tokens { |
597 | text.contains('\n') | 201 | if let Some(ws) = ast::Whitespace::cast(token) { |
598 | }) | 202 | if ws.text().contains('\n') { |
599 | .for_each(|ws| { | 203 | let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self)); |
600 | let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self,)); | 204 | ted::replace(ws.syntax(), &new_ws) |
601 | rewriter.replace(ws.syntax(), &new_ws) | 205 | } |
602 | }); | 206 | } |
603 | rewriter.rewrite(&node) | 207 | } |
208 | res.clone_subtree() | ||
604 | } | 209 | } |
605 | 210 | ||
606 | fn decrease_indent(self, node: SyntaxNode) -> SyntaxNode { | 211 | fn decrease_indent(self, node: SyntaxNode) -> SyntaxNode { |
607 | let mut rewriter = SyntaxRewriter::default(); | 212 | let res = node.clone_subtree().clone_for_update(); |
608 | node.descendants_with_tokens() | 213 | let tokens = res.preorder_with_tokens().filter_map(|event| match event { |
609 | .filter_map(|el| el.into_token()) | 214 | rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), |
610 | .filter_map(ast::Whitespace::cast) | 215 | _ => None, |
611 | .filter(|ws| { | 216 | }); |
612 | let text = ws.syntax().text(); | 217 | for token in tokens { |
613 | text.contains('\n') | 218 | if let Some(ws) = ast::Whitespace::cast(token) { |
614 | }) | 219 | if ws.text().contains('\n') { |
615 | .for_each(|ws| { | 220 | let new_ws = make::tokens::whitespace( |
616 | let new_ws = make::tokens::whitespace( | 221 | &ws.syntax().text().replace(&format!("\n{}", self), "\n"), |
617 | &ws.syntax().text().replace(&format!("\n{}", self), "\n"), | 222 | ); |
618 | ); | 223 | ted::replace(ws.syntax(), &new_ws) |
619 | rewriter.replace(ws.syntax(), &new_ws) | 224 | } |
620 | }); | ||
621 | rewriter.rewrite(&node) | ||
622 | } | ||
623 | } | ||
624 | |||
625 | // FIXME: replace usages with IndentLevel above | ||
626 | fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> { | ||
627 | for token in prev_tokens(node.first_token()?) { | ||
628 | if let Some(ws) = ast::Whitespace::cast(token.clone()) { | ||
629 | let ws_text = ws.text(); | ||
630 | if let Some(pos) = ws_text.rfind('\n') { | ||
631 | return Some(ws_text[pos + 1..].into()); | ||
632 | } | 225 | } |
633 | } | 226 | } |
634 | if token.text().contains('\n') { | 227 | res.clone_subtree() |
635 | break; | ||
636 | } | ||
637 | } | 228 | } |
638 | None | ||
639 | } | 229 | } |
640 | 230 | ||
641 | fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { | 231 | fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { |
@@ -662,13 +252,6 @@ pub trait AstNodeEdit: AstNode + Clone + Sized { | |||
662 | let new_syntax = algo::replace_children(self.syntax(), to_replace, to_insert); | 252 | let new_syntax = algo::replace_children(self.syntax(), to_replace, to_insert); |
663 | Self::cast(new_syntax).unwrap() | 253 | Self::cast(new_syntax).unwrap() |
664 | } | 254 | } |
665 | |||
666 | #[must_use] | ||
667 | fn replace_descendant<D: AstNode>(&self, old: D, new: D) -> Self { | ||
668 | let mut rewriter = SyntaxRewriter::default(); | ||
669 | rewriter.replace(old.syntax(), new.syntax()); | ||
670 | rewriter.rewrite_ast(self) | ||
671 | } | ||
672 | fn indent_level(&self) -> IndentLevel { | 255 | fn indent_level(&self) -> IndentLevel { |
673 | IndentLevel::from_node(self.syntax()) | 256 | IndentLevel::from_node(self.syntax()) |
674 | } | 257 | } |
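`increase_indent` and `decrease_indent` above now mutate a `clone_subtree().clone_for_update()` copy through `ted::replace` instead of building a `SyntaxRewriter`. Externally the behaviour is unchanged and reachable via `AstNodeEdit::indent`; the sketch below assumes that method's existing signature and `IndentLevel`'s spaces-per-level rendering:

```rust
use syntax::{
    ast::{
        self,
        edit::{AstNodeEdit, IndentLevel},
    },
    AstNode, SourceFile,
};

fn main() {
    let file = SourceFile::parse("fn f() {\n    1 + 1\n}").tree();
    let block = file.syntax().descendants().find_map(ast::BlockExpr::cast).unwrap();

    // Every whitespace token containing a newline gets one extra indent
    // level appended; the whitespace before `{` belongs to the parent, so
    // the opening brace itself stays put.
    let indented = block.indent(IndentLevel(1));
    println!("{}", indented.syntax());
}
```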
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index 168355555..2676ed8c9 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -2,13 +2,18 @@ | |||
2 | 2 | ||
3 | use std::iter::empty; | 3 | use std::iter::empty; |
4 | 4 | ||
5 | use parser::T; | 5 | use parser::{SyntaxKind, T}; |
6 | use rowan::SyntaxElement; | ||
6 | 7 | ||
7 | use crate::{ | 8 | use crate::{ |
8 | algo::neighbor, | 9 | algo::neighbor, |
9 | ast::{self, edit::AstNodeEdit, make, GenericParamsOwner, WhereClause}, | 10 | ast::{ |
11 | self, | ||
12 | edit::{AstNodeEdit, IndentLevel}, | ||
13 | make, GenericParamsOwner, | ||
14 | }, | ||
10 | ted::{self, Position}, | 15 | ted::{self, Position}, |
11 | AstNode, AstToken, Direction, | 16 | AstNode, AstToken, Direction, SyntaxNode, |
12 | }; | 17 | }; |
13 | 18 | ||
14 | use super::NameOwner; | 19 | use super::NameOwner; |
@@ -37,7 +42,7 @@ impl GenericParamsOwnerEdit for ast::Fn { | |||
37 | } | 42 | } |
38 | } | 43 | } |
39 | 44 | ||
40 | fn get_or_create_where_clause(&self) -> WhereClause { | 45 | fn get_or_create_where_clause(&self) -> ast::WhereClause { |
41 | if self.where_clause().is_none() { | 46 | if self.where_clause().is_none() { |
42 | let position = if let Some(ty) = self.ret_type() { | 47 | let position = if let Some(ty) = self.ret_type() { |
43 | Position::after(ty.syntax()) | 48 | Position::after(ty.syntax()) |
@@ -67,7 +72,7 @@ impl GenericParamsOwnerEdit for ast::Impl { | |||
67 | } | 72 | } |
68 | } | 73 | } |
69 | 74 | ||
70 | fn get_or_create_where_clause(&self) -> WhereClause { | 75 | fn get_or_create_where_clause(&self) -> ast::WhereClause { |
71 | if self.where_clause().is_none() { | 76 | if self.where_clause().is_none() { |
72 | let position = if let Some(items) = self.assoc_item_list() { | 77 | let position = if let Some(items) = self.assoc_item_list() { |
73 | Position::before(items.syntax()) | 78 | Position::before(items.syntax()) |
@@ -97,7 +102,7 @@ impl GenericParamsOwnerEdit for ast::Trait { | |||
97 | } | 102 | } |
98 | } | 103 | } |
99 | 104 | ||
100 | fn get_or_create_where_clause(&self) -> WhereClause { | 105 | fn get_or_create_where_clause(&self) -> ast::WhereClause { |
101 | if self.where_clause().is_none() { | 106 | if self.where_clause().is_none() { |
102 | let position = if let Some(items) = self.assoc_item_list() { | 107 | let position = if let Some(items) = self.assoc_item_list() { |
103 | Position::before(items.syntax()) | 108 | Position::before(items.syntax()) |
@@ -127,7 +132,7 @@ impl GenericParamsOwnerEdit for ast::Struct { | |||
127 | } | 132 | } |
128 | } | 133 | } |
129 | 134 | ||
130 | fn get_or_create_where_clause(&self) -> WhereClause { | 135 | fn get_or_create_where_clause(&self) -> ast::WhereClause { |
131 | if self.where_clause().is_none() { | 136 | if self.where_clause().is_none() { |
132 | let tfl = self.field_list().and_then(|fl| match fl { | 137 | let tfl = self.field_list().and_then(|fl| match fl { |
133 | ast::FieldList::RecordFieldList(_) => None, | 138 | ast::FieldList::RecordFieldList(_) => None, |
@@ -165,7 +170,7 @@ impl GenericParamsOwnerEdit for ast::Enum { | |||
165 | } | 170 | } |
166 | } | 171 | } |
167 | 172 | ||
168 | fn get_or_create_where_clause(&self) -> WhereClause { | 173 | fn get_or_create_where_clause(&self) -> ast::WhereClause { |
169 | if self.where_clause().is_none() { | 174 | if self.where_clause().is_none() { |
170 | let position = if let Some(gpl) = self.generic_param_list() { | 175 | let position = if let Some(gpl) = self.generic_param_list() { |
171 | Position::after(gpl.syntax()) | 176 | Position::after(gpl.syntax()) |
@@ -272,6 +277,167 @@ impl ast::Use { | |||
272 | } | 277 | } |
273 | } | 278 | } |
274 | 279 | ||
280 | impl ast::Impl { | ||
281 | pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList { | ||
282 | if self.assoc_item_list().is_none() { | ||
283 | let assoc_item_list = make::assoc_item_list().clone_for_update(); | ||
284 | ted::append_child(self.syntax(), assoc_item_list.syntax()); | ||
285 | } | ||
286 | self.assoc_item_list().unwrap() | ||
287 | } | ||
288 | } | ||
289 | |||
290 | impl ast::AssocItemList { | ||
291 | pub fn add_item(&self, item: ast::AssocItem) { | ||
292 | let (indent, position, whitespace) = match self.assoc_items().last() { | ||
293 | Some(last_item) => ( | ||
294 | IndentLevel::from_node(last_item.syntax()), | ||
295 | Position::after(last_item.syntax()), | ||
296 | "\n\n", | ||
297 | ), | ||
298 | None => match self.l_curly_token() { | ||
299 | Some(l_curly) => { | ||
300 | normalize_ws_between_braces(self.syntax()); | ||
301 | (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n") | ||
302 | } | ||
303 | None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"), | ||
304 | }, | ||
305 | }; | ||
306 | let elements: Vec<SyntaxElement<_>> = vec![ | ||
307 | make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(), | ||
308 | item.syntax().clone().into(), | ||
309 | ]; | ||
310 | ted::insert_all(position, elements); | ||
311 | } | ||
312 | } | ||
313 | |||
314 | impl ast::Fn { | ||
315 | pub fn get_or_create_body(&self) -> ast::BlockExpr { | ||
316 | if self.body().is_none() { | ||
317 | let body = make::ext::empty_block_expr().clone_for_update(); | ||
318 | match self.semicolon_token() { | ||
319 | Some(semi) => { | ||
320 | ted::replace(semi, body.syntax()); | ||
321 | ted::insert(Position::before(body.syntax()), make::tokens::single_space()); | ||
322 | } | ||
323 | None => ted::append_child(self.syntax(), body.syntax()), | ||
324 | } | ||
325 | } | ||
326 | self.body().unwrap() | ||
327 | } | ||
328 | } | ||
329 | |||
330 | impl ast::MatchArm { | ||
331 | pub fn remove(&self) { | ||
332 | if let Some(sibling) = self.syntax().prev_sibling_or_token() { | ||
333 | if sibling.kind() == SyntaxKind::WHITESPACE { | ||
334 | ted::remove(sibling); | ||
335 | } | ||
336 | } | ||
337 | if let Some(sibling) = self.syntax().next_sibling_or_token() { | ||
338 | if sibling.kind() == T![,] { | ||
339 | ted::remove(sibling); | ||
340 | } | ||
341 | } | ||
342 | ted::remove(self.syntax()); | ||
343 | } | ||
344 | } | ||
345 | |||
346 | impl ast::MatchArmList { | ||
347 | pub fn add_arm(&self, arm: ast::MatchArm) { | ||
348 | normalize_ws_between_braces(self.syntax()); | ||
349 | let position = match self.arms().last() { | ||
350 | Some(last_arm) => { | ||
351 | let curly = last_arm | ||
352 | .syntax() | ||
353 | .siblings_with_tokens(Direction::Next) | ||
354 | .find(|it| it.kind() == T![,]); | ||
355 | Position::after(curly.unwrap_or_else(|| last_arm.syntax().clone().into())) | ||
356 | } | ||
357 | None => match self.l_curly_token() { | ||
358 | Some(it) => Position::after(it), | ||
359 | None => Position::last_child_of(self.syntax()), | ||
360 | }, | ||
361 | }; | ||
362 | let indent = IndentLevel::from_node(self.syntax()) + 1; | ||
363 | let elements = vec![ | ||
364 | make::tokens::whitespace(&format!("\n{}", indent)).into(), | ||
365 | arm.syntax().clone().into(), | ||
366 | ]; | ||
367 | ted::insert_all(position, elements); | ||
368 | } | ||
369 | } | ||
370 | |||
371 | impl ast::RecordExprFieldList { | ||
372 | pub fn add_field(&self, field: ast::RecordExprField) { | ||
373 | let is_multiline = self.syntax().text().contains_char('\n'); | ||
374 | let whitespace = if is_multiline { | ||
375 | let indent = IndentLevel::from_node(self.syntax()) + 1; | ||
376 | make::tokens::whitespace(&format!("\n{}", indent)) | ||
377 | } else { | ||
378 | make::tokens::single_space() | ||
379 | }; | ||
380 | |||
381 | if is_multiline { | ||
382 | normalize_ws_between_braces(self.syntax()); | ||
383 | } | ||
384 | |||
385 | let position = match self.fields().last() { | ||
386 | Some(last_field) => { | ||
387 | let comma = match last_field | ||
388 | .syntax() | ||
389 | .siblings_with_tokens(Direction::Next) | ||
390 | .filter_map(|it| it.into_token()) | ||
391 | .find(|it| it.kind() == T![,]) | ||
392 | { | ||
393 | Some(it) => it, | ||
394 | None => { | ||
395 | let comma = ast::make::token(T![,]); | ||
396 | ted::insert(Position::after(last_field.syntax()), &comma); | ||
397 | comma | ||
398 | } | ||
399 | }; | ||
400 | Position::after(comma) | ||
401 | } | ||
402 | None => match self.l_curly_token() { | ||
403 | Some(it) => Position::after(it), | ||
404 | None => Position::last_child_of(self.syntax()), | ||
405 | }, | ||
406 | }; | ||
407 | |||
408 | ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]); | ||
409 | if is_multiline { | ||
410 | ted::insert(Position::after(field.syntax()), ast::make::token(T![,])); | ||
411 | } | ||
412 | } | ||
413 | } | ||
414 | |||
415 | fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> { | ||
416 | let l = node | ||
417 | .children_with_tokens() | ||
418 | .filter_map(|it| it.into_token()) | ||
419 | .find(|it| it.kind() == T!['{'])?; | ||
420 | let r = node | ||
421 | .children_with_tokens() | ||
422 | .filter_map(|it| it.into_token()) | ||
423 | .find(|it| it.kind() == T!['}'])?; | ||
424 | |||
425 | let indent = IndentLevel::from_node(node); | ||
426 | |||
427 | match l.next_sibling_or_token() { | ||
428 | Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => { | ||
429 | if ws.next_sibling_or_token()?.into_token()? == r { | ||
430 | ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent))); | ||
431 | } | ||
432 | } | ||
433 | Some(ws) if ws.kind() == T!['}'] => { | ||
434 | ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent))); | ||
435 | } | ||
436 | _ => (), | ||
437 | } | ||
438 | Some(()) | ||
439 | } | ||
440 | |||
275 | #[cfg(test)] | 441 | #[cfg(test)] |
276 | mod tests { | 442 | mod tests { |
277 | use std::fmt; | 443 | use std::fmt; |
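The editors added above (`get_or_create_assoc_item_list`, `add_item`, `get_or_create_body`, `add_arm`, `add_field`, `MatchArm::remove`) mutate the tree in place, so callers are expected to start from a `clone_for_update` copy. A hedged sketch of that call pattern; the snippet is illustrative, not taken from the diff:

```rust
use syntax::{ast, AstNode, SourceFile};

fn main() {
    // Mutable tree: edit_in_place APIs change nodes where they stand.
    let file = SourceFile::parse("impl Foo for Bar {}").tree().clone_for_update();
    let impl_ = file.syntax().descendants().find_map(ast::Impl::cast).unwrap();

    // A detached, mutable associated item to splice in.
    let item = SourceFile::parse("impl X { fn new() -> Self { Self } }")
        .tree()
        .syntax()
        .descendants()
        .find_map(ast::AssocItem::cast)
        .unwrap()
        .clone_subtree()
        .clone_for_update();

    // `get_or_create_assoc_item_list` appends a `{ }` body when the impl
    // has none; `add_item` then inserts the item with fresh indentation.
    impl_.get_or_create_assoc_item_list().add_item(item);
    println!("{}", file.syntax());
}
```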
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 1998ad1f6..d13926ded 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -12,7 +12,7 @@ | |||
12 | use itertools::Itertools; | 12 | use itertools::Itertools; |
13 | use stdx::{format_to, never}; | 13 | use stdx::{format_to, never}; |
14 | 14 | ||
15 | use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxToken}; | 15 | use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxToken}; |
16 | 16 | ||
17 | /// While the parent module defines basic atomic "constructors", the `ext` | 17 | /// While the parent module defines basic atomic "constructors", the `ext` |
18 | /// module defines shortcuts for common things. | 18 | /// module defines shortcuts for common things. |
@@ -99,7 +99,7 @@ fn ty_from_text(text: &str) -> ast::Type { | |||
99 | } | 99 | } |
100 | 100 | ||
101 | pub fn assoc_item_list() -> ast::AssocItemList { | 101 | pub fn assoc_item_list() -> ast::AssocItemList { |
102 | ast_from_text("impl C for D {};") | 102 | ast_from_text("impl C for D {}") |
103 | } | 103 | } |
104 | 104 | ||
105 | pub fn impl_trait(trait_: ast::Path, ty: ast::Path) -> ast::Impl { | 105 | pub fn impl_trait(trait_: ast::Path, ty: ast::Path) -> ast::Impl { |
@@ -601,17 +601,11 @@ fn ast_from_text<N: AstNode>(text: &str) -> N { | |||
601 | panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text) | 601 | panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text) |
602 | } | 602 | } |
603 | }; | 603 | }; |
604 | let node = node.syntax().clone(); | 604 | let node = node.clone_subtree(); |
605 | let node = unroot(node); | ||
606 | let node = N::cast(node).unwrap(); | ||
607 | assert_eq!(node.syntax().text_range().start(), 0.into()); | 605 | assert_eq!(node.syntax().text_range().start(), 0.into()); |
608 | node | 606 | node |
609 | } | 607 | } |
610 | 608 | ||
611 | fn unroot(n: SyntaxNode) -> SyntaxNode { | ||
612 | SyntaxNode::new_root(n.green().into()) | ||
613 | } | ||
614 | |||
615 | pub fn token(kind: SyntaxKind) -> SyntaxToken { | 609 | pub fn token(kind: SyntaxKind) -> SyntaxToken { |
616 | tokens::SOURCE_FILE | 610 | tokens::SOURCE_FILE |
617 | .tree() | 611 | .tree() |
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 29d25a58a..4b1e1ccee 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -143,6 +143,30 @@ impl QuoteOffsets { | |||
143 | } | 143 | } |
144 | } | 144 | } |
145 | 145 | ||
146 | pub trait IsString: AstToken { | ||
147 | fn quote_offsets(&self) -> Option<QuoteOffsets> { | ||
148 | let text = self.text(); | ||
149 | let offsets = QuoteOffsets::new(text)?; | ||
150 | let o = self.syntax().text_range().start(); | ||
151 | let offsets = QuoteOffsets { | ||
152 | quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o), | ||
153 | contents: offsets.contents + o, | ||
154 | }; | ||
155 | Some(offsets) | ||
156 | } | ||
157 | fn text_range_between_quotes(&self) -> Option<TextRange> { | ||
158 | self.quote_offsets().map(|it| it.contents) | ||
159 | } | ||
160 | fn open_quote_text_range(&self) -> Option<TextRange> { | ||
161 | self.quote_offsets().map(|it| it.quotes.0) | ||
162 | } | ||
163 | fn close_quote_text_range(&self) -> Option<TextRange> { | ||
164 | self.quote_offsets().map(|it| it.quotes.1) | ||
165 | } | ||
166 | } | ||
167 | |||
168 | impl IsString for ast::String {} | ||
169 | |||
146 | impl ast::String { | 170 | impl ast::String { |
147 | pub fn is_raw(&self) -> bool { | 171 | pub fn is_raw(&self) -> bool { |
148 | self.text().starts_with('r') | 172 | self.text().starts_with('r') |
@@ -187,32 +211,49 @@ impl ast::String { | |||
187 | (false, false) => Some(Cow::Owned(buf)), | 211 | (false, false) => Some(Cow::Owned(buf)), |
188 | } | 212 | } |
189 | } | 213 | } |
190 | |||
191 | pub fn quote_offsets(&self) -> Option<QuoteOffsets> { | ||
192 | let text = self.text(); | ||
193 | let offsets = QuoteOffsets::new(text)?; | ||
194 | let o = self.syntax().text_range().start(); | ||
195 | let offsets = QuoteOffsets { | ||
196 | quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o), | ||
197 | contents: offsets.contents + o, | ||
198 | }; | ||
199 | Some(offsets) | ||
200 | } | ||
201 | pub fn text_range_between_quotes(&self) -> Option<TextRange> { | ||
202 | self.quote_offsets().map(|it| it.contents) | ||
203 | } | ||
204 | pub fn open_quote_text_range(&self) -> Option<TextRange> { | ||
205 | self.quote_offsets().map(|it| it.quotes.0) | ||
206 | } | ||
207 | pub fn close_quote_text_range(&self) -> Option<TextRange> { | ||
208 | self.quote_offsets().map(|it| it.quotes.1) | ||
209 | } | ||
210 | } | 214 | } |
211 | 215 | ||
216 | impl IsString for ast::ByteString {} | ||
217 | |||
212 | impl ast::ByteString { | 218 | impl ast::ByteString { |
213 | pub fn is_raw(&self) -> bool { | 219 | pub fn is_raw(&self) -> bool { |
214 | self.text().starts_with("br") | 220 | self.text().starts_with("br") |
215 | } | 221 | } |
222 | |||
223 | pub fn value(&self) -> Option<Cow<'_, [u8]>> { | ||
224 | if self.is_raw() { | ||
225 | let text = self.text(); | ||
226 | let text = | ||
227 | &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; | ||
228 | return Some(Cow::Borrowed(text.as_bytes())); | ||
229 | } | ||
230 | |||
231 | let text = self.text(); | ||
232 | let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; | ||
233 | |||
234 | let mut buf: Vec<u8> = Vec::new(); | ||
235 | let mut text_iter = text.chars(); | ||
236 | let mut has_error = false; | ||
237 | unescape_literal(text, Mode::ByteStr, &mut |char_range, unescaped_char| match ( | ||
238 | unescaped_char, | ||
239 | buf.capacity() == 0, | ||
240 | ) { | ||
241 | (Ok(c), false) => buf.push(c as u8), | ||
242 | (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (), | ||
243 | (Ok(c), true) => { | ||
244 | buf.reserve_exact(text.len()); | ||
245 | buf.extend_from_slice(&text[..char_range.start].as_bytes()); | ||
246 | buf.push(c as u8); | ||
247 | } | ||
248 | (Err(_), _) => has_error = true, | ||
249 | }); | ||
250 | |||
251 | match (has_error, buf.capacity() == 0) { | ||
252 | (true, _) => None, | ||
253 | (false, true) => Some(Cow::Borrowed(text.as_bytes())), | ||
254 | (false, false) => Some(Cow::Owned(buf)), | ||
255 | } | ||
256 | } | ||
216 | } | 257 | } |
217 | 258 | ||
218 | #[derive(Debug)] | 259 | #[derive(Debug)] |
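The `token_ext.rs` hunk moves the quote-offset helpers into the shared `IsString` trait and gives `ast::ByteString` a `value()` that unescapes its contents, mirroring `ast::String::value`. A usage sketch with a made-up literal:

```rust
use syntax::{ast, AstNode, AstToken, SourceFile};

fn main() {
    let file = SourceFile::parse(r#"fn f() { let _ = b"a\x20b"; }"#).tree();
    let byte_string = file
        .syntax()
        .descendants_with_tokens()
        .filter_map(|element| element.into_token())
        .find_map(ast::ByteString::cast)
        .unwrap();

    // `value()` decodes escape sequences, so `\x20` comes back as a space.
    assert_eq!(byte_string.value().unwrap().into_owned(), b"a b".to_vec());
}
```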
diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs
index 333bde54a..431ed0699 100644
--- a/crates/syntax/src/parsing.rs
+++ b/crates/syntax/src/parsing.rs
@@ -6,14 +6,13 @@ mod text_token_source; | |||
6 | mod text_tree_sink; | 6 | mod text_tree_sink; |
7 | mod reparsing; | 7 | mod reparsing; |
8 | 8 | ||
9 | use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode}; | 9 | use parser::SyntaxKind; |
10 | use text_token_source::TextTokenSource; | 10 | use text_token_source::TextTokenSource; |
11 | use text_tree_sink::TextTreeSink; | 11 | use text_tree_sink::TextTreeSink; |
12 | 12 | ||
13 | pub(crate) use lexer::*; | 13 | use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode}; |
14 | 14 | ||
15 | pub(crate) use self::reparsing::incremental_reparse; | 15 | pub(crate) use crate::parsing::{lexer::*, reparsing::incremental_reparse}; |
16 | use parser::SyntaxKind; | ||
17 | 16 | ||
18 | pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { | 17 | pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { |
19 | let (tokens, lexer_errors) = tokenize(&text); | 18 | let (tokens, lexer_errors) = tokenize(&text); |
diff --git a/crates/syntax/src/parsing/text_tree_sink.rs b/crates/syntax/src/parsing/text_tree_sink.rs
index 1934204ea..d63ec080b 100644
--- a/crates/syntax/src/parsing/text_tree_sink.rs
+++ b/crates/syntax/src/parsing/text_tree_sink.rs
@@ -147,8 +147,8 @@ fn n_attached_trivias<'a>( | |||
147 | trivias: impl Iterator<Item = (SyntaxKind, &'a str)>, | 147 | trivias: impl Iterator<Item = (SyntaxKind, &'a str)>, |
148 | ) -> usize { | 148 | ) -> usize { |
149 | match kind { | 149 | match kind { |
150 | MACRO_CALL | MACRO_RULES | MACRO_DEF | CONST | TYPE_ALIAS | STRUCT | UNION | ENUM | 150 | CONST | ENUM | FN | IMPL | MACRO_CALL | MACRO_DEF | MACRO_RULES | MODULE | RECORD_FIELD |
151 | | VARIANT | FN | TRAIT | MODULE | RECORD_FIELD | STATIC | USE => { | 151 | | STATIC | STRUCT | TRAIT | TUPLE_FIELD | TYPE_ALIAS | UNION | USE | VARIANT => { |
152 | let mut res = 0; | 152 | let mut res = 0; |
153 | let mut trivias = trivias.enumerate().peekable(); | 153 | let mut trivias = trivias.enumerate().peekable(); |
154 | 154 | ||
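Adding `IMPL` (and `TUPLE_FIELD`) to the attached-trivia kinds means a comment written directly above an `impl` now becomes part of the `IMPL` node, which is what the `0045_block_attrs.rast` hunk below records. A sketch of how that shows up to consumers of the tree; the snippet is illustrative:

```rust
use syntax::{ast, AstNode, AstToken, SourceFile, SyntaxKind};

fn main() {
    let file = SourceFile::parse("// leading comment\nimpl Foo {}\n").tree();
    let impl_ = file.syntax().descendants().find_map(ast::Impl::cast).unwrap();
    let comment = impl_
        .syntax()
        .children_with_tokens()
        .filter_map(|element| element.into_token())
        .find_map(ast::Comment::cast)
        .unwrap();

    // With IMPL in the attached-trivia list, the leading comment becomes a
    // child of the IMPL node instead of a sibling under SOURCE_FILE.
    assert_eq!(comment.syntax().parent().map(|it| it.kind()), Some(SyntaxKind::IMPL));
}
```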
diff --git a/crates/syntax/test_data/parser/ok/0045_block_attrs.rast b/crates/syntax/test_data/parser/ok/0045_block_attrs.rast
index 50ab52d32..5e50b4e0b 100644
--- a/crates/syntax/test_data/parser/ok/0045_block_attrs.rast
+++ b/crates/syntax/test_data/parser/ok/0045_block_attrs.rast
@@ -127,9 +127,9 @@ [email protected] | |||
127 | WHITESPACE@537..538 "\n" | 127 | WHITESPACE@537..538 "\n" |
128 | R_CURLY@538..539 "}" | 128 | R_CURLY@538..539 "}" |
129 | WHITESPACE@539..541 "\n\n" | 129 | WHITESPACE@539..541 "\n\n" |
130 | COMMENT@541..601 "// https://github.com ..." | 130 | IMPL@541..763 |
131 | WHITESPACE@601..602 "\n" | 131 | COMMENT@541..601 "// https://github.com ..." |
132 | IMPL@602..763 | 132 | WHITESPACE@601..602 "\n" |
133 | IMPL_KW@602..606 "impl" | 133 | IMPL_KW@602..606 "impl" |
134 | WHITESPACE@606..607 " " | 134 | WHITESPACE@606..607 " " |
135 | [email protected] | 135 | [email protected] |