author | bors[bot] <bors[bot]@users.noreply.github.com> | 2019-04-01 10:30:25 +0100
committer | bors[bot] <bors[bot]@users.noreply.github.com> | 2019-04-01 10:30:25 +0100
commit | 42a883f06c28ddeab22e5703a578f19110dde7f3
tree | fe57697b54ccfb791fe96c13cb553a8570516270 /crates/ra_syntax
parent | dec9bde10868b5e459535449476d17a6a0987b3e
parent | 9e213385c9d06db3c8ca20812779e2b8f8ad2c71
Merge #1078
1078: rewrite syntax trees r=matklad a=matklad
Co-authored-by: Aleksey Kladov <[email protected]>
Diffstat (limited to 'crates/ra_syntax')
-rw-r--r-- | crates/ra_syntax/Cargo.toml | 2
-rw-r--r-- | crates/ra_syntax/src/algo.rs | 41
-rw-r--r-- | crates/ra_syntax/src/ast.rs | 246
-rw-r--r-- | crates/ra_syntax/src/ast/generated.rs | 527
-rw-r--r-- | crates/ra_syntax/src/grammar.ron | 35
-rw-r--r-- | crates/ra_syntax/src/lib.rs | 34
-rw-r--r-- | crates/ra_syntax/src/parsing/reparsing.rs | 142
-rw-r--r-- | crates/ra_syntax/src/parsing/text_tree_sink.rs | 28
-rw-r--r-- | crates/ra_syntax/src/syntax_node.rs | 287
-rw-r--r-- | crates/ra_syntax/src/syntax_text.rs | 15
-rw-r--r-- | crates/ra_syntax/src/validation.rs | 18
-rw-r--r-- | crates/ra_syntax/src/validation/byte.rs | 6
-rw-r--r-- | crates/ra_syntax/src/validation/byte_string.rs | 6
-rw-r--r-- | crates/ra_syntax/src/validation/char.rs | 6
-rw-r--r-- | crates/ra_syntax/src/validation/string.rs | 6
15 files changed, 599 insertions, 800 deletions
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml
index 7e70dad3f..1a763fb47 100644
--- a/crates/ra_syntax/Cargo.toml
+++ b/crates/ra_syntax/Cargo.toml
@@ -13,7 +13,7 @@ unicode-xid = "0.1.0" | |||
13 | itertools = "0.8.0" | 13 | itertools = "0.8.0" |
14 | drop_bomb = "0.1.4" | 14 | drop_bomb = "0.1.4" |
15 | parking_lot = "0.7.0" | 15 | parking_lot = "0.7.0" |
16 | rowan = "0.3.3" | 16 | rowan = "0.4.0" |
17 | 17 | ||
18 | # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here | 18 | # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here |
19 | # to reduce number of compilations | 19 | # to reduce number of compilations |
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index e2b4f0388..06b45135c 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -1,18 +1,14 @@ | |||
1 | pub mod visit; | 1 | pub mod visit; |
2 | 2 | ||
3 | use rowan::TransparentNewType; | 3 | use crate::{SyntaxNode, TextRange, TextUnit, AstNode, Direction, SyntaxToken, SyntaxElement}; |
4 | 4 | ||
5 | use crate::{SyntaxNode, TextRange, TextUnit, AstNode, Direction}; | 5 | pub use rowan::TokenAtOffset; |
6 | 6 | ||
7 | pub use rowan::LeafAtOffset; | 7 | pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> { |
8 | 8 | match node.0.token_at_offset(offset) { | |
9 | pub fn find_leaf_at_offset(node: &SyntaxNode, offset: TextUnit) -> LeafAtOffset<&SyntaxNode> { | 9 | TokenAtOffset::None => TokenAtOffset::None, |
10 | match node.0.leaf_at_offset(offset) { | 10 | TokenAtOffset::Single(n) => TokenAtOffset::Single(n.into()), |
11 | LeafAtOffset::None => LeafAtOffset::None, | 11 | TokenAtOffset::Between(l, r) => TokenAtOffset::Between(l.into(), r.into()), |
12 | LeafAtOffset::Single(n) => LeafAtOffset::Single(SyntaxNode::from_repr(n)), | ||
13 | LeafAtOffset::Between(l, r) => { | ||
14 | LeafAtOffset::Between(SyntaxNode::from_repr(l), SyntaxNode::from_repr(r)) | ||
15 | } | ||
16 | } | 12 | } |
17 | } | 13 | } |
18 | 14 | ||
@@ -26,16 +22,29 @@ pub fn find_leaf_at_offset(node: &SyntaxNode, offset: TextUnit) -> LeafAtOffset< | |||
26 | /// | 22 | /// |
27 | /// then the left node will be silently preferred. | 23 | /// then the left node will be silently preferred. |
28 | pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<&N> { | 24 | pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<&N> { |
29 | find_leaf_at_offset(syntax, offset).find_map(|leaf| leaf.ancestors().find_map(N::cast)) | 25 | find_token_at_offset(syntax, offset) |
26 | .find_map(|leaf| leaf.parent().ancestors().find_map(N::cast)) | ||
30 | } | 27 | } |
31 | 28 | ||
32 | /// Finds the first sibling in the given direction which is not `trivia` | 29 | /// Finds the first sibling in the given direction which is not `trivia` |
33 | pub fn non_trivia_sibling(node: &SyntaxNode, direction: Direction) -> Option<&SyntaxNode> { | 30 | pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option<SyntaxElement> { |
34 | node.siblings(direction).skip(1).find(|node| !node.kind().is_trivia()) | 31 | return match element { |
32 | SyntaxElement::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia), | ||
33 | SyntaxElement::Token(token) => { | ||
34 | token.siblings_with_tokens(direction).skip(1).find(not_trivia) | ||
35 | } | ||
36 | }; | ||
37 | |||
38 | fn not_trivia(element: &SyntaxElement) -> bool { | ||
39 | match element { | ||
40 | SyntaxElement::Node(_) => true, | ||
41 | SyntaxElement::Token(token) => !token.kind().is_trivia(), | ||
42 | } | ||
43 | } | ||
35 | } | 44 | } |
36 | 45 | ||
37 | pub fn find_covering_node(root: &SyntaxNode, range: TextRange) -> &SyntaxNode { | 46 | pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement { |
38 | SyntaxNode::from_repr(root.0.covering_node(range)) | 47 | root.0.covering_node(range).into() |
39 | } | 48 | } |
40 | 49 | ||
41 | // Replace with `std::iter::successors` in `1.34.0` | 50 | // Replace with `std::iter::successors` in `1.34.0` |
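
The `algo.rs` hunk above replaces the leaf-node helpers with token-aware ones: offsets now resolve to a `TokenAtOffset<SyntaxToken>` and ranges to a `SyntaxElement`. A minimal usage sketch, assuming the crate re-exports shown in this diff; the wrapper function, its arguments, and the degenerate range are illustrative, not part of the patch:

    use ra_syntax::{
        algo::{self, TokenAtOffset},
        ast, AstNode, SourceFile, SyntaxElement, TextRange, TextUnit,
    };

    fn inspect(file: &SourceFile, offset: TextUnit) {
        let root = file.syntax();
        // Offsets now resolve to tokens; climb via `parent()` to reach nodes.
        match algo::find_token_at_offset(root, offset) {
            TokenAtOffset::Single(token) => {
                let _enclosing_fn = token.parent().ancestors().find_map(ast::FnDef::cast);
            }
            TokenAtOffset::Between(left, right) => {
                // The offset sits on a boundary; both neighbours are tokens.
                let _ = (left.kind(), right.kind());
            }
            TokenAtOffset::None => {}
        }
        // Ranges resolve to a SyntaxElement, which is either a node or a token.
        match algo::find_covering_element(root, TextRange::from_to(offset, offset)) {
            SyntaxElement::Node(node) => println!("covered by node {:?}", node.kind()),
            SyntaxElement::Token(token) => println!("covered by token {:?}", token.kind()),
        }
    }
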
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs
index fd7e63f84..9a44afc67 100644
--- a/crates/ra_syntax/src/ast.rs
+++ b/crates/ra_syntax/src/ast.rs
@@ -7,7 +7,7 @@ use itertools::Itertools; | |||
7 | 7 | ||
8 | pub use self::generated::*; | 8 | pub use self::generated::*; |
9 | use crate::{ | 9 | use crate::{ |
10 | syntax_node::{SyntaxNode, SyntaxNodeChildren, TreeArc, RaTypes}, | 10 | syntax_node::{SyntaxNode, SyntaxNodeChildren, TreeArc, RaTypes, SyntaxToken, SyntaxElement, SyntaxElementChildren}, |
11 | SmolStr, | 11 | SmolStr, |
12 | SyntaxKind::*, | 12 | SyntaxKind::*, |
13 | }; | 13 | }; |
@@ -27,7 +27,8 @@ pub trait AstNode: | |||
27 | 27 | ||
28 | pub trait AstToken: AstNode { | 28 | pub trait AstToken: AstNode { |
29 | fn text(&self) -> &SmolStr { | 29 | fn text(&self) -> &SmolStr { |
30 | self.syntax().leaf_text().unwrap() | 30 | // self.syntax().leaf_text().unwrap() |
31 | unimplemented!() | ||
31 | } | 32 | } |
32 | } | 33 | } |
33 | 34 | ||
@@ -126,8 +127,8 @@ pub trait AttrsOwner: AstNode { | |||
126 | } | 127 | } |
127 | 128 | ||
128 | pub trait DocCommentsOwner: AstNode { | 129 | pub trait DocCommentsOwner: AstNode { |
129 | fn doc_comments(&self) -> AstChildren<Comment> { | 130 | fn doc_comments(&self) -> CommentIter { |
130 | children(self) | 131 | CommentIter { iter: self.syntax().children_with_tokens() } |
131 | } | 132 | } |
132 | 133 | ||
133 | /// Returns the textual content of a doc comment block as a single string. | 134 | /// Returns the textual content of a doc comment block as a single string. |
@@ -179,9 +180,9 @@ impl Attr { | |||
179 | 180 | ||
180 | pub fn as_atom(&self) -> Option<SmolStr> { | 181 | pub fn as_atom(&self) -> Option<SmolStr> { |
181 | let tt = self.value()?; | 182 | let tt = self.value()?; |
182 | let (_bra, attr, _ket) = tt.syntax().children().collect_tuple()?; | 183 | let (_bra, attr, _ket) = tt.syntax().children_with_tokens().collect_tuple()?; |
183 | if attr.kind() == IDENT { | 184 | if attr.kind() == IDENT { |
184 | Some(attr.leaf_text().unwrap().clone()) | 185 | Some(attr.as_token()?.text().clone()) |
185 | } else { | 186 | } else { |
186 | None | 187 | None |
187 | } | 188 | } |
@@ -189,10 +190,10 @@ impl Attr { | |||
189 | 190 | ||
190 | pub fn as_call(&self) -> Option<(SmolStr, &TokenTree)> { | 191 | pub fn as_call(&self) -> Option<(SmolStr, &TokenTree)> { |
191 | let tt = self.value()?; | 192 | let tt = self.value()?; |
192 | let (_bra, attr, args, _ket) = tt.syntax().children().collect_tuple()?; | 193 | let (_bra, attr, args, _ket) = tt.syntax().children_with_tokens().collect_tuple()?; |
193 | let args = TokenTree::cast(args)?; | 194 | let args = TokenTree::cast(args.as_node()?)?; |
194 | if attr.kind() == IDENT { | 195 | if attr.kind() == IDENT { |
195 | Some((attr.leaf_text().unwrap().clone(), args)) | 196 | Some((attr.as_token()?.text().clone(), args)) |
196 | } else { | 197 | } else { |
197 | None | 198 | None |
198 | } | 199 | } |
@@ -200,16 +201,35 @@ impl Attr { | |||
200 | 201 | ||
201 | pub fn as_named(&self) -> Option<SmolStr> { | 202 | pub fn as_named(&self) -> Option<SmolStr> { |
202 | let tt = self.value()?; | 203 | let tt = self.value()?; |
203 | let attr = tt.syntax().children().nth(1)?; | 204 | let attr = tt.syntax().children_with_tokens().nth(1)?; |
204 | if attr.kind() == IDENT { | 205 | if attr.kind() == IDENT { |
205 | Some(attr.leaf_text().unwrap().clone()) | 206 | Some(attr.as_token()?.text().clone()) |
206 | } else { | 207 | } else { |
207 | None | 208 | None |
208 | } | 209 | } |
209 | } | 210 | } |
210 | } | 211 | } |
211 | 212 | ||
212 | impl Comment { | 213 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
214 | pub struct Comment<'a>(SyntaxToken<'a>); | ||
215 | |||
216 | impl<'a> Comment<'a> { | ||
217 | pub fn cast(token: SyntaxToken<'a>) -> Option<Self> { | ||
218 | if token.kind() == COMMENT { | ||
219 | Some(Comment(token)) | ||
220 | } else { | ||
221 | None | ||
222 | } | ||
223 | } | ||
224 | |||
225 | pub fn syntax(&self) -> SyntaxToken<'a> { | ||
226 | self.0 | ||
227 | } | ||
228 | |||
229 | pub fn text(&self) -> &'a SmolStr { | ||
230 | self.0.text() | ||
231 | } | ||
232 | |||
213 | pub fn flavor(&self) -> CommentFlavor { | 233 | pub fn flavor(&self) -> CommentFlavor { |
214 | let text = self.text(); | 234 | let text = self.text(); |
215 | if text.starts_with("///") { | 235 | if text.starts_with("///") { |
@@ -230,13 +250,16 @@ impl Comment { | |||
230 | pub fn prefix(&self) -> &'static str { | 250 | pub fn prefix(&self) -> &'static str { |
231 | self.flavor().prefix() | 251 | self.flavor().prefix() |
232 | } | 252 | } |
253 | } | ||
233 | 254 | ||
234 | pub fn count_newlines_lazy(&self) -> impl Iterator<Item = &()> { | 255 | pub struct CommentIter<'a> { |
235 | self.text().chars().filter(|&c| c == '\n').map(|_| &()) | 256 | iter: SyntaxElementChildren<'a>, |
236 | } | 257 | } |
237 | 258 | ||
238 | pub fn has_newlines(&self) -> bool { | 259 | impl<'a> Iterator for CommentIter<'a> { |
239 | self.count_newlines_lazy().count() > 0 | 260 | type Item = Comment<'a>; |
261 | fn next(&mut self) -> Option<Comment<'a>> { | ||
262 | self.iter.by_ref().find_map(|el| el.as_token().and_then(Comment::cast)) | ||
240 | } | 263 | } |
241 | } | 264 | } |
242 | 265 | ||
@@ -267,27 +290,42 @@ impl CommentFlavor { | |||
267 | } | 290 | } |
268 | } | 291 | } |
269 | 292 | ||
270 | impl Whitespace { | 293 | pub struct Whitespace<'a>(SyntaxToken<'a>); |
271 | pub fn count_newlines_lazy(&self) -> impl Iterator<Item = &()> { | 294 | |
272 | self.text().chars().filter(|&c| c == '\n').map(|_| &()) | 295 | impl<'a> Whitespace<'a> { |
296 | pub fn cast(token: SyntaxToken<'a>) -> Option<Self> { | ||
297 | if token.kind() == WHITESPACE { | ||
298 | Some(Whitespace(token)) | ||
299 | } else { | ||
300 | None | ||
301 | } | ||
302 | } | ||
303 | |||
304 | pub fn syntax(&self) -> SyntaxToken<'a> { | ||
305 | self.0 | ||
273 | } | 306 | } |
274 | 307 | ||
275 | pub fn has_newlines(&self) -> bool { | 308 | pub fn text(&self) -> &'a SmolStr { |
276 | self.text().contains('\n') | 309 | self.0.text() |
310 | } | ||
311 | |||
312 | pub fn spans_multiple_lines(&self) -> bool { | ||
313 | let text = self.text(); | ||
314 | text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n')) | ||
277 | } | 315 | } |
278 | } | 316 | } |
279 | 317 | ||
280 | impl Name { | 318 | impl Name { |
281 | pub fn text(&self) -> &SmolStr { | 319 | pub fn text(&self) -> &SmolStr { |
282 | let ident = self.syntax().first_child().unwrap(); | 320 | let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap(); |
283 | ident.leaf_text().unwrap() | 321 | ident.text() |
284 | } | 322 | } |
285 | } | 323 | } |
286 | 324 | ||
287 | impl NameRef { | 325 | impl NameRef { |
288 | pub fn text(&self) -> &SmolStr { | 326 | pub fn text(&self) -> &SmolStr { |
289 | let ident = self.syntax().first_child().unwrap(); | 327 | let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap(); |
290 | ident.leaf_text().unwrap() | 328 | ident.text() |
291 | } | 329 | } |
292 | } | 330 | } |
293 | 331 | ||
@@ -316,7 +354,7 @@ impl ImplBlock { | |||
316 | 354 | ||
317 | impl Module { | 355 | impl Module { |
318 | pub fn has_semi(&self) -> bool { | 356 | pub fn has_semi(&self) -> bool { |
319 | match self.syntax().last_child() { | 357 | match self.syntax().last_child_or_token() { |
320 | None => false, | 358 | None => false, |
321 | Some(node) => node.kind() == SEMI, | 359 | Some(node) => node.kind() == SEMI, |
322 | } | 360 | } |
@@ -325,7 +363,7 @@ impl Module { | |||
325 | 363 | ||
326 | impl LetStmt { | 364 | impl LetStmt { |
327 | pub fn has_semi(&self) -> bool { | 365 | pub fn has_semi(&self) -> bool { |
328 | match self.syntax().last_child() { | 366 | match self.syntax().last_child_or_token() { |
329 | None => false, | 367 | None => false, |
330 | Some(node) => node.kind() == SEMI, | 368 | Some(node) => node.kind() == SEMI, |
331 | } | 369 | } |
@@ -360,7 +398,7 @@ impl IfExpr { | |||
360 | 398 | ||
361 | impl ExprStmt { | 399 | impl ExprStmt { |
362 | pub fn has_semi(&self) -> bool { | 400 | pub fn has_semi(&self) -> bool { |
363 | match self.syntax().last_child() { | 401 | match self.syntax().last_child_or_token() { |
364 | None => false, | 402 | None => false, |
365 | Some(node) => node.kind() == SEMI, | 403 | Some(node) => node.kind() == SEMI, |
366 | } | 404 | } |
@@ -384,7 +422,7 @@ impl PathSegment { | |||
384 | let res = if let Some(name_ref) = self.name_ref() { | 422 | let res = if let Some(name_ref) = self.name_ref() { |
385 | PathSegmentKind::Name(name_ref) | 423 | PathSegmentKind::Name(name_ref) |
386 | } else { | 424 | } else { |
387 | match self.syntax().first_child()?.kind() { | 425 | match self.syntax().first_child_or_token()?.kind() { |
388 | SELF_KW => PathSegmentKind::SelfKw, | 426 | SELF_KW => PathSegmentKind::SelfKw, |
389 | SUPER_KW => PathSegmentKind::SuperKw, | 427 | SUPER_KW => PathSegmentKind::SuperKw, |
390 | CRATE_KW => PathSegmentKind::CrateKw, | 428 | CRATE_KW => PathSegmentKind::CrateKw, |
@@ -395,7 +433,7 @@ impl PathSegment { | |||
395 | } | 433 | } |
396 | 434 | ||
397 | pub fn has_colon_colon(&self) -> bool { | 435 | pub fn has_colon_colon(&self) -> bool { |
398 | match self.syntax.first_child().map(|s| s.kind()) { | 436 | match self.syntax.first_child_or_token().map(|s| s.kind()) { |
399 | Some(COLONCOLON) => true, | 437 | Some(COLONCOLON) => true, |
400 | _ => false, | 438 | _ => false, |
401 | } | 439 | } |
@@ -410,7 +448,7 @@ impl Path { | |||
410 | 448 | ||
411 | impl UseTree { | 449 | impl UseTree { |
412 | pub fn has_star(&self) -> bool { | 450 | pub fn has_star(&self) -> bool { |
413 | self.syntax().children().any(|it| it.kind() == STAR) | 451 | self.syntax().children_with_tokens().any(|it| it.kind() == STAR) |
414 | } | 452 | } |
415 | } | 453 | } |
416 | 454 | ||
@@ -425,7 +463,7 @@ impl UseTreeList { | |||
425 | 463 | ||
426 | impl RefPat { | 464 | impl RefPat { |
427 | pub fn is_mut(&self) -> bool { | 465 | pub fn is_mut(&self) -> bool { |
428 | self.syntax().children().any(|n| n.kind() == MUT_KW) | 466 | self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) |
429 | } | 467 | } |
430 | } | 468 | } |
431 | 469 | ||
@@ -500,19 +538,19 @@ impl EnumVariant { | |||
500 | 538 | ||
501 | impl PointerType { | 539 | impl PointerType { |
502 | pub fn is_mut(&self) -> bool { | 540 | pub fn is_mut(&self) -> bool { |
503 | self.syntax().children().any(|n| n.kind() == MUT_KW) | 541 | self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) |
504 | } | 542 | } |
505 | } | 543 | } |
506 | 544 | ||
507 | impl ReferenceType { | 545 | impl ReferenceType { |
508 | pub fn is_mut(&self) -> bool { | 546 | pub fn is_mut(&self) -> bool { |
509 | self.syntax().children().any(|n| n.kind() == MUT_KW) | 547 | self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) |
510 | } | 548 | } |
511 | } | 549 | } |
512 | 550 | ||
513 | impl RefExpr { | 551 | impl RefExpr { |
514 | pub fn is_mut(&self) -> bool { | 552 | pub fn is_mut(&self) -> bool { |
515 | self.syntax().children().any(|n| n.kind() == MUT_KW) | 553 | self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) |
516 | } | 554 | } |
517 | } | 555 | } |
518 | 556 | ||
@@ -528,7 +566,7 @@ pub enum PrefixOp { | |||
528 | 566 | ||
529 | impl PrefixExpr { | 567 | impl PrefixExpr { |
530 | pub fn op_kind(&self) -> Option<PrefixOp> { | 568 | pub fn op_kind(&self) -> Option<PrefixOp> { |
531 | match self.syntax().first_child()?.kind() { | 569 | match self.op_token()?.kind() { |
532 | STAR => Some(PrefixOp::Deref), | 570 | STAR => Some(PrefixOp::Deref), |
533 | EXCL => Some(PrefixOp::Not), | 571 | EXCL => Some(PrefixOp::Not), |
534 | MINUS => Some(PrefixOp::Neg), | 572 | MINUS => Some(PrefixOp::Neg), |
@@ -536,8 +574,8 @@ impl PrefixExpr { | |||
536 | } | 574 | } |
537 | } | 575 | } |
538 | 576 | ||
539 | pub fn op(&self) -> Option<&SyntaxNode> { | 577 | pub fn op_token(&self) -> Option<SyntaxToken> { |
540 | self.syntax().first_child() | 578 | self.syntax().first_child_or_token()?.as_token() |
541 | } | 579 | } |
542 | } | 580 | } |
543 | 581 | ||
@@ -608,40 +646,42 @@ pub enum BinOp { | |||
608 | } | 646 | } |
609 | 647 | ||
610 | impl BinExpr { | 648 | impl BinExpr { |
611 | fn op_details(&self) -> Option<(&SyntaxNode, BinOp)> { | 649 | fn op_details(&self) -> Option<(SyntaxToken, BinOp)> { |
612 | self.syntax().children().find_map(|c| match c.kind() { | 650 | self.syntax().children_with_tokens().filter_map(|it| it.as_token()).find_map(|c| { |
613 | PIPEPIPE => Some((c, BinOp::BooleanOr)), | 651 | match c.kind() { |
614 | AMPAMP => Some((c, BinOp::BooleanAnd)), | 652 | PIPEPIPE => Some((c, BinOp::BooleanOr)), |
615 | EQEQ => Some((c, BinOp::EqualityTest)), | 653 | AMPAMP => Some((c, BinOp::BooleanAnd)), |
616 | NEQ => Some((c, BinOp::NegatedEqualityTest)), | 654 | EQEQ => Some((c, BinOp::EqualityTest)), |
617 | LTEQ => Some((c, BinOp::LesserEqualTest)), | 655 | NEQ => Some((c, BinOp::NegatedEqualityTest)), |
618 | GTEQ => Some((c, BinOp::GreaterEqualTest)), | 656 | LTEQ => Some((c, BinOp::LesserEqualTest)), |
619 | L_ANGLE => Some((c, BinOp::LesserTest)), | 657 | GTEQ => Some((c, BinOp::GreaterEqualTest)), |
620 | R_ANGLE => Some((c, BinOp::GreaterTest)), | 658 | L_ANGLE => Some((c, BinOp::LesserTest)), |
621 | PLUS => Some((c, BinOp::Addition)), | 659 | R_ANGLE => Some((c, BinOp::GreaterTest)), |
622 | STAR => Some((c, BinOp::Multiplication)), | 660 | PLUS => Some((c, BinOp::Addition)), |
623 | MINUS => Some((c, BinOp::Subtraction)), | 661 | STAR => Some((c, BinOp::Multiplication)), |
624 | SLASH => Some((c, BinOp::Division)), | 662 | MINUS => Some((c, BinOp::Subtraction)), |
625 | PERCENT => Some((c, BinOp::Remainder)), | 663 | SLASH => Some((c, BinOp::Division)), |
626 | SHL => Some((c, BinOp::LeftShift)), | 664 | PERCENT => Some((c, BinOp::Remainder)), |
627 | SHR => Some((c, BinOp::RightShift)), | 665 | SHL => Some((c, BinOp::LeftShift)), |
628 | CARET => Some((c, BinOp::BitwiseXor)), | 666 | SHR => Some((c, BinOp::RightShift)), |
629 | PIPE => Some((c, BinOp::BitwiseOr)), | 667 | CARET => Some((c, BinOp::BitwiseXor)), |
630 | AMP => Some((c, BinOp::BitwiseAnd)), | 668 | PIPE => Some((c, BinOp::BitwiseOr)), |
631 | DOTDOT => Some((c, BinOp::RangeRightOpen)), | 669 | AMP => Some((c, BinOp::BitwiseAnd)), |
632 | DOTDOTEQ => Some((c, BinOp::RangeRightClosed)), | 670 | DOTDOT => Some((c, BinOp::RangeRightOpen)), |
633 | EQ => Some((c, BinOp::Assignment)), | 671 | DOTDOTEQ => Some((c, BinOp::RangeRightClosed)), |
634 | PLUSEQ => Some((c, BinOp::AddAssign)), | 672 | EQ => Some((c, BinOp::Assignment)), |
635 | SLASHEQ => Some((c, BinOp::DivAssign)), | 673 | PLUSEQ => Some((c, BinOp::AddAssign)), |
636 | STAREQ => Some((c, BinOp::MulAssign)), | 674 | SLASHEQ => Some((c, BinOp::DivAssign)), |
637 | PERCENTEQ => Some((c, BinOp::RemAssign)), | 675 | STAREQ => Some((c, BinOp::MulAssign)), |
638 | SHREQ => Some((c, BinOp::ShrAssign)), | 676 | PERCENTEQ => Some((c, BinOp::RemAssign)), |
639 | SHLEQ => Some((c, BinOp::ShlAssign)), | 677 | SHREQ => Some((c, BinOp::ShrAssign)), |
640 | MINUSEQ => Some((c, BinOp::SubAssign)), | 678 | SHLEQ => Some((c, BinOp::ShlAssign)), |
641 | PIPEEQ => Some((c, BinOp::BitOrAssign)), | 679 | MINUSEQ => Some((c, BinOp::SubAssign)), |
642 | AMPEQ => Some((c, BinOp::BitAndAssign)), | 680 | PIPEEQ => Some((c, BinOp::BitOrAssign)), |
643 | CARETEQ => Some((c, BinOp::BitXorAssign)), | 681 | AMPEQ => Some((c, BinOp::BitAndAssign)), |
644 | _ => None, | 682 | CARETEQ => Some((c, BinOp::BitXorAssign)), |
683 | _ => None, | ||
684 | } | ||
645 | }) | 685 | }) |
646 | } | 686 | } |
647 | 687 | ||
@@ -649,7 +689,7 @@ impl BinExpr { | |||
649 | self.op_details().map(|t| t.1) | 689 | self.op_details().map(|t| t.1) |
650 | } | 690 | } |
651 | 691 | ||
652 | pub fn op(&self) -> Option<&SyntaxNode> { | 692 | pub fn op_token(&self) -> Option<SyntaxToken> { |
653 | self.op_details().map(|t| t.0) | 693 | self.op_details().map(|t| t.0) |
654 | } | 694 | } |
655 | 695 | ||
@@ -680,11 +720,23 @@ pub enum SelfParamFlavor { | |||
680 | } | 720 | } |
681 | 721 | ||
682 | impl SelfParam { | 722 | impl SelfParam { |
723 | pub fn self_kw_token(&self) -> SyntaxToken { | ||
724 | self.syntax() | ||
725 | .children_with_tokens() | ||
726 | .filter_map(|it| it.as_token()) | ||
727 | .find(|it| it.kind() == SELF_KW) | ||
728 | .expect("invalid tree: self param must have self") | ||
729 | } | ||
730 | |||
683 | pub fn flavor(&self) -> SelfParamFlavor { | 731 | pub fn flavor(&self) -> SelfParamFlavor { |
684 | let borrowed = self.syntax().children().any(|n| n.kind() == AMP); | 732 | let borrowed = self.syntax().children_with_tokens().any(|n| n.kind() == AMP); |
685 | if borrowed { | 733 | if borrowed { |
686 | // check for a `mut` coming after the & -- `mut &self` != `&mut self` | 734 | // check for a `mut` coming after the & -- `mut &self` != `&mut self` |
687 | if self.syntax().children().skip_while(|n| n.kind() != AMP).any(|n| n.kind() == MUT_KW) | 735 | if self |
736 | .syntax() | ||
737 | .children_with_tokens() | ||
738 | .skip_while(|n| n.kind() != AMP) | ||
739 | .any(|n| n.kind() == MUT_KW) | ||
688 | { | 740 | { |
689 | SelfParamFlavor::MutRef | 741 | SelfParamFlavor::MutRef |
690 | } else { | 742 | } else { |
@@ -707,25 +759,31 @@ pub enum LiteralFlavor { | |||
707 | Bool, | 759 | Bool, |
708 | } | 760 | } |
709 | 761 | ||
710 | impl LiteralExpr { | 762 | impl Literal { |
763 | pub fn token(&self) -> SyntaxToken { | ||
764 | match self.syntax().first_child_or_token().unwrap() { | ||
765 | SyntaxElement::Token(token) => token, | ||
766 | _ => unreachable!(), | ||
767 | } | ||
768 | } | ||
769 | |||
711 | pub fn flavor(&self) -> LiteralFlavor { | 770 | pub fn flavor(&self) -> LiteralFlavor { |
712 | let syntax = self.syntax(); | 771 | match self.token().kind() { |
713 | match syntax.kind() { | ||
714 | INT_NUMBER => { | 772 | INT_NUMBER => { |
715 | let allowed_suffix_list = [ | 773 | let allowed_suffix_list = [ |
716 | "isize", "i128", "i64", "i32", "i16", "i8", "usize", "u128", "u64", "u32", | 774 | "isize", "i128", "i64", "i32", "i16", "i8", "usize", "u128", "u64", "u32", |
717 | "u16", "u8", | 775 | "u16", "u8", |
718 | ]; | 776 | ]; |
719 | let text = syntax.text().to_string(); | 777 | let text = self.token().text().to_string(); |
720 | let suffix = allowed_suffix_list | 778 | let suffix = allowed_suffix_list |
721 | .iter() | 779 | .iter() |
722 | .find(|&s| text.ends_with(s)) | 780 | .find(|&s| text.ends_with(s)) |
723 | .map(|&suf| SmolStr::new(suf)); | 781 | .map(|&suf| SmolStr::new(suf)); |
724 | LiteralFlavor::IntNumber { suffix: suffix } | 782 | LiteralFlavor::IntNumber { suffix } |
725 | } | 783 | } |
726 | FLOAT_NUMBER => { | 784 | FLOAT_NUMBER => { |
727 | let allowed_suffix_list = ["f64", "f32"]; | 785 | let allowed_suffix_list = ["f64", "f32"]; |
728 | let text = syntax.text().to_string(); | 786 | let text = self.token().text().to_string(); |
729 | let suffix = allowed_suffix_list | 787 | let suffix = allowed_suffix_list |
730 | .iter() | 788 | .iter() |
731 | .find(|&s| text.ends_with(s)) | 789 | .find(|&s| text.ends_with(s)) |
@@ -750,11 +808,29 @@ impl NamedField { | |||
750 | 808 | ||
751 | impl BindPat { | 809 | impl BindPat { |
752 | pub fn is_mutable(&self) -> bool { | 810 | pub fn is_mutable(&self) -> bool { |
753 | self.syntax().children().any(|n| n.kind() == MUT_KW) | 811 | self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) |
754 | } | 812 | } |
755 | 813 | ||
756 | pub fn is_ref(&self) -> bool { | 814 | pub fn is_ref(&self) -> bool { |
757 | self.syntax().children().any(|n| n.kind() == REF_KW) | 815 | self.syntax().children_with_tokens().any(|n| n.kind() == REF_KW) |
816 | } | ||
817 | } | ||
818 | |||
819 | impl LifetimeParam { | ||
820 | pub fn lifetime_token(&self) -> Option<SyntaxToken> { | ||
821 | self.syntax() | ||
822 | .children_with_tokens() | ||
823 | .filter_map(|it| it.as_token()) | ||
824 | .find(|it| it.kind() == LIFETIME) | ||
825 | } | ||
826 | } | ||
827 | |||
828 | impl WherePred { | ||
829 | pub fn lifetime_token(&self) -> Option<SyntaxToken> { | ||
830 | self.syntax() | ||
831 | .children_with_tokens() | ||
832 | .filter_map(|it| it.as_token()) | ||
833 | .find(|it| it.kind() == LIFETIME) | ||
758 | } | 834 | } |
759 | } | 835 | } |
760 | 836 | ||
@@ -835,7 +911,7 @@ where | |||
835 | let pred = predicates.next().unwrap(); | 911 | let pred = predicates.next().unwrap(); |
836 | let mut bounds = pred.type_bound_list().unwrap().bounds(); | 912 | let mut bounds = pred.type_bound_list().unwrap().bounds(); |
837 | 913 | ||
838 | assert_eq!("'a", pred.lifetime().unwrap().syntax().text().to_string()); | 914 | assert_eq!("'a", pred.lifetime_token().unwrap().text()); |
839 | 915 | ||
840 | assert_bound("'b", bounds.next()); | 916 | assert_bound("'b", bounds.next()); |
841 | assert_bound("'c", bounds.next()); | 917 | assert_bound("'c", bounds.next()); |
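
With `Comment` and `Whitespace` rewritten as thin wrappers around `SyntaxToken`, callers reach token data through the new accessors instead of child nodes. A hedged sketch of how that looks from the outside; the function and its parameters are made up, and it assumes `FnDef` implements `DocCommentsOwner`, which this hunk does not show:

    use ra_syntax::ast::{self, DocCommentsOwner};

    fn describe(func: &ast::FnDef, bin: &ast::BinExpr) {
        // `doc_comments()` now yields `ast::Comment`, a wrapper around a COMMENT token.
        for comment in func.doc_comments() {
            println!("{} {}", comment.prefix(), comment.text());
        }
        // Operator access is renamed: `op()` (a node) becomes `op_token()` (a token).
        if let Some(op) = bin.op_token() {
            println!("operator token: {}", op.text());
        }
    }
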
diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs
index c51b4caa4..4afe1a146 100644
--- a/crates/ra_syntax/src/ast/generated.rs
+++ b/crates/ra_syntax/src/ast/generated.rs
@@ -376,64 +376,6 @@ impl BreakExpr { | |||
376 | } | 376 | } |
377 | } | 377 | } |
378 | 378 | ||
379 | // Byte | ||
380 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
381 | #[repr(transparent)] | ||
382 | pub struct Byte { | ||
383 | pub(crate) syntax: SyntaxNode, | ||
384 | } | ||
385 | unsafe impl TransparentNewType for Byte { | ||
386 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
387 | } | ||
388 | |||
389 | impl AstNode for Byte { | ||
390 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
391 | match syntax.kind() { | ||
392 | BYTE => Some(Byte::from_repr(syntax.into_repr())), | ||
393 | _ => None, | ||
394 | } | ||
395 | } | ||
396 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
397 | } | ||
398 | |||
399 | impl ToOwned for Byte { | ||
400 | type Owned = TreeArc<Byte>; | ||
401 | fn to_owned(&self) -> TreeArc<Byte> { TreeArc::cast(self.syntax.to_owned()) } | ||
402 | } | ||
403 | |||
404 | |||
405 | impl ast::AstToken for Byte {} | ||
406 | impl Byte {} | ||
407 | |||
408 | // ByteString | ||
409 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
410 | #[repr(transparent)] | ||
411 | pub struct ByteString { | ||
412 | pub(crate) syntax: SyntaxNode, | ||
413 | } | ||
414 | unsafe impl TransparentNewType for ByteString { | ||
415 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
416 | } | ||
417 | |||
418 | impl AstNode for ByteString { | ||
419 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
420 | match syntax.kind() { | ||
421 | BYTE_STRING => Some(ByteString::from_repr(syntax.into_repr())), | ||
422 | _ => None, | ||
423 | } | ||
424 | } | ||
425 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
426 | } | ||
427 | |||
428 | impl ToOwned for ByteString { | ||
429 | type Owned = TreeArc<ByteString>; | ||
430 | fn to_owned(&self) -> TreeArc<ByteString> { TreeArc::cast(self.syntax.to_owned()) } | ||
431 | } | ||
432 | |||
433 | |||
434 | impl ast::AstToken for ByteString {} | ||
435 | impl ByteString {} | ||
436 | |||
437 | // CallExpr | 379 | // CallExpr |
438 | #[derive(Debug, PartialEq, Eq, Hash)] | 380 | #[derive(Debug, PartialEq, Eq, Hash)] |
439 | #[repr(transparent)] | 381 | #[repr(transparent)] |
@@ -503,64 +445,6 @@ impl CastExpr { | |||
503 | } | 445 | } |
504 | } | 446 | } |
505 | 447 | ||
506 | // Char | ||
507 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
508 | #[repr(transparent)] | ||
509 | pub struct Char { | ||
510 | pub(crate) syntax: SyntaxNode, | ||
511 | } | ||
512 | unsafe impl TransparentNewType for Char { | ||
513 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
514 | } | ||
515 | |||
516 | impl AstNode for Char { | ||
517 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
518 | match syntax.kind() { | ||
519 | CHAR => Some(Char::from_repr(syntax.into_repr())), | ||
520 | _ => None, | ||
521 | } | ||
522 | } | ||
523 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
524 | } | ||
525 | |||
526 | impl ToOwned for Char { | ||
527 | type Owned = TreeArc<Char>; | ||
528 | fn to_owned(&self) -> TreeArc<Char> { TreeArc::cast(self.syntax.to_owned()) } | ||
529 | } | ||
530 | |||
531 | |||
532 | impl ast::AstToken for Char {} | ||
533 | impl Char {} | ||
534 | |||
535 | // Comment | ||
536 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
537 | #[repr(transparent)] | ||
538 | pub struct Comment { | ||
539 | pub(crate) syntax: SyntaxNode, | ||
540 | } | ||
541 | unsafe impl TransparentNewType for Comment { | ||
542 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
543 | } | ||
544 | |||
545 | impl AstNode for Comment { | ||
546 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
547 | match syntax.kind() { | ||
548 | COMMENT => Some(Comment::from_repr(syntax.into_repr())), | ||
549 | _ => None, | ||
550 | } | ||
551 | } | ||
552 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
553 | } | ||
554 | |||
555 | impl ToOwned for Comment { | ||
556 | type Owned = TreeArc<Comment>; | ||
557 | fn to_owned(&self) -> TreeArc<Comment> { TreeArc::cast(self.syntax.to_owned()) } | ||
558 | } | ||
559 | |||
560 | |||
561 | impl ast::AstToken for Comment {} | ||
562 | impl Comment {} | ||
563 | |||
564 | // Condition | 448 | // Condition |
565 | #[derive(Debug, PartialEq, Eq, Hash)] | 449 | #[derive(Debug, PartialEq, Eq, Hash)] |
566 | #[repr(transparent)] | 450 | #[repr(transparent)] |
@@ -1115,35 +999,6 @@ impl ExternCrateItem { | |||
1115 | } | 999 | } |
1116 | } | 1000 | } |
1117 | 1001 | ||
1118 | // FalseKw | ||
1119 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
1120 | #[repr(transparent)] | ||
1121 | pub struct FalseKw { | ||
1122 | pub(crate) syntax: SyntaxNode, | ||
1123 | } | ||
1124 | unsafe impl TransparentNewType for FalseKw { | ||
1125 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
1126 | } | ||
1127 | |||
1128 | impl AstNode for FalseKw { | ||
1129 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
1130 | match syntax.kind() { | ||
1131 | FALSE_KW => Some(FalseKw::from_repr(syntax.into_repr())), | ||
1132 | _ => None, | ||
1133 | } | ||
1134 | } | ||
1135 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
1136 | } | ||
1137 | |||
1138 | impl ToOwned for FalseKw { | ||
1139 | type Owned = TreeArc<FalseKw>; | ||
1140 | fn to_owned(&self) -> TreeArc<FalseKw> { TreeArc::cast(self.syntax.to_owned()) } | ||
1141 | } | ||
1142 | |||
1143 | |||
1144 | impl ast::AstToken for FalseKw {} | ||
1145 | impl FalseKw {} | ||
1146 | |||
1147 | // FieldExpr | 1002 | // FieldExpr |
1148 | #[derive(Debug, PartialEq, Eq, Hash)] | 1003 | #[derive(Debug, PartialEq, Eq, Hash)] |
1149 | #[repr(transparent)] | 1004 | #[repr(transparent)] |
@@ -1249,35 +1104,6 @@ impl FieldPatList { | |||
1249 | } | 1104 | } |
1250 | } | 1105 | } |
1251 | 1106 | ||
1252 | // FloatNumber | ||
1253 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
1254 | #[repr(transparent)] | ||
1255 | pub struct FloatNumber { | ||
1256 | pub(crate) syntax: SyntaxNode, | ||
1257 | } | ||
1258 | unsafe impl TransparentNewType for FloatNumber { | ||
1259 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
1260 | } | ||
1261 | |||
1262 | impl AstNode for FloatNumber { | ||
1263 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
1264 | match syntax.kind() { | ||
1265 | FLOAT_NUMBER => Some(FloatNumber::from_repr(syntax.into_repr())), | ||
1266 | _ => None, | ||
1267 | } | ||
1268 | } | ||
1269 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
1270 | } | ||
1271 | |||
1272 | impl ToOwned for FloatNumber { | ||
1273 | type Owned = TreeArc<FloatNumber>; | ||
1274 | fn to_owned(&self) -> TreeArc<FloatNumber> { TreeArc::cast(self.syntax.to_owned()) } | ||
1275 | } | ||
1276 | |||
1277 | |||
1278 | impl ast::AstToken for FloatNumber {} | ||
1279 | impl FloatNumber {} | ||
1280 | |||
1281 | // FnDef | 1107 | // FnDef |
1282 | #[derive(Debug, PartialEq, Eq, Hash)] | 1108 | #[derive(Debug, PartialEq, Eq, Hash)] |
1283 | #[repr(transparent)] | 1109 | #[repr(transparent)] |
@@ -1613,35 +1439,6 @@ impl ToOwned for IndexExpr { | |||
1613 | 1439 | ||
1614 | impl IndexExpr {} | 1440 | impl IndexExpr {} |
1615 | 1441 | ||
1616 | // IntNumber | ||
1617 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
1618 | #[repr(transparent)] | ||
1619 | pub struct IntNumber { | ||
1620 | pub(crate) syntax: SyntaxNode, | ||
1621 | } | ||
1622 | unsafe impl TransparentNewType for IntNumber { | ||
1623 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
1624 | } | ||
1625 | |||
1626 | impl AstNode for IntNumber { | ||
1627 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
1628 | match syntax.kind() { | ||
1629 | INT_NUMBER => Some(IntNumber::from_repr(syntax.into_repr())), | ||
1630 | _ => None, | ||
1631 | } | ||
1632 | } | ||
1633 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
1634 | } | ||
1635 | |||
1636 | impl ToOwned for IntNumber { | ||
1637 | type Owned = TreeArc<IntNumber>; | ||
1638 | fn to_owned(&self) -> TreeArc<IntNumber> { TreeArc::cast(self.syntax.to_owned()) } | ||
1639 | } | ||
1640 | |||
1641 | |||
1642 | impl ast::AstToken for IntNumber {} | ||
1643 | impl IntNumber {} | ||
1644 | |||
1645 | // ItemList | 1442 | // ItemList |
1646 | #[derive(Debug, PartialEq, Eq, Hash)] | 1443 | #[derive(Debug, PartialEq, Eq, Hash)] |
1647 | #[repr(transparent)] | 1444 | #[repr(transparent)] |
@@ -1777,35 +1574,6 @@ impl LetStmt { | |||
1777 | } | 1574 | } |
1778 | } | 1575 | } |
1779 | 1576 | ||
1780 | // Lifetime | ||
1781 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
1782 | #[repr(transparent)] | ||
1783 | pub struct Lifetime { | ||
1784 | pub(crate) syntax: SyntaxNode, | ||
1785 | } | ||
1786 | unsafe impl TransparentNewType for Lifetime { | ||
1787 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
1788 | } | ||
1789 | |||
1790 | impl AstNode for Lifetime { | ||
1791 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
1792 | match syntax.kind() { | ||
1793 | LIFETIME => Some(Lifetime::from_repr(syntax.into_repr())), | ||
1794 | _ => None, | ||
1795 | } | ||
1796 | } | ||
1797 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
1798 | } | ||
1799 | |||
1800 | impl ToOwned for Lifetime { | ||
1801 | type Owned = TreeArc<Lifetime>; | ||
1802 | fn to_owned(&self) -> TreeArc<Lifetime> { TreeArc::cast(self.syntax.to_owned()) } | ||
1803 | } | ||
1804 | |||
1805 | |||
1806 | impl ast::AstToken for Lifetime {} | ||
1807 | impl Lifetime {} | ||
1808 | |||
1809 | // LifetimeArg | 1577 | // LifetimeArg |
1810 | #[derive(Debug, PartialEq, Eq, Hash)] | 1578 | #[derive(Debug, PartialEq, Eq, Hash)] |
1811 | #[repr(transparent)] | 1579 | #[repr(transparent)] |
@@ -1832,11 +1600,7 @@ impl ToOwned for LifetimeArg { | |||
1832 | } | 1600 | } |
1833 | 1601 | ||
1834 | 1602 | ||
1835 | impl LifetimeArg { | 1603 | impl LifetimeArg {} |
1836 | pub fn lifetime(&self) -> Option<&Lifetime> { | ||
1837 | super::child_opt(self) | ||
1838 | } | ||
1839 | } | ||
1840 | 1604 | ||
1841 | // LifetimeParam | 1605 | // LifetimeParam |
1842 | #[derive(Debug, PartialEq, Eq, Hash)] | 1606 | #[derive(Debug, PartialEq, Eq, Hash)] |
@@ -1865,11 +1629,7 @@ impl ToOwned for LifetimeParam { | |||
1865 | 1629 | ||
1866 | 1630 | ||
1867 | impl ast::AttrsOwner for LifetimeParam {} | 1631 | impl ast::AttrsOwner for LifetimeParam {} |
1868 | impl LifetimeParam { | 1632 | impl LifetimeParam {} |
1869 | pub fn lifetime(&self) -> Option<&Lifetime> { | ||
1870 | super::child_opt(self) | ||
1871 | } | ||
1872 | } | ||
1873 | 1633 | ||
1874 | // Literal | 1634 | // Literal |
1875 | #[derive(Debug, PartialEq, Eq, Hash)] | 1635 | #[derive(Debug, PartialEq, Eq, Hash)] |
@@ -1897,130 +1657,7 @@ impl ToOwned for Literal { | |||
1897 | } | 1657 | } |
1898 | 1658 | ||
1899 | 1659 | ||
1900 | impl Literal { | 1660 | impl Literal {} |
1901 | pub fn literal_expr(&self) -> Option<&LiteralExpr> { | ||
1902 | super::child_opt(self) | ||
1903 | } | ||
1904 | } | ||
1905 | |||
1906 | // LiteralExpr | ||
1907 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
1908 | #[repr(transparent)] | ||
1909 | pub struct LiteralExpr { | ||
1910 | pub(crate) syntax: SyntaxNode, | ||
1911 | } | ||
1912 | unsafe impl TransparentNewType for LiteralExpr { | ||
1913 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
1914 | } | ||
1915 | |||
1916 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] | ||
1917 | pub enum LiteralExprKind<'a> { | ||
1918 | String(&'a String), | ||
1919 | ByteString(&'a ByteString), | ||
1920 | RawString(&'a RawString), | ||
1921 | RawByteString(&'a RawByteString), | ||
1922 | Char(&'a Char), | ||
1923 | Byte(&'a Byte), | ||
1924 | IntNumber(&'a IntNumber), | ||
1925 | FloatNumber(&'a FloatNumber), | ||
1926 | TrueKw(&'a TrueKw), | ||
1927 | FalseKw(&'a FalseKw), | ||
1928 | } | ||
1929 | impl<'a> From<&'a String> for &'a LiteralExpr { | ||
1930 | fn from(n: &'a String) -> &'a LiteralExpr { | ||
1931 | LiteralExpr::cast(&n.syntax).unwrap() | ||
1932 | } | ||
1933 | } | ||
1934 | impl<'a> From<&'a ByteString> for &'a LiteralExpr { | ||
1935 | fn from(n: &'a ByteString) -> &'a LiteralExpr { | ||
1936 | LiteralExpr::cast(&n.syntax).unwrap() | ||
1937 | } | ||
1938 | } | ||
1939 | impl<'a> From<&'a RawString> for &'a LiteralExpr { | ||
1940 | fn from(n: &'a RawString) -> &'a LiteralExpr { | ||
1941 | LiteralExpr::cast(&n.syntax).unwrap() | ||
1942 | } | ||
1943 | } | ||
1944 | impl<'a> From<&'a RawByteString> for &'a LiteralExpr { | ||
1945 | fn from(n: &'a RawByteString) -> &'a LiteralExpr { | ||
1946 | LiteralExpr::cast(&n.syntax).unwrap() | ||
1947 | } | ||
1948 | } | ||
1949 | impl<'a> From<&'a Char> for &'a LiteralExpr { | ||
1950 | fn from(n: &'a Char) -> &'a LiteralExpr { | ||
1951 | LiteralExpr::cast(&n.syntax).unwrap() | ||
1952 | } | ||
1953 | } | ||
1954 | impl<'a> From<&'a Byte> for &'a LiteralExpr { | ||
1955 | fn from(n: &'a Byte) -> &'a LiteralExpr { | ||
1956 | LiteralExpr::cast(&n.syntax).unwrap() | ||
1957 | } | ||
1958 | } | ||
1959 | impl<'a> From<&'a IntNumber> for &'a LiteralExpr { | ||
1960 | fn from(n: &'a IntNumber) -> &'a LiteralExpr { | ||
1961 | LiteralExpr::cast(&n.syntax).unwrap() | ||
1962 | } | ||
1963 | } | ||
1964 | impl<'a> From<&'a FloatNumber> for &'a LiteralExpr { | ||
1965 | fn from(n: &'a FloatNumber) -> &'a LiteralExpr { | ||
1966 | LiteralExpr::cast(&n.syntax).unwrap() | ||
1967 | } | ||
1968 | } | ||
1969 | impl<'a> From<&'a TrueKw> for &'a LiteralExpr { | ||
1970 | fn from(n: &'a TrueKw) -> &'a LiteralExpr { | ||
1971 | LiteralExpr::cast(&n.syntax).unwrap() | ||
1972 | } | ||
1973 | } | ||
1974 | impl<'a> From<&'a FalseKw> for &'a LiteralExpr { | ||
1975 | fn from(n: &'a FalseKw) -> &'a LiteralExpr { | ||
1976 | LiteralExpr::cast(&n.syntax).unwrap() | ||
1977 | } | ||
1978 | } | ||
1979 | |||
1980 | |||
1981 | impl AstNode for LiteralExpr { | ||
1982 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
1983 | match syntax.kind() { | ||
1984 | | STRING | ||
1985 | | BYTE_STRING | ||
1986 | | RAW_STRING | ||
1987 | | RAW_BYTE_STRING | ||
1988 | | CHAR | ||
1989 | | BYTE | ||
1990 | | INT_NUMBER | ||
1991 | | FLOAT_NUMBER | ||
1992 | | TRUE_KW | ||
1993 | | FALSE_KW => Some(LiteralExpr::from_repr(syntax.into_repr())), | ||
1994 | _ => None, | ||
1995 | } | ||
1996 | } | ||
1997 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
1998 | } | ||
1999 | |||
2000 | impl ToOwned for LiteralExpr { | ||
2001 | type Owned = TreeArc<LiteralExpr>; | ||
2002 | fn to_owned(&self) -> TreeArc<LiteralExpr> { TreeArc::cast(self.syntax.to_owned()) } | ||
2003 | } | ||
2004 | |||
2005 | impl LiteralExpr { | ||
2006 | pub fn kind(&self) -> LiteralExprKind { | ||
2007 | match self.syntax.kind() { | ||
2008 | STRING => LiteralExprKind::String(String::cast(&self.syntax).unwrap()), | ||
2009 | BYTE_STRING => LiteralExprKind::ByteString(ByteString::cast(&self.syntax).unwrap()), | ||
2010 | RAW_STRING => LiteralExprKind::RawString(RawString::cast(&self.syntax).unwrap()), | ||
2011 | RAW_BYTE_STRING => LiteralExprKind::RawByteString(RawByteString::cast(&self.syntax).unwrap()), | ||
2012 | CHAR => LiteralExprKind::Char(Char::cast(&self.syntax).unwrap()), | ||
2013 | BYTE => LiteralExprKind::Byte(Byte::cast(&self.syntax).unwrap()), | ||
2014 | INT_NUMBER => LiteralExprKind::IntNumber(IntNumber::cast(&self.syntax).unwrap()), | ||
2015 | FLOAT_NUMBER => LiteralExprKind::FloatNumber(FloatNumber::cast(&self.syntax).unwrap()), | ||
2016 | TRUE_KW => LiteralExprKind::TrueKw(TrueKw::cast(&self.syntax).unwrap()), | ||
2017 | FALSE_KW => LiteralExprKind::FalseKw(FalseKw::cast(&self.syntax).unwrap()), | ||
2018 | _ => unreachable!(), | ||
2019 | } | ||
2020 | } | ||
2021 | } | ||
2022 | |||
2023 | impl LiteralExpr {} | ||
2024 | 1661 | ||
2025 | // LiteralPat | 1662 | // LiteralPat |
2026 | #[derive(Debug, PartialEq, Eq, Hash)] | 1663 | #[derive(Debug, PartialEq, Eq, Hash)] |
@@ -3404,64 +3041,6 @@ impl ToOwned for RangePat { | |||
3404 | 3041 | ||
3405 | impl RangePat {} | 3042 | impl RangePat {} |
3406 | 3043 | ||
3407 | // RawByteString | ||
3408 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
3409 | #[repr(transparent)] | ||
3410 | pub struct RawByteString { | ||
3411 | pub(crate) syntax: SyntaxNode, | ||
3412 | } | ||
3413 | unsafe impl TransparentNewType for RawByteString { | ||
3414 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
3415 | } | ||
3416 | |||
3417 | impl AstNode for RawByteString { | ||
3418 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
3419 | match syntax.kind() { | ||
3420 | RAW_BYTE_STRING => Some(RawByteString::from_repr(syntax.into_repr())), | ||
3421 | _ => None, | ||
3422 | } | ||
3423 | } | ||
3424 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
3425 | } | ||
3426 | |||
3427 | impl ToOwned for RawByteString { | ||
3428 | type Owned = TreeArc<RawByteString>; | ||
3429 | fn to_owned(&self) -> TreeArc<RawByteString> { TreeArc::cast(self.syntax.to_owned()) } | ||
3430 | } | ||
3431 | |||
3432 | |||
3433 | impl ast::AstToken for RawByteString {} | ||
3434 | impl RawByteString {} | ||
3435 | |||
3436 | // RawString | ||
3437 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
3438 | #[repr(transparent)] | ||
3439 | pub struct RawString { | ||
3440 | pub(crate) syntax: SyntaxNode, | ||
3441 | } | ||
3442 | unsafe impl TransparentNewType for RawString { | ||
3443 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
3444 | } | ||
3445 | |||
3446 | impl AstNode for RawString { | ||
3447 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
3448 | match syntax.kind() { | ||
3449 | RAW_STRING => Some(RawString::from_repr(syntax.into_repr())), | ||
3450 | _ => None, | ||
3451 | } | ||
3452 | } | ||
3453 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
3454 | } | ||
3455 | |||
3456 | impl ToOwned for RawString { | ||
3457 | type Owned = TreeArc<RawString>; | ||
3458 | fn to_owned(&self) -> TreeArc<RawString> { TreeArc::cast(self.syntax.to_owned()) } | ||
3459 | } | ||
3460 | |||
3461 | |||
3462 | impl ast::AstToken for RawString {} | ||
3463 | impl RawString {} | ||
3464 | |||
3465 | // RefExpr | 3044 | // RefExpr |
3466 | #[derive(Debug, PartialEq, Eq, Hash)] | 3045 | #[derive(Debug, PartialEq, Eq, Hash)] |
3467 | #[repr(transparent)] | 3046 | #[repr(transparent)] |
@@ -3622,34 +3201,6 @@ impl ReturnExpr { | |||
3622 | } | 3201 | } |
3623 | } | 3202 | } |
3624 | 3203 | ||
3625 | // SelfKw | ||
3626 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
3627 | #[repr(transparent)] | ||
3628 | pub struct SelfKw { | ||
3629 | pub(crate) syntax: SyntaxNode, | ||
3630 | } | ||
3631 | unsafe impl TransparentNewType for SelfKw { | ||
3632 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
3633 | } | ||
3634 | |||
3635 | impl AstNode for SelfKw { | ||
3636 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
3637 | match syntax.kind() { | ||
3638 | SELF_KW => Some(SelfKw::from_repr(syntax.into_repr())), | ||
3639 | _ => None, | ||
3640 | } | ||
3641 | } | ||
3642 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
3643 | } | ||
3644 | |||
3645 | impl ToOwned for SelfKw { | ||
3646 | type Owned = TreeArc<SelfKw>; | ||
3647 | fn to_owned(&self) -> TreeArc<SelfKw> { TreeArc::cast(self.syntax.to_owned()) } | ||
3648 | } | ||
3649 | |||
3650 | |||
3651 | impl SelfKw {} | ||
3652 | |||
3653 | // SelfParam | 3204 | // SelfParam |
3654 | #[derive(Debug, PartialEq, Eq, Hash)] | 3205 | #[derive(Debug, PartialEq, Eq, Hash)] |
3655 | #[repr(transparent)] | 3206 | #[repr(transparent)] |
@@ -3677,11 +3228,7 @@ impl ToOwned for SelfParam { | |||
3677 | 3228 | ||
3678 | 3229 | ||
3679 | impl ast::TypeAscriptionOwner for SelfParam {} | 3230 | impl ast::TypeAscriptionOwner for SelfParam {} |
3680 | impl SelfParam { | 3231 | impl SelfParam {} |
3681 | pub fn self_kw(&self) -> Option<&SelfKw> { | ||
3682 | super::child_opt(self) | ||
3683 | } | ||
3684 | } | ||
3685 | 3232 | ||
3686 | // SlicePat | 3233 | // SlicePat |
3687 | #[derive(Debug, PartialEq, Eq, Hash)] | 3234 | #[derive(Debug, PartialEq, Eq, Hash)] |
@@ -3866,35 +3413,6 @@ impl Stmt { | |||
3866 | 3413 | ||
3867 | impl Stmt {} | 3414 | impl Stmt {} |
3868 | 3415 | ||
3869 | // String | ||
3870 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
3871 | #[repr(transparent)] | ||
3872 | pub struct String { | ||
3873 | pub(crate) syntax: SyntaxNode, | ||
3874 | } | ||
3875 | unsafe impl TransparentNewType for String { | ||
3876 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
3877 | } | ||
3878 | |||
3879 | impl AstNode for String { | ||
3880 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
3881 | match syntax.kind() { | ||
3882 | STRING => Some(String::from_repr(syntax.into_repr())), | ||
3883 | _ => None, | ||
3884 | } | ||
3885 | } | ||
3886 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
3887 | } | ||
3888 | |||
3889 | impl ToOwned for String { | ||
3890 | type Owned = TreeArc<String>; | ||
3891 | fn to_owned(&self) -> TreeArc<String> { TreeArc::cast(self.syntax.to_owned()) } | ||
3892 | } | ||
3893 | |||
3894 | |||
3895 | impl ast::AstToken for String {} | ||
3896 | impl String {} | ||
3897 | |||
3898 | // StructDef | 3416 | // StructDef |
3899 | #[derive(Debug, PartialEq, Eq, Hash)] | 3417 | #[derive(Debug, PartialEq, Eq, Hash)] |
3900 | #[repr(transparent)] | 3418 | #[repr(transparent)] |
@@ -4070,35 +3588,6 @@ impl TraitDef { | |||
4070 | } | 3588 | } |
4071 | } | 3589 | } |
4072 | 3590 | ||
4073 | // TrueKw | ||
4074 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
4075 | #[repr(transparent)] | ||
4076 | pub struct TrueKw { | ||
4077 | pub(crate) syntax: SyntaxNode, | ||
4078 | } | ||
4079 | unsafe impl TransparentNewType for TrueKw { | ||
4080 | type Repr = rowan::SyntaxNode<RaTypes>; | ||
4081 | } | ||
4082 | |||
4083 | impl AstNode for TrueKw { | ||
4084 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | ||
4085 | match syntax.kind() { | ||
4086 | TRUE_KW => Some(TrueKw::from_repr(syntax.into_repr())), | ||
4087 | _ => None, | ||
4088 | } | ||
4089 | } | ||
4090 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
4091 | } | ||
4092 | |||
4093 | impl ToOwned for TrueKw { | ||
4094 | type Owned = TreeArc<TrueKw>; | ||
4095 | fn to_owned(&self) -> TreeArc<TrueKw> { TreeArc::cast(self.syntax.to_owned()) } | ||
4096 | } | ||
4097 | |||
4098 | |||
4099 | impl ast::AstToken for TrueKw {} | ||
4100 | impl TrueKw {} | ||
4101 | |||
4102 | // TryExpr | 3591 | // TryExpr |
4103 | #[derive(Debug, PartialEq, Eq, Hash)] | 3592 | #[derive(Debug, PartialEq, Eq, Hash)] |
4104 | #[repr(transparent)] | 3593 | #[repr(transparent)] |
@@ -4403,10 +3892,6 @@ impl TypeBound { | |||
4403 | pub fn type_ref(&self) -> Option<&TypeRef> { | 3892 | pub fn type_ref(&self) -> Option<&TypeRef> { |
4404 | super::child_opt(self) | 3893 | super::child_opt(self) |
4405 | } | 3894 | } |
4406 | |||
4407 | pub fn lifetime(&self) -> Option<&Lifetime> { | ||
4408 | super::child_opt(self) | ||
4409 | } | ||
4410 | } | 3895 | } |
4411 | 3896 | ||
4412 | // TypeBoundList | 3897 | // TypeBoundList |
@@ -4847,10 +4332,6 @@ impl WherePred { | |||
4847 | pub fn type_ref(&self) -> Option<&TypeRef> { | 4332 | pub fn type_ref(&self) -> Option<&TypeRef> { |
4848 | super::child_opt(self) | 4333 | super::child_opt(self) |
4849 | } | 4334 | } |
4850 | |||
4851 | pub fn lifetime(&self) -> Option<&Lifetime> { | ||
4852 | super::child_opt(self) | ||
4853 | } | ||
4854 | } | 4335 | } |
4855 | 4336 | ||
4856 | // WhileExpr | 4337 | // WhileExpr |
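
All of the generated token wrappers deleted above (`Byte`, `Char`, `Comment`, `IntNumber`, `Lifetime`, `SelfKw`, `String`, `TrueKw`, and friends) are replaced by direct token lookups. The pattern below mirrors the hand-written `lifetime_token()` accessor added in `ast.rs`; only the free-function form is illustrative:

    use ra_syntax::{ast, AstNode, SyntaxKind::LIFETIME, SyntaxToken};

    fn lifetime_of(param: &ast::LifetimeParam) -> Option<SyntaxToken> {
        // Look for the LIFETIME token among the element children instead of
        // going through a dedicated `Lifetime` node.
        param
            .syntax()
            .children_with_tokens()
            .filter_map(|it| it.as_token())
            .find(|it| it.kind() == LIFETIME)
    }
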
diff --git a/crates/ra_syntax/src/grammar.ron b/crates/ra_syntax/src/grammar.ron
index 1123c2e95..6d7a5a1cb 100644
--- a/crates/ra_syntax/src/grammar.ron
+++ b/crates/ra_syntax/src/grammar.ron
@@ -463,31 +463,7 @@ Grammar( | |||
463 | "RangeExpr": (), | 463 | "RangeExpr": (), |
464 | "BinExpr": (), | 464 | "BinExpr": (), |
465 | 465 | ||
466 | "IntNumber": ( traits: ["AstToken"] ), | 466 | "Literal": (), |
467 | "FloatNumber": ( traits: ["AstToken"] ), | ||
468 | "String": ( traits: ["AstToken"] ), | ||
469 | "RawString": ( traits: ["AstToken"] ), | ||
470 | "Byte": ( traits: ["AstToken"] ), | ||
471 | "RawByteString": ( traits: ["AstToken"] ), | ||
472 | "ByteString": ( traits: ["AstToken"] ), | ||
473 | "Char": ( traits: ["AstToken"] ), | ||
474 | "TrueKw": ( traits: ["AstToken"] ), | ||
475 | "FalseKw": ( traits: ["AstToken"] ), | ||
476 | "LiteralExpr": ( | ||
477 | enum: [ | ||
478 | "String", | ||
479 | "ByteString", | ||
480 | "RawString", | ||
481 | "RawByteString", | ||
482 | "Char", | ||
483 | "Byte", | ||
484 | "IntNumber", | ||
485 | "FloatNumber", | ||
486 | "TrueKw", | ||
487 | "FalseKw", | ||
488 | ] | ||
489 | ), | ||
490 | "Literal": (options: ["LiteralExpr"]), | ||
491 | 467 | ||
492 | "Expr": ( | 468 | "Expr": ( |
493 | enum: [ | 469 | enum: [ |
@@ -580,14 +556,11 @@ Grammar( | |||
580 | ), | 556 | ), |
581 | "TypeParam": ( traits: ["NameOwner", "AttrsOwner", "TypeBoundsOwner"] ), | 557 | "TypeParam": ( traits: ["NameOwner", "AttrsOwner", "TypeBoundsOwner"] ), |
582 | "LifetimeParam": ( | 558 | "LifetimeParam": ( |
583 | options: [ "Lifetime"], | ||
584 | traits: ["AttrsOwner"], | 559 | traits: ["AttrsOwner"], |
585 | ), | 560 | ), |
586 | "Lifetime": ( traits: ["AstToken"] ), | ||
587 | "TypeBound": ( | 561 | "TypeBound": ( |
588 | options: [ | 562 | options: [ |
589 | "TypeRef", | 563 | "TypeRef", |
590 | "Lifetime", | ||
591 | ] | 564 | ] |
592 | ), | 565 | ), |
593 | "TypeBoundList": ( | 566 | "TypeBoundList": ( |
@@ -598,7 +571,6 @@ Grammar( | |||
598 | "WherePred": ( | 571 | "WherePred": ( |
599 | options: [ | 572 | options: [ |
600 | "TypeRef", | 573 | "TypeRef", |
601 | "Lifetime", | ||
602 | ], | 574 | ], |
603 | traits: [ | 575 | traits: [ |
604 | "TypeBoundsOwner", | 576 | "TypeBoundsOwner", |
@@ -643,12 +615,10 @@ Grammar( | |||
643 | ] | 615 | ] |
644 | ), | 616 | ), |
645 | "SelfParam": ( | 617 | "SelfParam": ( |
646 | options: ["SelfKw"], | ||
647 | traits: [ | 618 | traits: [ |
648 | "TypeAscriptionOwner", | 619 | "TypeAscriptionOwner", |
649 | ] | 620 | ] |
650 | ), | 621 | ), |
651 | "SelfKw": (), | ||
652 | "Param": ( | 622 | "Param": ( |
653 | options: [ "Pat" ], | 623 | options: [ "Pat" ], |
654 | traits: [ | 624 | traits: [ |
@@ -692,8 +662,7 @@ Grammar( | |||
692 | ]), | 662 | ]), |
693 | "TypeArg": (options: ["TypeRef"]), | 663 | "TypeArg": (options: ["TypeRef"]), |
694 | "AssocTypeArg": (options: ["NameRef", "TypeRef"]), | 664 | "AssocTypeArg": (options: ["NameRef", "TypeRef"]), |
695 | "LifetimeArg": (options: ["Lifetime"]), | 665 | "LifetimeArg": (), |
696 | "Comment": ( traits: ["AstToken"] ), | ||
697 | "Whitespace": ( traits: ["AstToken"] ), | 666 | "Whitespace": ( traits: ["AstToken"] ), |
698 | }, | 667 | }, |
699 | ) | 668 | ) |
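
With the per-token entries and the `LiteralExpr` enum removed from `grammar.ron`, literal classification now goes through `Literal::token()` and `Literal::flavor()` as defined in `ast.rs`. A small sketch of a caller; the helper name is invented, and the `SmolStr` re-export from the crate root is assumed:

    use ra_syntax::{ast, SmolStr};

    // Extract the suffix of an integer literal, e.g. `1u32` -> Some("u32").
    fn int_suffix(lit: &ast::Literal) -> Option<SmolStr> {
        match lit.flavor() {
            ast::LiteralFlavor::IntNumber { suffix } => suffix,
            _ => None,
        }
    }
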
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs
index 4f3020440..e1088e296 100644
--- a/crates/ra_syntax/src/lib.rs
+++ b/crates/ra_syntax/src/lib.rs
@@ -38,7 +38,7 @@ pub use crate::{ | |||
38 | ast::AstNode, | 38 | ast::AstNode, |
39 | syntax_error::{SyntaxError, SyntaxErrorKind, Location}, | 39 | syntax_error::{SyntaxError, SyntaxErrorKind, Location}, |
40 | syntax_text::SyntaxText, | 40 | syntax_text::SyntaxText, |
41 | syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder}, | 41 | syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder, SyntaxElement, SyntaxToken}, |
42 | ptr::{SyntaxNodePtr, AstPtr}, | 42 | ptr::{SyntaxNodePtr, AstPtr}, |
43 | parsing::{tokenize, Token}, | 43 | parsing::{tokenize, Token}, |
44 | }; | 44 | }; |
@@ -70,7 +70,7 @@ impl SourceFile { | |||
70 | 70 | ||
71 | pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<TreeArc<SourceFile>> { | 71 | pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<TreeArc<SourceFile>> { |
72 | parsing::incremental_reparse(self.syntax(), edit, self.errors()) | 72 | parsing::incremental_reparse(self.syntax(), edit, self.errors()) |
73 | .map(|(green_node, errors)| SourceFile::new(green_node, errors)) | 73 | .map(|(green_node, errors, _reparsed_range)| SourceFile::new(green_node, errors)) |
74 | } | 74 | } |
75 | 75 | ||
76 | fn full_reparse(&self, edit: &AtomTextEdit) -> TreeArc<SourceFile> { | 76 | fn full_reparse(&self, edit: &AtomTextEdit) -> TreeArc<SourceFile> { |
@@ -179,15 +179,23 @@ fn api_walkthrough() { | |||
179 | 179 | ||
180 | // There's a bunch of traversal methods on `SyntaxNode`: | 180 | // There's a bunch of traversal methods on `SyntaxNode`: |
181 | assert_eq!(expr_syntax.parent(), Some(block.syntax())); | 181 | assert_eq!(expr_syntax.parent(), Some(block.syntax())); |
182 | assert_eq!(block.syntax().first_child().map(|it| it.kind()), Some(SyntaxKind::L_CURLY)); | 182 | assert_eq!( |
183 | assert_eq!(expr_syntax.next_sibling().map(|it| it.kind()), Some(SyntaxKind::WHITESPACE)); | 183 | block.syntax().first_child_or_token().map(|it| it.kind()), |
184 | Some(SyntaxKind::L_CURLY) | ||
185 | ); | ||
186 | assert_eq!( | ||
187 | expr_syntax.next_sibling_or_token().map(|it| it.kind()), | ||
188 | Some(SyntaxKind::WHITESPACE) | ||
189 | ); | ||
184 | 190 | ||
185 | // As well as some iterator helpers: | 191 | // As well as some iterator helpers: |
186 | let f = expr_syntax.ancestors().find_map(ast::FnDef::cast); | 192 | let f = expr_syntax.ancestors().find_map(ast::FnDef::cast); |
187 | assert_eq!(f, Some(&*func)); | 193 | assert_eq!(f, Some(&*func)); |
188 | assert!(expr_syntax.siblings(Direction::Next).any(|it| it.kind() == SyntaxKind::R_CURLY)); | 194 | assert!(expr_syntax |
195 | .siblings_with_tokens(Direction::Next) | ||
196 | .any(|it| it.kind() == SyntaxKind::R_CURLY)); | ||
189 | assert_eq!( | 197 | assert_eq!( |
190 | expr_syntax.descendants().count(), | 198 | expr_syntax.descendants_with_tokens().count(), |
191 | 8, // 5 tokens `1`, ` `, `+`, ` `, `!` | 199 | 8, // 5 tokens `1`, ` `, `+`, ` `, `!` |
192 | // 2 child literal expressions: `1`, `1` | 200 | // 2 child literal expressions: `1`, `1` |
193 | // 1 the node itself: `1 + 1` | 201 | // 1 the node itself: `1 + 1` |
@@ -196,16 +204,14 @@ fn api_walkthrough() { | |||
196 | // There's also a `preorder` method with a more fine-grained iteration control: | 204 | // There's also a `preorder` method with a more fine-grained iteration control: |
197 | let mut buf = String::new(); | 205 | let mut buf = String::new(); |
198 | let mut indent = 0; | 206 | let mut indent = 0; |
199 | for event in expr_syntax.preorder() { | 207 | for event in expr_syntax.preorder_with_tokens() { |
200 | match event { | 208 | match event { |
201 | WalkEvent::Enter(node) => { | 209 | WalkEvent::Enter(node) => { |
202 | buf += &format!( | 210 | let text = match node { |
203 | "{:indent$}{:?} {:?}\n", | 211 | SyntaxElement::Node(it) => it.text().to_string(), |
204 | " ", | 212 | SyntaxElement::Token(it) => it.text().to_string(), |
205 | node.text(), | 213 | }; |
206 | node.kind(), | 214 | buf += &format!("{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent); |
207 | indent = indent | ||
208 | ); | ||
209 | indent += 2; | 215 | indent += 2; |
210 | } | 216 | } |
211 | WalkEvent::Leave(_) => indent -= 2, | 217 | WalkEvent::Leave(_) => indent -= 2, |
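As a reading aid, here is the same traversal pattern from the walkthrough above, packaged as a stand-alone sketch driven from outside the crate. It only uses items re-exported in the lib.rs hunk; the function name and its caller-facing shape are illustrative, not crate API.

    use ra_syntax::{AstNode, SourceFile, SyntaxElement, WalkEvent};

    // Dump every node and token of a file, indented by tree depth.
    fn dump_tree(text: &str) -> String {
        let file = SourceFile::parse(text);
        let mut buf = String::new();
        let mut indent = 0;
        for event in file.syntax().preorder_with_tokens() {
            match event {
                WalkEvent::Enter(element) => {
                    // Nodes and tokens are distinct now, but both know their kind and text.
                    let element_text = match element {
                        SyntaxElement::Node(it) => it.text().to_string(),
                        SyntaxElement::Token(it) => it.text().to_string(),
                    };
                    buf += &format!("{:indent$}{:?} {:?}\n", " ", element_text, element.kind(), indent = indent);
                    indent += 2;
                }
                WalkEvent::Leave(_) => indent -= 2,
            }
        }
        buf
    }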
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs index 7e7f914f5..69887f500 100644 --- a/crates/ra_syntax/src/parsing/reparsing.rs +++ b/crates/ra_syntax/src/parsing/reparsing.rs | |||
@@ -12,7 +12,7 @@ use ra_parser::Reparser; | |||
12 | use crate::{ | 12 | use crate::{ |
13 | SyntaxKind::*, TextRange, TextUnit, SyntaxError, | 13 | SyntaxKind::*, TextRange, TextUnit, SyntaxError, |
14 | algo, | 14 | algo, |
15 | syntax_node::{GreenNode, SyntaxNode}, | 15 | syntax_node::{GreenNode, SyntaxNode, GreenToken, SyntaxElement}, |
16 | parsing::{ | 16 | parsing::{ |
17 | text_token_source::TextTokenSource, | 17 | text_token_source::TextTokenSource, |
18 | text_tree_sink::TextTreeSink, | 18 | text_tree_sink::TextTreeSink, |
@@ -24,60 +24,62 @@ pub(crate) fn incremental_reparse( | |||
24 | node: &SyntaxNode, | 24 | node: &SyntaxNode, |
25 | edit: &AtomTextEdit, | 25 | edit: &AtomTextEdit, |
26 | errors: Vec<SyntaxError>, | 26 | errors: Vec<SyntaxError>, |
27 | ) -> Option<(GreenNode, Vec<SyntaxError>)> { | 27 | ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { |
28 | let (node, green, new_errors) = | 28 | if let Some((green, old_range)) = reparse_token(node, &edit) { |
29 | reparse_leaf(node, &edit).or_else(|| reparse_block(node, &edit))?; | 29 | return Some((green, merge_errors(errors, Vec::new(), old_range, edit), old_range)); |
30 | let green_root = node.replace_with(green); | 30 | } |
31 | let errors = merge_errors(errors, new_errors, node, edit); | 31 | |
32 | Some((green_root, errors)) | 32 | if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) { |
33 | return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); | ||
34 | } | ||
35 | None | ||
33 | } | 36 | } |
34 | 37 | ||
35 | fn reparse_leaf<'node>( | 38 | fn reparse_token<'node>( |
36 | root: &'node SyntaxNode, | 39 | root: &'node SyntaxNode, |
37 | edit: &AtomTextEdit, | 40 | edit: &AtomTextEdit, |
38 | ) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> { | 41 | ) -> Option<(GreenNode, TextRange)> { |
39 | let node = algo::find_covering_node(root, edit.delete); | 42 | let token = algo::find_covering_element(root, edit.delete).as_token()?; |
40 | match node.kind() { | 43 | match token.kind() { |
41 | WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { | 44 | WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { |
42 | if node.kind() == WHITESPACE || node.kind() == COMMENT { | 45 | if token.kind() == WHITESPACE || token.kind() == COMMENT { |
43 | // removing a newline may extend the previous token | 46 | // removing a newline may extend the previous token |
44 | if node.text().to_string()[edit.delete - node.range().start()].contains('\n') { | 47 | if token.text().to_string()[edit.delete - token.range().start()].contains('\n') { |
45 | return None; | 48 | return None; |
46 | } | 49 | } |
47 | } | 50 | } |
48 | 51 | ||
49 | let text = get_text_after_edit(node, &edit); | 52 | let text = get_text_after_edit(token.into(), &edit); |
50 | let tokens = tokenize(&text); | 53 | let lex_tokens = tokenize(&text); |
51 | let token = match tokens[..] { | 54 | let lex_token = match lex_tokens[..] { |
52 | [token] if token.kind == node.kind() => token, | 55 | [lex_token] if lex_token.kind == token.kind() => lex_token, |
53 | _ => return None, | 56 | _ => return None, |
54 | }; | 57 | }; |
55 | 58 | ||
56 | if token.kind == IDENT && is_contextual_kw(&text) { | 59 | if lex_token.kind == IDENT && is_contextual_kw(&text) { |
57 | return None; | 60 | return None; |
58 | } | 61 | } |
59 | 62 | ||
60 | if let Some(next_char) = root.text().char_at(node.range().end()) { | 63 | if let Some(next_char) = root.text().char_at(token.range().end()) { |
61 | let tokens_with_next_char = tokenize(&format!("{}{}", text, next_char)); | 64 | let tokens_with_next_char = tokenize(&format!("{}{}", text, next_char)); |
62 | if tokens_with_next_char.len() == 1 { | 65 | if tokens_with_next_char.len() == 1 { |
63 | return None; | 66 | return None; |
64 | } | 67 | } |
65 | } | 68 | } |
66 | 69 | ||
67 | let green = GreenNode::new_leaf(node.kind(), text.into()); | 70 | let new_token = GreenToken::new(token.kind(), text.into()); |
68 | let new_errors = vec![]; | 71 | Some((token.replace_with(new_token), token.range())) |
69 | Some((node, green, new_errors)) | ||
70 | } | 72 | } |
71 | _ => None, | 73 | _ => None, |
72 | } | 74 | } |
73 | } | 75 | } |
74 | 76 | ||
75 | fn reparse_block<'node>( | 77 | fn reparse_block<'node>( |
76 | node: &'node SyntaxNode, | 78 | root: &'node SyntaxNode, |
77 | edit: &AtomTextEdit, | 79 | edit: &AtomTextEdit, |
78 | ) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> { | 80 | ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { |
79 | let (node, reparser) = find_reparsable_node(node, edit.delete)?; | 81 | let (node, reparser) = find_reparsable_node(root, edit.delete)?; |
80 | let text = get_text_after_edit(node, &edit); | 82 | let text = get_text_after_edit(node.into(), &edit); |
81 | let tokens = tokenize(&text); | 83 | let tokens = tokenize(&text); |
82 | if !is_balanced(&tokens) { | 84 | if !is_balanced(&tokens) { |
83 | return None; | 85 | return None; |
@@ -86,12 +88,16 @@ fn reparse_block<'node>( | |||
86 | let mut tree_sink = TextTreeSink::new(&text, &tokens); | 88 | let mut tree_sink = TextTreeSink::new(&text, &tokens); |
87 | reparser.parse(&token_source, &mut tree_sink); | 89 | reparser.parse(&token_source, &mut tree_sink); |
88 | let (green, new_errors) = tree_sink.finish(); | 90 | let (green, new_errors) = tree_sink.finish(); |
89 | Some((node, green, new_errors)) | 91 | Some((node.replace_with(green), new_errors, node.range())) |
90 | } | 92 | } |
91 | 93 | ||
92 | fn get_text_after_edit(node: &SyntaxNode, edit: &AtomTextEdit) -> String { | 94 | fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String { |
93 | let edit = AtomTextEdit::replace(edit.delete - node.range().start(), edit.insert.clone()); | 95 | let edit = AtomTextEdit::replace(edit.delete - element.range().start(), edit.insert.clone()); |
94 | edit.apply(node.text().to_string()) | 96 | let text = match element { |
97 | SyntaxElement::Token(token) => token.text().to_string(), | ||
98 | SyntaxElement::Node(node) => node.text().to_string(), | ||
99 | }; | ||
100 | edit.apply(text) | ||
95 | } | 101 | } |
96 | 102 | ||
97 | fn is_contextual_kw(text: &str) -> bool { | 103 | fn is_contextual_kw(text: &str) -> bool { |
@@ -102,9 +108,13 @@ fn is_contextual_kw(text: &str) -> bool { | |||
102 | } | 108 | } |
103 | 109 | ||
104 | fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, Reparser)> { | 110 | fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, Reparser)> { |
105 | let node = algo::find_covering_node(node, range); | 111 | let node = algo::find_covering_element(node, range); |
106 | node.ancestors().find_map(|node| { | 112 | let mut ancestors = match node { |
107 | let first_child = node.first_child().map(|it| it.kind()); | 113 | SyntaxElement::Token(it) => it.parent().ancestors(), |
114 | SyntaxElement::Node(it) => it.ancestors(), | ||
115 | }; | ||
116 | ancestors.find_map(|node| { | ||
117 | let first_child = node.first_child_or_token().map(|it| it.kind()); | ||
108 | let parent = node.parent().map(|it| it.kind()); | 118 | let parent = node.parent().map(|it| it.kind()); |
109 | Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r)) | 119 | Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r)) |
110 | }) | 120 | }) |
@@ -136,19 +146,19 @@ fn is_balanced(tokens: &[Token]) -> bool { | |||
136 | fn merge_errors( | 146 | fn merge_errors( |
137 | old_errors: Vec<SyntaxError>, | 147 | old_errors: Vec<SyntaxError>, |
138 | new_errors: Vec<SyntaxError>, | 148 | new_errors: Vec<SyntaxError>, |
139 | old_node: &SyntaxNode, | 149 | old_range: TextRange, |
140 | edit: &AtomTextEdit, | 150 | edit: &AtomTextEdit, |
141 | ) -> Vec<SyntaxError> { | 151 | ) -> Vec<SyntaxError> { |
142 | let mut res = Vec::new(); | 152 | let mut res = Vec::new(); |
143 | for e in old_errors { | 153 | for e in old_errors { |
144 | if e.offset() <= old_node.range().start() { | 154 | if e.offset() <= old_range.start() { |
145 | res.push(e) | 155 | res.push(e) |
146 | } else if e.offset() >= old_node.range().end() { | 156 | } else if e.offset() >= old_range.end() { |
147 | res.push(e.add_offset(TextUnit::of_str(&edit.insert), edit.delete.len())); | 157 | res.push(e.add_offset(TextUnit::of_str(&edit.insert), edit.delete.len())); |
148 | } | 158 | } |
149 | } | 159 | } |
150 | for e in new_errors { | 160 | for e in new_errors { |
151 | res.push(e.add_offset(old_node.range().start(), 0.into())); | 161 | res.push(e.add_offset(old_range.start(), 0.into())); |
152 | } | 162 | } |
153 | res | 163 | res |
154 | } | 164 | } |
@@ -160,13 +170,7 @@ mod tests { | |||
160 | use crate::{SourceFile, AstNode}; | 170 | use crate::{SourceFile, AstNode}; |
161 | use super::*; | 171 | use super::*; |
162 | 172 | ||
163 | fn do_check<F>(before: &str, replace_with: &str, reparser: F) | 173 | fn do_check(before: &str, replace_with: &str, reparsed_len: u32) { |
164 | where | ||
165 | for<'a> F: Fn( | ||
166 | &'a SyntaxNode, | ||
167 | &AtomTextEdit, | ||
168 | ) -> Option<(&'a SyntaxNode, GreenNode, Vec<SyntaxError>)>, | ||
169 | { | ||
170 | let (range, before) = extract_range(before); | 174 | let (range, before) = extract_range(before); |
171 | let edit = AtomTextEdit::replace(range, replace_with.to_owned()); | 175 | let edit = AtomTextEdit::replace(range, replace_with.to_owned()); |
172 | let after = edit.apply(before.clone()); | 176 | let after = edit.apply(before.clone()); |
@@ -175,23 +179,20 @@ mod tests { | |||
175 | let incrementally_reparsed = { | 179 | let incrementally_reparsed = { |
176 | let f = SourceFile::parse(&before); | 180 | let f = SourceFile::parse(&before); |
177 | let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() }; | 181 | let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() }; |
178 | let (node, green, new_errors) = | 182 | let (green, new_errors, range) = |
179 | reparser(f.syntax(), &edit).expect("cannot incrementally reparse"); | 183 | incremental_reparse(f.syntax(), &edit, f.errors()).unwrap(); |
180 | let green_root = node.replace_with(green); | 184 | assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); |
181 | let errors = super::merge_errors(f.errors(), new_errors, node, &edit); | 185 | SourceFile::new(green, new_errors) |
182 | SourceFile::new(green_root, errors) | ||
183 | }; | 186 | }; |
184 | 187 | ||
185 | assert_eq_text!( | 188 | assert_eq_text!( |
186 | &fully_reparsed.syntax().debug_dump(), | 189 | &fully_reparsed.syntax().debug_dump(), |
187 | &incrementally_reparsed.syntax().debug_dump(), | 190 | &incrementally_reparsed.syntax().debug_dump(), |
188 | ) | 191 | ); |
189 | } | 192 | } |
190 | 193 | ||
191 | #[test] | 194 | #[test] // FIXME: some tests here actually test token reparsing |
192 | fn reparse_block_tests() { | 195 | fn reparse_block_tests() { |
193 | let do_check = |before, replace_to| do_check(before, replace_to, reparse_block); | ||
194 | |||
195 | do_check( | 196 | do_check( |
196 | r" | 197 | r" |
197 | fn foo() { | 198 | fn foo() { |
@@ -199,6 +200,7 @@ fn foo() { | |||
199 | } | 200 | } |
200 | ", | 201 | ", |
201 | "baz", | 202 | "baz", |
203 | 3, | ||
202 | ); | 204 | ); |
203 | do_check( | 205 | do_check( |
204 | r" | 206 | r" |
@@ -207,6 +209,7 @@ fn foo() { | |||
207 | } | 209 | } |
208 | ", | 210 | ", |
209 | "baz", | 211 | "baz", |
212 | 25, | ||
210 | ); | 213 | ); |
211 | do_check( | 214 | do_check( |
212 | r" | 215 | r" |
@@ -215,6 +218,7 @@ struct Foo { | |||
215 | } | 218 | } |
216 | ", | 219 | ", |
217 | ",\n g: (),", | 220 | ",\n g: (),", |
221 | 14, | ||
218 | ); | 222 | ); |
219 | do_check( | 223 | do_check( |
220 | r" | 224 | r" |
@@ -225,6 +229,7 @@ fn foo { | |||
225 | } | 229 | } |
226 | ", | 230 | ", |
227 | "62", | 231 | "62", |
232 | 31, // FIXME: reparse only int literal here | ||
228 | ); | 233 | ); |
229 | do_check( | 234 | do_check( |
230 | r" | 235 | r" |
@@ -233,7 +238,9 @@ mod foo { | |||
233 | } | 238 | } |
234 | ", | 239 | ", |
235 | "bar", | 240 | "bar", |
241 | 11, | ||
236 | ); | 242 | ); |
243 | |||
237 | do_check( | 244 | do_check( |
238 | r" | 245 | r" |
239 | trait Foo { | 246 | trait Foo { |
@@ -241,6 +248,7 @@ trait Foo { | |||
241 | } | 248 | } |
242 | ", | 249 | ", |
243 | "Output", | 250 | "Output", |
251 | 3, | ||
244 | ); | 252 | ); |
245 | do_check( | 253 | do_check( |
246 | r" | 254 | r" |
@@ -249,13 +257,9 @@ impl IntoIterator<Item=i32> for Foo { | |||
249 | } | 257 | } |
250 | ", | 258 | ", |
251 | "n next(", | 259 | "n next(", |
260 | 9, | ||
252 | ); | 261 | ); |
253 | do_check( | 262 | do_check(r"use a::b::{foo,<|>,bar<|>};", "baz", 10); |
254 | r" | ||
255 | use a::b::{foo,<|>,bar<|>}; | ||
256 | ", | ||
257 | "baz", | ||
258 | ); | ||
259 | do_check( | 263 | do_check( |
260 | r" | 264 | r" |
261 | pub enum A { | 265 | pub enum A { |
@@ -263,12 +267,14 @@ pub enum A { | |||
263 | } | 267 | } |
264 | ", | 268 | ", |
265 | "\nBar;\n", | 269 | "\nBar;\n", |
270 | 11, | ||
266 | ); | 271 | ); |
267 | do_check( | 272 | do_check( |
268 | r" | 273 | r" |
269 | foo!{a, b<|><|> d} | 274 | foo!{a, b<|><|> d} |
270 | ", | 275 | ", |
271 | ", c[3]", | 276 | ", c[3]", |
277 | 8, | ||
272 | ); | 278 | ); |
273 | do_check( | 279 | do_check( |
274 | r" | 280 | r" |
@@ -277,6 +283,7 @@ fn foo() { | |||
277 | } | 283 | } |
278 | ", | 284 | ", |
279 | "123", | 285 | "123", |
286 | 14, | ||
280 | ); | 287 | ); |
281 | do_check( | 288 | do_check( |
282 | r" | 289 | r" |
@@ -285,54 +292,60 @@ extern { | |||
285 | } | 292 | } |
286 | ", | 293 | ", |
287 | " exit(code: c_int)", | 294 | " exit(code: c_int)", |
295 | 11, | ||
288 | ); | 296 | ); |
289 | } | 297 | } |
290 | 298 | ||
291 | #[test] | 299 | #[test] |
292 | fn reparse_leaf_tests() { | 300 | fn reparse_token_tests() { |
293 | let do_check = |before, replace_to| do_check(before, replace_to, reparse_leaf); | ||
294 | |||
295 | do_check( | 301 | do_check( |
296 | r"<|><|> | 302 | r"<|><|> |
297 | fn foo() -> i32 { 1 } | 303 | fn foo() -> i32 { 1 } |
298 | ", | 304 | ", |
299 | "\n\n\n \n", | 305 | "\n\n\n \n", |
306 | 1, | ||
300 | ); | 307 | ); |
301 | do_check( | 308 | do_check( |
302 | r" | 309 | r" |
303 | fn foo() -> <|><|> {} | 310 | fn foo() -> <|><|> {} |
304 | ", | 311 | ", |
305 | " \n", | 312 | " \n", |
313 | 2, | ||
306 | ); | 314 | ); |
307 | do_check( | 315 | do_check( |
308 | r" | 316 | r" |
309 | fn <|>foo<|>() -> i32 { 1 } | 317 | fn <|>foo<|>() -> i32 { 1 } |
310 | ", | 318 | ", |
311 | "bar", | 319 | "bar", |
320 | 3, | ||
312 | ); | 321 | ); |
313 | do_check( | 322 | do_check( |
314 | r" | 323 | r" |
315 | fn foo<|><|>foo() { } | 324 | fn foo<|><|>foo() { } |
316 | ", | 325 | ", |
317 | "bar", | 326 | "bar", |
327 | 6, | ||
318 | ); | 328 | ); |
319 | do_check( | 329 | do_check( |
320 | r" | 330 | r" |
321 | fn foo /* <|><|> */ () {} | 331 | fn foo /* <|><|> */ () {} |
322 | ", | 332 | ", |
323 | "some comment", | 333 | "some comment", |
334 | 6, | ||
324 | ); | 335 | ); |
325 | do_check( | 336 | do_check( |
326 | r" | 337 | r" |
327 | fn baz <|><|> () {} | 338 | fn baz <|><|> () {} |
328 | ", | 339 | ", |
329 | " \t\t\n\n", | 340 | " \t\t\n\n", |
341 | 2, | ||
330 | ); | 342 | ); |
331 | do_check( | 343 | do_check( |
332 | r" | 344 | r" |
333 | fn baz <|><|> () {} | 345 | fn baz <|><|> () {} |
334 | ", | 346 | ", |
335 | " \t\t\n\n", | 347 | " \t\t\n\n", |
348 | 2, | ||
336 | ); | 349 | ); |
337 | do_check( | 350 | do_check( |
338 | r" | 351 | r" |
@@ -340,24 +353,28 @@ fn baz <|><|> () {} | |||
340 | mod { } | 353 | mod { } |
341 | ", | 354 | ", |
342 | "c", | 355 | "c", |
356 | 14, | ||
343 | ); | 357 | ); |
344 | do_check( | 358 | do_check( |
345 | r#" | 359 | r#" |
346 | fn -> &str { "Hello<|><|>" } | 360 | fn -> &str { "Hello<|><|>" } |
347 | "#, | 361 | "#, |
348 | ", world", | 362 | ", world", |
363 | 7, | ||
349 | ); | 364 | ); |
350 | do_check( | 365 | do_check( |
351 | r#" | 366 | r#" |
352 | fn -> &str { // "Hello<|><|>" | 367 | fn -> &str { // "Hello<|><|>" |
353 | "#, | 368 | "#, |
354 | ", world", | 369 | ", world", |
370 | 10, | ||
355 | ); | 371 | ); |
356 | do_check( | 372 | do_check( |
357 | r##" | 373 | r##" |
358 | fn -> &str { r#"Hello<|><|>"# | 374 | fn -> &str { r#"Hello<|><|>"# |
359 | "##, | 375 | "##, |
360 | ", world", | 376 | ", world", |
377 | 10, | ||
361 | ); | 378 | ); |
362 | do_check( | 379 | do_check( |
363 | r" | 380 | r" |
@@ -367,6 +384,7 @@ enum Foo { | |||
367 | } | 384 | } |
368 | ", | 385 | ", |
369 | "Clone", | 386 | "Clone", |
387 | 4, | ||
370 | ); | 388 | ); |
371 | } | 389 | } |
372 | } | 390 | } |
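For orientation, a minimal sketch of how the new return shape is consumed through the public API. The reparsed `TextRange` is internal bookkeeping (the lib.rs hunk earlier discards it); callers only see `SourceFile::incremental_reparse`. The concrete source text, offsets, and the assumption that this particular edit reparses incrementally are illustrative; `AtomTextEdit` comes from the sibling ra_text_edit crate.

    use ra_syntax::{AstNode, SourceFile, TextRange, TextUnit};
    use ra_text_edit::AtomTextEdit;

    fn reparse_demo() {
        let file = SourceFile::parse("fn foo() { 1 + 1 }");
        // Replace the second `1` (at offset 15) with `2`; only the block should be reparsed.
        let edit = AtomTextEdit::replace(
            TextRange::offset_len(TextUnit::from(15), TextUnit::from(1)),
            "2".to_string(),
        );
        let reparsed = file.incremental_reparse(&edit).expect("block edits reparse incrementally");
        assert_eq!(reparsed.syntax().text().to_string(), "fn foo() { 1 + 2 }");
    }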
diff --git a/crates/ra_syntax/src/parsing/text_tree_sink.rs b/crates/ra_syntax/src/parsing/text_tree_sink.rs index b17d06c61..71fc515f2 100644 --- a/crates/ra_syntax/src/parsing/text_tree_sink.rs +++ b/crates/ra_syntax/src/parsing/text_tree_sink.rs | |||
@@ -28,10 +28,10 @@ enum State { | |||
28 | } | 28 | } |
29 | 29 | ||
30 | impl<'a> TreeSink for TextTreeSink<'a> { | 30 | impl<'a> TreeSink for TextTreeSink<'a> { |
31 | fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8) { | 31 | fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { |
32 | match mem::replace(&mut self.state, State::Normal) { | 32 | match mem::replace(&mut self.state, State::Normal) { |
33 | State::PendingStart => unreachable!(), | 33 | State::PendingStart => unreachable!(), |
34 | State::PendingFinish => self.inner.finish_branch(), | 34 | State::PendingFinish => self.inner.finish_node(), |
35 | State::Normal => (), | 35 | State::Normal => (), |
36 | } | 36 | } |
37 | self.eat_trivias(); | 37 | self.eat_trivias(); |
@@ -40,18 +40,18 @@ impl<'a> TreeSink for TextTreeSink<'a> { | |||
40 | .iter() | 40 | .iter() |
41 | .map(|it| it.len) | 41 | .map(|it| it.len) |
42 | .sum::<TextUnit>(); | 42 | .sum::<TextUnit>(); |
43 | self.do_leaf(kind, len, n_tokens); | 43 | self.do_token(kind, len, n_tokens); |
44 | } | 44 | } |
45 | 45 | ||
46 | fn start_branch(&mut self, kind: SyntaxKind) { | 46 | fn start_node(&mut self, kind: SyntaxKind) { |
47 | match mem::replace(&mut self.state, State::Normal) { | 47 | match mem::replace(&mut self.state, State::Normal) { |
48 | State::PendingStart => { | 48 | State::PendingStart => { |
49 | self.inner.start_branch(kind); | 49 | self.inner.start_node(kind); |
50 | // No need to attach trivias to previous node: there is no | 50 | // No need to attach trivias to previous node: there is no |
51 | // previous node. | 51 | // previous node. |
52 | return; | 52 | return; |
53 | } | 53 | } |
54 | State::PendingFinish => self.inner.finish_branch(), | 54 | State::PendingFinish => self.inner.finish_node(), |
55 | State::Normal => (), | 55 | State::Normal => (), |
56 | } | 56 | } |
57 | 57 | ||
@@ -71,14 +71,14 @@ impl<'a> TreeSink for TextTreeSink<'a> { | |||
71 | n_attached_trivias(kind, leading_trivias) | 71 | n_attached_trivias(kind, leading_trivias) |
72 | }; | 72 | }; |
73 | self.eat_n_trivias(n_trivias - n_attached_trivias); | 73 | self.eat_n_trivias(n_trivias - n_attached_trivias); |
74 | self.inner.start_branch(kind); | 74 | self.inner.start_node(kind); |
75 | self.eat_n_trivias(n_attached_trivias); | 75 | self.eat_n_trivias(n_attached_trivias); |
76 | } | 76 | } |
77 | 77 | ||
78 | fn finish_branch(&mut self) { | 78 | fn finish_node(&mut self) { |
79 | match mem::replace(&mut self.state, State::PendingFinish) { | 79 | match mem::replace(&mut self.state, State::PendingFinish) { |
80 | State::PendingStart => unreachable!(), | 80 | State::PendingStart => unreachable!(), |
81 | State::PendingFinish => self.inner.finish_branch(), | 81 | State::PendingFinish => self.inner.finish_node(), |
82 | State::Normal => (), | 82 | State::Normal => (), |
83 | } | 83 | } |
84 | } | 84 | } |
@@ -104,7 +104,7 @@ impl<'a> TextTreeSink<'a> { | |||
104 | match mem::replace(&mut self.state, State::Normal) { | 104 | match mem::replace(&mut self.state, State::Normal) { |
105 | State::PendingFinish => { | 105 | State::PendingFinish => { |
106 | self.eat_trivias(); | 106 | self.eat_trivias(); |
107 | self.inner.finish_branch() | 107 | self.inner.finish_node() |
108 | } | 108 | } |
109 | State::PendingStart | State::Normal => unreachable!(), | 109 | State::PendingStart | State::Normal => unreachable!(), |
110 | } | 110 | } |
@@ -117,7 +117,7 @@ impl<'a> TextTreeSink<'a> { | |||
117 | if !token.kind.is_trivia() { | 117 | if !token.kind.is_trivia() { |
118 | break; | 118 | break; |
119 | } | 119 | } |
120 | self.do_leaf(token.kind, token.len, 1); | 120 | self.do_token(token.kind, token.len, 1); |
121 | } | 121 | } |
122 | } | 122 | } |
123 | 123 | ||
@@ -125,16 +125,16 @@ impl<'a> TextTreeSink<'a> { | |||
125 | for _ in 0..n { | 125 | for _ in 0..n { |
126 | let token = self.tokens[self.token_pos]; | 126 | let token = self.tokens[self.token_pos]; |
127 | assert!(token.kind.is_trivia()); | 127 | assert!(token.kind.is_trivia()); |
128 | self.do_leaf(token.kind, token.len, 1); | 128 | self.do_token(token.kind, token.len, 1); |
129 | } | 129 | } |
130 | } | 130 | } |
131 | 131 | ||
132 | fn do_leaf(&mut self, kind: SyntaxKind, len: TextUnit, n_tokens: usize) { | 132 | fn do_token(&mut self, kind: SyntaxKind, len: TextUnit, n_tokens: usize) { |
133 | let range = TextRange::offset_len(self.text_pos, len); | 133 | let range = TextRange::offset_len(self.text_pos, len); |
134 | let text: SmolStr = self.text[range].into(); | 134 | let text: SmolStr = self.text[range].into(); |
135 | self.text_pos += len; | 135 | self.text_pos += len; |
136 | self.token_pos += n_tokens; | 136 | self.token_pos += n_tokens; |
137 | self.inner.leaf(kind, text); | 137 | self.inner.token(kind, text); |
138 | } | 138 | } |
139 | } | 139 | } |
140 | 140 | ||
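The sink now emits `token`/`start_node`/`finish_node` events, and the trivia it attaches end up as ordinary tokens in the tree. A small sketch of inspecting that token layer from the outside (the function name and the idea of collecting comments are assumptions, not crate API):

    use ra_syntax::{AstNode, SourceFile, SyntaxElement, SyntaxKind};

    // Collect the comment tokens that sit directly under the root node.
    fn top_level_comments(text: &str) -> Vec<String> {
        let file = SourceFile::parse(text);
        file.syntax()
            .children_with_tokens()
            .filter_map(|element| match element {
                SyntaxElement::Token(token) if token.kind() == SyntaxKind::COMMENT => {
                    Some(token.text().to_string())
                }
                _ => None,
            })
            .collect()
    }

Note that the sink may attach leading comments to the item that follows them, so not every comment in a file is a direct child of the root.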
diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs index e5b4cdb11..be181d0ae 100644 --- a/crates/ra_syntax/src/syntax_node.rs +++ b/crates/ra_syntax/src/syntax_node.rs | |||
@@ -29,6 +29,9 @@ impl Types for RaTypes { | |||
29 | } | 29 | } |
30 | 30 | ||
31 | pub(crate) type GreenNode = rowan::GreenNode<RaTypes>; | 31 | pub(crate) type GreenNode = rowan::GreenNode<RaTypes>; |
32 | pub(crate) type GreenToken = rowan::GreenToken<RaTypes>; | ||
33 | #[allow(unused)] | ||
34 | pub(crate) type GreenElement = rowan::GreenElement<RaTypes>; | ||
32 | 35 | ||
33 | /// Marker trait for CST and AST nodes | 36 | /// Marker trait for CST and AST nodes |
34 | pub trait SyntaxNodeWrapper: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>> {} | 37 | pub trait SyntaxNodeWrapper: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>> {} |
@@ -113,11 +116,13 @@ impl ToOwned for SyntaxNode { | |||
113 | 116 | ||
114 | impl fmt::Debug for SyntaxNode { | 117 | impl fmt::Debug for SyntaxNode { |
115 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | 118 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { |
116 | write!(fmt, "{:?}@{:?}", self.kind(), self.range())?; | 119 | write!(fmt, "{:?}@{:?}", self.kind(), self.range()) |
117 | if has_short_text(self.kind()) { | 120 | } |
118 | write!(fmt, " \"{}\"", self.text())?; | 121 | } |
119 | } | 122 | |
120 | Ok(()) | 123 | impl fmt::Display for SyntaxNode { |
124 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
125 | fmt::Display::fmt(&self.text(), fmt) | ||
121 | } | 126 | } |
122 | } | 127 | } |
123 | 128 | ||
@@ -145,14 +150,6 @@ impl SyntaxNode { | |||
145 | SyntaxText::new(self) | 150 | SyntaxText::new(self) |
146 | } | 151 | } |
147 | 152 | ||
148 | pub fn is_leaf(&self) -> bool { | ||
149 | self.0.is_leaf() | ||
150 | } | ||
151 | |||
152 | pub fn leaf_text(&self) -> Option<&SmolStr> { | ||
153 | self.0.leaf_text() | ||
154 | } | ||
155 | |||
156 | pub fn parent(&self) -> Option<&SyntaxNode> { | 153 | pub fn parent(&self) -> Option<&SyntaxNode> { |
157 | self.0.parent().map(SyntaxNode::from_repr) | 154 | self.0.parent().map(SyntaxNode::from_repr) |
158 | } | 155 | } |
@@ -161,22 +158,50 @@ impl SyntaxNode { | |||
161 | self.0.first_child().map(SyntaxNode::from_repr) | 158 | self.0.first_child().map(SyntaxNode::from_repr) |
162 | } | 159 | } |
163 | 160 | ||
161 | pub fn first_child_or_token(&self) -> Option<SyntaxElement> { | ||
162 | self.0.first_child_or_token().map(SyntaxElement::from) | ||
163 | } | ||
164 | |||
164 | pub fn last_child(&self) -> Option<&SyntaxNode> { | 165 | pub fn last_child(&self) -> Option<&SyntaxNode> { |
165 | self.0.last_child().map(SyntaxNode::from_repr) | 166 | self.0.last_child().map(SyntaxNode::from_repr) |
166 | } | 167 | } |
167 | 168 | ||
169 | pub fn last_child_or_token(&self) -> Option<SyntaxElement> { | ||
170 | self.0.last_child_or_token().map(SyntaxElement::from) | ||
171 | } | ||
172 | |||
168 | pub fn next_sibling(&self) -> Option<&SyntaxNode> { | 173 | pub fn next_sibling(&self) -> Option<&SyntaxNode> { |
169 | self.0.next_sibling().map(SyntaxNode::from_repr) | 174 | self.0.next_sibling().map(SyntaxNode::from_repr) |
170 | } | 175 | } |
171 | 176 | ||
177 | pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> { | ||
178 | self.0.next_sibling_or_token().map(SyntaxElement::from) | ||
179 | } | ||
180 | |||
172 | pub fn prev_sibling(&self) -> Option<&SyntaxNode> { | 181 | pub fn prev_sibling(&self) -> Option<&SyntaxNode> { |
173 | self.0.prev_sibling().map(SyntaxNode::from_repr) | 182 | self.0.prev_sibling().map(SyntaxNode::from_repr) |
174 | } | 183 | } |
175 | 184 | ||
185 | pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> { | ||
186 | self.0.prev_sibling_or_token().map(SyntaxElement::from) | ||
187 | } | ||
188 | |||
176 | pub fn children(&self) -> SyntaxNodeChildren { | 189 | pub fn children(&self) -> SyntaxNodeChildren { |
177 | SyntaxNodeChildren(self.0.children()) | 190 | SyntaxNodeChildren(self.0.children()) |
178 | } | 191 | } |
179 | 192 | ||
193 | pub fn children_with_tokens(&self) -> SyntaxElementChildren { | ||
194 | SyntaxElementChildren(self.0.children_with_tokens()) | ||
195 | } | ||
196 | |||
197 | pub fn first_token(&self) -> Option<SyntaxToken> { | ||
198 | self.0.first_token().map(SyntaxToken::from) | ||
199 | } | ||
200 | |||
201 | pub fn last_token(&self) -> Option<SyntaxToken> { | ||
202 | self.0.last_token().map(SyntaxToken::from) | ||
203 | } | ||
204 | |||
180 | pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode> { | 205 | pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode> { |
181 | crate::algo::generate(Some(self), |&node| node.parent()) | 206 | crate::algo::generate(Some(self), |&node| node.parent()) |
182 | } | 207 | } |
@@ -188,6 +213,13 @@ impl SyntaxNode { | |||
188 | }) | 213 | }) |
189 | } | 214 | } |
190 | 215 | ||
216 | pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElement> { | ||
217 | self.preorder_with_tokens().filter_map(|event| match event { | ||
218 | WalkEvent::Enter(it) => Some(it), | ||
219 | WalkEvent::Leave(_) => None, | ||
220 | }) | ||
221 | } | ||
222 | |||
191 | pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode> { | 223 | pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode> { |
192 | crate::algo::generate(Some(self), move |&node| match direction { | 224 | crate::algo::generate(Some(self), move |&node| match direction { |
193 | Direction::Next => node.next_sibling(), | 225 | Direction::Next => node.next_sibling(), |
@@ -195,6 +227,17 @@ impl SyntaxNode { | |||
195 | }) | 227 | }) |
196 | } | 228 | } |
197 | 229 | ||
230 | pub fn siblings_with_tokens( | ||
231 | &self, | ||
232 | direction: Direction, | ||
233 | ) -> impl Iterator<Item = SyntaxElement> { | ||
234 | let me: SyntaxElement = self.into(); | ||
235 | crate::algo::generate(Some(me), move |el| match direction { | ||
236 | Direction::Next => el.next_sibling_or_token(), | ||
237 | Direction::Prev => el.prev_sibling_or_token(), | ||
238 | }) | ||
239 | } | ||
240 | |||
198 | pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode>> { | 241 | pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode>> { |
199 | self.0.preorder().map(|event| match event { | 242 | self.0.preorder().map(|event| match event { |
200 | WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode::from_repr(n)), | 243 | WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode::from_repr(n)), |
@@ -202,6 +245,13 @@ impl SyntaxNode { | |||
202 | }) | 245 | }) |
203 | } | 246 | } |
204 | 247 | ||
248 | pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElement>> { | ||
249 | self.0.preorder_with_tokens().map(|event| match event { | ||
250 | WalkEvent::Enter(n) => WalkEvent::Enter(n.into()), | ||
251 | WalkEvent::Leave(n) => WalkEvent::Leave(n.into()), | ||
252 | }) | ||
253 | } | ||
254 | |||
205 | pub fn memory_size_of_subtree(&self) -> usize { | 255 | pub fn memory_size_of_subtree(&self) -> usize { |
206 | self.0.memory_size_of_subtree() | 256 | self.0.memory_size_of_subtree() |
207 | } | 257 | } |
@@ -223,17 +273,20 @@ impl SyntaxNode { | |||
223 | }; | 273 | }; |
224 | } | 274 | } |
225 | 275 | ||
226 | for event in self.preorder() { | 276 | for event in self.preorder_with_tokens() { |
227 | match event { | 277 | match event { |
228 | WalkEvent::Enter(node) => { | 278 | WalkEvent::Enter(element) => { |
229 | indent!(); | 279 | indent!(); |
230 | writeln!(buf, "{:?}", node).unwrap(); | 280 | match element { |
231 | if node.first_child().is_none() { | 281 | SyntaxElement::Node(node) => writeln!(buf, "{:?}", node).unwrap(), |
232 | let off = node.range().end(); | 282 | SyntaxElement::Token(token) => { |
233 | while err_pos < errors.len() && errors[err_pos].offset() <= off { | 283 | writeln!(buf, "{:?}", token).unwrap(); |
234 | indent!(); | 284 | let off = token.range().end(); |
235 | writeln!(buf, "err: `{}`", errors[err_pos]).unwrap(); | 285 | while err_pos < errors.len() && errors[err_pos].offset() <= off { |
236 | err_pos += 1; | 286 | indent!(); |
287 | writeln!(buf, "err: `{}`", errors[err_pos]).unwrap(); | ||
288 | err_pos += 1; | ||
289 | } | ||
237 | } | 290 | } |
238 | } | 291 | } |
239 | level += 1; | 292 | level += 1; |
@@ -255,7 +308,172 @@ impl SyntaxNode { | |||
255 | } | 308 | } |
256 | 309 | ||
257 | pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode { | 310 | pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode { |
258 | self.0.replace_self(replacement) | 311 | self.0.replace_with(replacement) |
312 | } | ||
313 | } | ||
314 | |||
315 | #[derive(Clone, Copy, PartialEq, Eq, Hash)] | ||
316 | pub struct SyntaxToken<'a>(pub(crate) rowan::SyntaxToken<'a, RaTypes>); | ||
317 | |||
318 | //FIXME: always output text | ||
319 | impl<'a> fmt::Debug for SyntaxToken<'a> { | ||
320 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
321 | write!(fmt, "{:?}@{:?}", self.kind(), self.range())?; | ||
322 | if has_short_text(self.kind()) { | ||
323 | write!(fmt, " \"{}\"", self.text())?; | ||
324 | } | ||
325 | Ok(()) | ||
326 | } | ||
327 | } | ||
328 | |||
329 | impl<'a> fmt::Display for SyntaxToken<'a> { | ||
330 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
331 | fmt::Display::fmt(self.text(), fmt) | ||
332 | } | ||
333 | } | ||
334 | |||
335 | impl<'a> From<rowan::SyntaxToken<'a, RaTypes>> for SyntaxToken<'a> { | ||
336 | fn from(t: rowan::SyntaxToken<'a, RaTypes>) -> Self { | ||
337 | SyntaxToken(t) | ||
338 | } | ||
339 | } | ||
340 | |||
341 | impl<'a> SyntaxToken<'a> { | ||
342 | pub fn kind(&self) -> SyntaxKind { | ||
343 | self.0.kind() | ||
344 | } | ||
345 | |||
346 | pub fn text(&self) -> &'a SmolStr { | ||
347 | self.0.text() | ||
348 | } | ||
349 | |||
350 | pub fn range(&self) -> TextRange { | ||
351 | self.0.range() | ||
352 | } | ||
353 | |||
354 | pub fn parent(&self) -> &'a SyntaxNode { | ||
355 | SyntaxNode::from_repr(self.0.parent()) | ||
356 | } | ||
357 | |||
358 | pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<'a>> { | ||
359 | self.0.next_sibling_or_token().map(SyntaxElement::from) | ||
360 | } | ||
361 | |||
362 | pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<'a>> { | ||
363 | self.0.prev_sibling_or_token().map(SyntaxElement::from) | ||
364 | } | ||
365 | |||
366 | pub fn siblings_with_tokens( | ||
367 | &self, | ||
368 | direction: Direction, | ||
369 | ) -> impl Iterator<Item = SyntaxElement<'a>> { | ||
370 | let me: SyntaxElement = (*self).into(); | ||
371 | crate::algo::generate(Some(me), move |el| match direction { | ||
372 | Direction::Next => el.next_sibling_or_token(), | ||
373 | Direction::Prev => el.prev_sibling_or_token(), | ||
374 | }) | ||
375 | } | ||
376 | |||
377 | pub fn next_token(&self) -> Option<SyntaxToken<'a>> { | ||
378 | self.0.next_token().map(SyntaxToken::from) | ||
379 | } | ||
380 | |||
381 | pub fn prev_token(&self) -> Option<SyntaxToken<'a>> { | ||
382 | self.0.prev_token().map(SyntaxToken::from) | ||
383 | } | ||
384 | |||
385 | pub(crate) fn replace_with(&self, new_token: GreenToken) -> GreenNode { | ||
386 | self.0.replace_with(new_token) | ||
387 | } | ||
388 | } | ||
389 | |||
390 | #[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] | ||
391 | pub enum SyntaxElement<'a> { | ||
392 | Node(&'a SyntaxNode), | ||
393 | Token(SyntaxToken<'a>), | ||
394 | } | ||
395 | |||
396 | impl<'a> fmt::Display for SyntaxElement<'a> { | ||
397 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
398 | match self { | ||
399 | SyntaxElement::Node(it) => fmt::Display::fmt(it, fmt), | ||
400 | SyntaxElement::Token(it) => fmt::Display::fmt(it, fmt), | ||
401 | } | ||
402 | } | ||
403 | } | ||
404 | |||
405 | impl<'a> SyntaxElement<'a> { | ||
406 | pub fn kind(&self) -> SyntaxKind { | ||
407 | match self { | ||
408 | SyntaxElement::Node(it) => it.kind(), | ||
409 | SyntaxElement::Token(it) => it.kind(), | ||
410 | } | ||
411 | } | ||
412 | |||
413 | pub fn as_node(&self) -> Option<&'a SyntaxNode> { | ||
414 | match self { | ||
415 | SyntaxElement::Node(node) => Some(*node), | ||
416 | SyntaxElement::Token(_) => None, | ||
417 | } | ||
418 | } | ||
419 | |||
420 | pub fn as_token(&self) -> Option<SyntaxToken<'a>> { | ||
421 | match self { | ||
422 | SyntaxElement::Node(_) => None, | ||
423 | SyntaxElement::Token(token) => Some(*token), | ||
424 | } | ||
425 | } | ||
426 | |||
427 | pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<'a>> { | ||
428 | match self { | ||
429 | SyntaxElement::Node(it) => it.next_sibling_or_token(), | ||
430 | SyntaxElement::Token(it) => it.next_sibling_or_token(), | ||
431 | } | ||
432 | } | ||
433 | |||
434 | pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<'a>> { | ||
435 | match self { | ||
436 | SyntaxElement::Node(it) => it.prev_sibling_or_token(), | ||
437 | SyntaxElement::Token(it) => it.prev_sibling_or_token(), | ||
438 | } | ||
439 | } | ||
440 | |||
441 | pub fn ancestors(&self) -> impl Iterator<Item = &'a SyntaxNode> { | ||
442 | match self { | ||
443 | SyntaxElement::Node(it) => it, | ||
444 | SyntaxElement::Token(it) => it.parent(), | ||
445 | } | ||
446 | .ancestors() | ||
447 | } | ||
448 | } | ||
449 | |||
450 | impl<'a> From<rowan::SyntaxElement<'a, RaTypes>> for SyntaxElement<'a> { | ||
451 | fn from(el: rowan::SyntaxElement<'a, RaTypes>) -> Self { | ||
452 | match el { | ||
453 | rowan::SyntaxElement::Node(n) => SyntaxElement::Node(SyntaxNode::from_repr(n)), | ||
454 | rowan::SyntaxElement::Token(t) => SyntaxElement::Token(t.into()), | ||
455 | } | ||
456 | } | ||
457 | } | ||
458 | |||
459 | impl<'a> From<&'a SyntaxNode> for SyntaxElement<'a> { | ||
460 | fn from(node: &'a SyntaxNode) -> SyntaxElement<'a> { | ||
461 | SyntaxElement::Node(node) | ||
462 | } | ||
463 | } | ||
464 | |||
465 | impl<'a> From<SyntaxToken<'a>> for SyntaxElement<'a> { | ||
466 | fn from(token: SyntaxToken<'a>) -> SyntaxElement<'a> { | ||
467 | SyntaxElement::Token(token) | ||
468 | } | ||
469 | } | ||
470 | |||
471 | impl<'a> SyntaxElement<'a> { | ||
472 | pub fn range(&self) -> TextRange { | ||
473 | match self { | ||
474 | SyntaxElement::Node(it) => it.range(), | ||
475 | SyntaxElement::Token(it) => it.range(), | ||
476 | } | ||
259 | } | 477 | } |
260 | } | 478 | } |
261 | 479 | ||
@@ -270,6 +488,17 @@ impl<'a> Iterator for SyntaxNodeChildren<'a> { | |||
270 | } | 488 | } |
271 | } | 489 | } |
272 | 490 | ||
491 | #[derive(Debug)] | ||
492 | pub struct SyntaxElementChildren<'a>(rowan::SyntaxElementChildren<'a, RaTypes>); | ||
493 | |||
494 | impl<'a> Iterator for SyntaxElementChildren<'a> { | ||
495 | type Item = SyntaxElement<'a>; | ||
496 | |||
497 | fn next(&mut self) -> Option<SyntaxElement<'a>> { | ||
498 | self.0.next().map(SyntaxElement::from) | ||
499 | } | ||
500 | } | ||
501 | |||
273 | fn has_short_text(kind: SyntaxKind) -> bool { | 502 | fn has_short_text(kind: SyntaxKind) -> bool { |
274 | use crate::SyntaxKind::*; | 503 | use crate::SyntaxKind::*; |
275 | match kind { | 504 | match kind { |
@@ -304,16 +533,16 @@ impl SyntaxTreeBuilder { | |||
304 | node | 533 | node |
305 | } | 534 | } |
306 | 535 | ||
307 | pub fn leaf(&mut self, kind: SyntaxKind, text: SmolStr) { | 536 | pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) { |
308 | self.inner.leaf(kind, text) | 537 | self.inner.token(kind, text) |
309 | } | 538 | } |
310 | 539 | ||
311 | pub fn start_branch(&mut self, kind: SyntaxKind) { | 540 | pub fn start_node(&mut self, kind: SyntaxKind) { |
312 | self.inner.start_internal(kind) | 541 | self.inner.start_node(kind) |
313 | } | 542 | } |
314 | 543 | ||
315 | pub fn finish_branch(&mut self) { | 544 | pub fn finish_node(&mut self) { |
316 | self.inner.finish_internal() | 545 | self.inner.finish_node() |
317 | } | 546 | } |
318 | 547 | ||
319 | pub fn error(&mut self, error: ParseError, text_pos: TextUnit) { | 548 | pub fn error(&mut self, error: ParseError, text_pos: TextUnit) { |
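Tokens are no longer nodes, so token-level navigation goes through the new `SyntaxToken` API above. A minimal sketch (the function is hypothetical) that walks the token layer left to right:

    use ra_syntax::{AstNode, SourceFile};

    // Return the text of every token in the file, in source order.
    fn token_texts(text: &str) -> Vec<String> {
        let file = SourceFile::parse(text);
        let mut out = Vec::new();
        let mut token = file.syntax().first_token();
        while let Some(t) = token {
            out.push(t.text().to_string());
            token = t.next_token();
        }
        out
    }

Since every character of the file belongs to exactly one token, concatenating the returned strings should reproduce the input text.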
diff --git a/crates/ra_syntax/src/syntax_text.rs b/crates/ra_syntax/src/syntax_text.rs index 84e5b231a..6bb2ff461 100644 --- a/crates/ra_syntax/src/syntax_text.rs +++ b/crates/ra_syntax/src/syntax_text.rs | |||
@@ -1,6 +1,6 @@ | |||
1 | use std::{fmt, ops}; | 1 | use std::{fmt, ops}; |
2 | 2 | ||
3 | use crate::{SyntaxNode, TextRange, TextUnit}; | 3 | use crate::{SyntaxNode, TextRange, TextUnit, SyntaxElement}; |
4 | 4 | ||
5 | #[derive(Clone)] | 5 | #[derive(Clone)] |
6 | pub struct SyntaxText<'a> { | 6 | pub struct SyntaxText<'a> { |
@@ -15,11 +15,14 @@ impl<'a> SyntaxText<'a> { | |||
15 | 15 | ||
16 | pub fn chunks(&self) -> impl Iterator<Item = &'a str> { | 16 | pub fn chunks(&self) -> impl Iterator<Item = &'a str> { |
17 | let range = self.range; | 17 | let range = self.range; |
18 | self.node.descendants().filter_map(move |node| { | 18 | self.node.descendants_with_tokens().filter_map(move |el| match el { |
19 | let text = node.leaf_text()?; | 19 | SyntaxElement::Token(t) => { |
20 | let range = range.intersection(&node.range())?; | 20 | let text = t.text(); |
21 | let range = range - node.range().start(); | 21 | let range = range.intersection(&t.range())?; |
22 | Some(&text[range]) | 22 | let range = range - t.range().start(); |
23 | Some(&text[range]) | ||
24 | } | ||
25 | SyntaxElement::Node(_) => None, | ||
23 | }) | 26 | }) |
24 | } | 27 | } |
25 | 28 | ||
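`SyntaxText` now pulls its chunks from tokens rather than from leaf nodes. A small sketch of what that buys a caller: the text can be inspected chunk by chunk without allocating one big string (the function is illustrative):

    use ra_syntax::{AstNode, SourceFile};

    // Count characters by streaming over the token-backed chunks of the root's text.
    fn char_count(text: &str) -> usize {
        let file = SourceFile::parse(text);
        let syntax_text = file.syntax().text();
        syntax_text.chunks().map(|chunk| chunk.chars().count()).sum()
    }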
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs index 69f344d65..fc534df83 100644 --- a/crates/ra_syntax/src/validation.rs +++ b/crates/ra_syntax/src/validation.rs | |||
@@ -6,7 +6,7 @@ mod block; | |||
6 | 6 | ||
7 | use crate::{ | 7 | use crate::{ |
8 | SourceFile, SyntaxError, AstNode, SyntaxNode, | 8 | SourceFile, SyntaxError, AstNode, SyntaxNode, |
9 | SyntaxKind::{L_CURLY, R_CURLY}, | 9 | SyntaxKind::{L_CURLY, R_CURLY, BYTE, BYTE_STRING, STRING, CHAR}, |
10 | ast, | 10 | ast, |
11 | algo::visit::{visitor_ctx, VisitorCtx}, | 11 | algo::visit::{visitor_ctx, VisitorCtx}, |
12 | }; | 12 | }; |
@@ -15,16 +15,24 @@ pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> { | |||
15 | let mut errors = Vec::new(); | 15 | let mut errors = Vec::new(); |
16 | for node in file.syntax().descendants() { | 16 | for node in file.syntax().descendants() { |
17 | let _ = visitor_ctx(&mut errors) | 17 | let _ = visitor_ctx(&mut errors) |
18 | .visit::<ast::Byte, _>(byte::validate_byte_node) | 18 | .visit::<ast::Literal, _>(validate_literal) |
19 | .visit::<ast::ByteString, _>(byte_string::validate_byte_string_node) | ||
20 | .visit::<ast::Char, _>(char::validate_char_node) | ||
21 | .visit::<ast::String, _>(string::validate_string_node) | ||
22 | .visit::<ast::Block, _>(block::validate_block_node) | 19 | .visit::<ast::Block, _>(block::validate_block_node) |
23 | .accept(node); | 20 | .accept(node); |
24 | } | 21 | } |
25 | errors | 22 | errors |
26 | } | 23 | } |
27 | 24 | ||
25 | // FIXME: kill duplication | ||
26 | fn validate_literal(literal: &ast::Literal, acc: &mut Vec<SyntaxError>) { | ||
27 | match literal.token().kind() { | ||
28 | BYTE => byte::validate_byte_node(literal.token(), acc), | ||
29 | BYTE_STRING => byte_string::validate_byte_string_node(literal.token(), acc), | ||
30 | STRING => string::validate_string_node(literal.token(), acc), | ||
31 | CHAR => char::validate_char_node(literal.token(), acc), | ||
32 | _ => (), | ||
33 | } | ||
34 | } | ||
35 | |||
28 | pub(crate) fn validate_block_structure(root: &SyntaxNode) { | 36 | pub(crate) fn validate_block_structure(root: &SyntaxNode) { |
29 | let mut stack = Vec::new(); | 37 | let mut stack = Vec::new(); |
30 | for node in root.descendants() { | 38 | for node in root.descendants() { |
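Literal validation now dispatches on the kind of the literal's single token instead of on dedicated AST node types. A sketch of the same kind-based dispatch done from the outside, collecting literal tokens by kind (the function name is made up; the kinds are the ones imported in the hunk above):

    use ra_syntax::{AstNode, SourceFile, SyntaxKind::{self, BYTE, BYTE_STRING, CHAR, STRING}};

    // Gather every string/char/byte literal token in the file, tagged with its kind.
    fn literal_tokens(text: &str) -> Vec<(SyntaxKind, String)> {
        let file = SourceFile::parse(text);
        file.syntax()
            .descendants_with_tokens()
            .filter_map(|element| {
                let token = element.as_token()?;
                match token.kind() {
                    BYTE | BYTE_STRING | STRING | CHAR => {
                        Some((token.kind(), token.text().to_string()))
                    }
                    _ => None,
                }
            })
            .collect()
    }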
diff --git a/crates/ra_syntax/src/validation/byte.rs b/crates/ra_syntax/src/validation/byte.rs index 838e7a65f..290f80fc6 100644 --- a/crates/ra_syntax/src/validation/byte.rs +++ b/crates/ra_syntax/src/validation/byte.rs | |||
@@ -1,17 +1,17 @@ | |||
1 | //! Validation of byte literals | 1 | //! Validation of byte literals |
2 | 2 | ||
3 | use crate::{ | 3 | use crate::{ |
4 | ast::{self, AstNode, AstToken}, | ||
5 | string_lexing::{self, StringComponentKind}, | 4 | string_lexing::{self, StringComponentKind}, |
6 | TextRange, | 5 | TextRange, |
7 | validation::char, | 6 | validation::char, |
8 | SyntaxError, | 7 | SyntaxError, |
9 | SyntaxErrorKind::*, | 8 | SyntaxErrorKind::*, |
9 | SyntaxToken, | ||
10 | }; | 10 | }; |
11 | 11 | ||
12 | pub(super) fn validate_byte_node(node: &ast::Byte, errors: &mut Vec<SyntaxError>) { | 12 | pub(super) fn validate_byte_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) { |
13 | let literal_text = node.text(); | 13 | let literal_text = node.text(); |
14 | let literal_range = node.syntax().range(); | 14 | let literal_range = node.range(); |
15 | let mut components = string_lexing::parse_byte_literal(literal_text); | 15 | let mut components = string_lexing::parse_byte_literal(literal_text); |
16 | let mut len = 0; | 16 | let mut len = 0; |
17 | for component in &mut components { | 17 | for component in &mut components { |
diff --git a/crates/ra_syntax/src/validation/byte_string.rs b/crates/ra_syntax/src/validation/byte_string.rs index 64c7054a1..eae395e9d 100644 --- a/crates/ra_syntax/src/validation/byte_string.rs +++ b/crates/ra_syntax/src/validation/byte_string.rs | |||
@@ -1,15 +1,15 @@ | |||
1 | use crate::{ | 1 | use crate::{ |
2 | ast::{self, AstNode, AstToken}, | ||
3 | string_lexing::{self, StringComponentKind}, | 2 | string_lexing::{self, StringComponentKind}, |
4 | SyntaxError, | 3 | SyntaxError, |
5 | SyntaxErrorKind::*, | 4 | SyntaxErrorKind::*, |
5 | SyntaxToken, | ||
6 | }; | 6 | }; |
7 | 7 | ||
8 | use super::byte; | 8 | use super::byte; |
9 | 9 | ||
10 | pub(crate) fn validate_byte_string_node(node: &ast::ByteString, errors: &mut Vec<SyntaxError>) { | 10 | pub(crate) fn validate_byte_string_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) { |
11 | let literal_text = node.text(); | 11 | let literal_text = node.text(); |
12 | let literal_range = node.syntax().range(); | 12 | let literal_range = node.range(); |
13 | let mut components = string_lexing::parse_byte_string_literal(literal_text); | 13 | let mut components = string_lexing::parse_byte_string_literal(literal_text); |
14 | for component in &mut components { | 14 | for component in &mut components { |
15 | let range = component.range + literal_range.start(); | 15 | let range = component.range + literal_range.start(); |
diff --git a/crates/ra_syntax/src/validation/char.rs b/crates/ra_syntax/src/validation/char.rs index c874e5d08..a385accdd 100644 --- a/crates/ra_syntax/src/validation/char.rs +++ b/crates/ra_syntax/src/validation/char.rs | |||
@@ -5,16 +5,16 @@ use std::u32; | |||
5 | use arrayvec::ArrayString; | 5 | use arrayvec::ArrayString; |
6 | 6 | ||
7 | use crate::{ | 7 | use crate::{ |
8 | ast::{self, AstNode, AstToken}, | ||
9 | string_lexing::{self, StringComponentKind}, | 8 | string_lexing::{self, StringComponentKind}, |
10 | TextRange, | 9 | TextRange, |
11 | SyntaxError, | 10 | SyntaxError, |
12 | SyntaxErrorKind::*, | 11 | SyntaxErrorKind::*, |
12 | SyntaxToken, | ||
13 | }; | 13 | }; |
14 | 14 | ||
15 | pub(super) fn validate_char_node(node: &ast::Char, errors: &mut Vec<SyntaxError>) { | 15 | pub(super) fn validate_char_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) { |
16 | let literal_text = node.text(); | 16 | let literal_text = node.text(); |
17 | let literal_range = node.syntax().range(); | 17 | let literal_range = node.range(); |
18 | let mut components = string_lexing::parse_char_literal(literal_text); | 18 | let mut components = string_lexing::parse_char_literal(literal_text); |
19 | let mut len = 0; | 19 | let mut len = 0; |
20 | for component in &mut components { | 20 | for component in &mut components { |
diff --git a/crates/ra_syntax/src/validation/string.rs b/crates/ra_syntax/src/validation/string.rs index d857d088c..f7f5c02c0 100644 --- a/crates/ra_syntax/src/validation/string.rs +++ b/crates/ra_syntax/src/validation/string.rs | |||
@@ -1,15 +1,15 @@ | |||
1 | use crate::{ | 1 | use crate::{ |
2 | ast::{self, AstNode, AstToken}, | ||
3 | string_lexing, | 2 | string_lexing, |
4 | SyntaxError, | 3 | SyntaxError, |
5 | SyntaxErrorKind::*, | 4 | SyntaxErrorKind::*, |
5 | SyntaxToken, | ||
6 | }; | 6 | }; |
7 | 7 | ||
8 | use super::char; | 8 | use super::char; |
9 | 9 | ||
10 | pub(crate) fn validate_string_node(node: &ast::String, errors: &mut Vec<SyntaxError>) { | 10 | pub(crate) fn validate_string_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) { |
11 | let literal_text = node.text(); | 11 | let literal_text = node.text(); |
12 | let literal_range = node.syntax().range(); | 12 | let literal_range = node.range(); |
13 | let mut components = string_lexing::parse_string_literal(literal_text); | 13 | let mut components = string_lexing::parse_string_literal(literal_text); |
14 | for component in &mut components { | 14 | for component in &mut components { |
15 | let range = component.range + literal_range.start(); | 15 | let range = component.range + literal_range.start(); |
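All four validators now receive a `SyntaxToken` and rebase component ranges onto `token.range().start()`. A self-contained sketch of that range arithmetic, assuming the text types are re-exported as elsewhere in the workspace; the helper and its focus on plain string literals are assumptions, not crate API:

    use ra_syntax::{AstNode, SourceFile, SyntaxKind, TextRange, TextUnit};

    // File-relative range of the first string literal's contents, excluding the quotes.
    fn first_string_contents_range(text: &str) -> Option<TextRange> {
        let file = SourceFile::parse(text);
        let token = file
            .syntax()
            .descendants_with_tokens()
            .filter_map(|element| element.as_token())
            .find(|token| token.kind() == SyntaxKind::STRING)?;
        let len = TextUnit::of_str(token.text());
        if len < TextUnit::from(2) {
            return None; // unterminated literal: no closing quote to exclude
        }
        // Token-relative range between the quotes, rebased onto the file like the validators do.
        let inner = TextRange::from_to(TextUnit::from(1), len - TextUnit::from(1));
        Some(inner + token.range().start())
    }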