Diffstat (limited to 'crates/ra_syntax')
-rw-r--r--  crates/ra_syntax/src/algo/mod.rs | 54
-rw-r--r--  crates/ra_syntax/src/algo/visit.rs | 63
-rw-r--r--  crates/ra_syntax/src/algo/walk.rs | 6
-rw-r--r--  crates/ra_syntax/src/ast/generated.rs | 2
-rw-r--r--  crates/ra_syntax/src/ast/generated.rs.tera | 2
-rw-r--r--  crates/ra_syntax/src/ast/mod.rs | 24
-rw-r--r--  crates/ra_syntax/src/grammar/expressions/atom.rs | 58
-rw-r--r--  crates/ra_syntax/src/grammar/expressions/mod.rs | 53
-rw-r--r--  crates/ra_syntax/src/grammar/items/mod.rs | 45
-rw-r--r--  crates/ra_syntax/src/grammar/items/traits.rs | 1
-rw-r--r--  crates/ra_syntax/src/grammar/mod.rs | 31
-rw-r--r--  crates/ra_syntax/src/grammar/params.rs | 13
-rw-r--r--  crates/ra_syntax/src/grammar/paths.rs | 2
-rw-r--r--  crates/ra_syntax/src/grammar/patterns.rs | 22
-rw-r--r--  crates/ra_syntax/src/grammar/type_params.rs | 11
-rw-r--r--  crates/ra_syntax/src/grammar/types.rs | 18
-rw-r--r--  crates/ra_syntax/src/lexer/mod.rs | 16
-rw-r--r--  crates/ra_syntax/src/lexer/ptr.rs | 8
-rw-r--r--  crates/ra_syntax/src/lexer/strings.rs | 4
-rw-r--r--  crates/ra_syntax/src/lib.rs | 34
-rw-r--r--  crates/ra_syntax/src/parser_api.rs | 8
-rw-r--r--  crates/ra_syntax/src/parser_impl/event.rs | 54
-rw-r--r--  crates/ra_syntax/src/parser_impl/mod.rs | 13
-rw-r--r--  crates/ra_syntax/src/reparsing.rs | 247
-rw-r--r--  crates/ra_syntax/src/syntax_kinds/mod.rs | 2
-rw-r--r--  crates/ra_syntax/src/text_utils.rs | 2
-rw-r--r--  crates/ra_syntax/src/utils.rs | 8
-rw-r--r--  crates/ra_syntax/src/yellow/builder.rs | 12
-rw-r--r--  crates/ra_syntax/src/yellow/mod.rs | 23
-rw-r--r--  crates/ra_syntax/src/yellow/syntax_text.rs | 36
-rw-r--r--  crates/ra_syntax/tests/test.rs | 9
31 files changed, 490 insertions, 391 deletions
diff --git a/crates/ra_syntax/src/algo/mod.rs b/crates/ra_syntax/src/algo/mod.rs
index e686a5704..b4896c482 100644
--- a/crates/ra_syntax/src/algo/mod.rs
+++ b/crates/ra_syntax/src/algo/mod.rs
@@ -1,16 +1,18 @@
1pub mod walk;
2pub mod visit; 1pub mod visit;
2pub mod walk;
3 3
4use crate::{ 4use crate::{
5 SyntaxNodeRef, TextUnit, TextRange,
6 text_utils::{contains_offset_nonstrict, is_subrange}, 5 text_utils::{contains_offset_nonstrict, is_subrange},
6 SyntaxNodeRef, TextRange, TextUnit,
7}; 7};
8 8
9pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffset { 9pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffset {
10 let range = node.range(); 10 let range = node.range();
11 assert!( 11 assert!(
12 contains_offset_nonstrict(range, offset), 12 contains_offset_nonstrict(range, offset),
13 "Bad offset: range {:?} offset {:?}", range, offset 13 "Bad offset: range {:?} offset {:?}",
14 range,
15 offset
14 ); 16 );
15 if range.is_empty() { 17 if range.is_empty() {
16 return LeafAtOffset::None; 18 return LeafAtOffset::None;
@@ -20,20 +22,23 @@ pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffse
20 return LeafAtOffset::Single(node); 22 return LeafAtOffset::Single(node);
21 } 23 }
22 24
23 let mut children = node.children() 25 let mut children = node.children().filter(|child| {
24 .filter(|child| { 26 let child_range = child.range();
25 let child_range = child.range(); 27 !child_range.is_empty() && contains_offset_nonstrict(child_range, offset)
26 !child_range.is_empty() && contains_offset_nonstrict(child_range, offset) 28 });
27 });
28 29
29 let left = children.next().unwrap(); 30 let left = children.next().unwrap();
30 let right = children.next(); 31 let right = children.next();
31 assert!(children.next().is_none()); 32 assert!(children.next().is_none());
32 return if let Some(right) = right { 33 return if let Some(right) = right {
33 match (find_leaf_at_offset(left, offset), find_leaf_at_offset(right, offset)) { 34 match (
34 (LeafAtOffset::Single(left), LeafAtOffset::Single(right)) => 35 find_leaf_at_offset(left, offset),
35 LeafAtOffset::Between(left, right), 36 find_leaf_at_offset(right, offset),
36 _ => unreachable!() 37 ) {
38 (LeafAtOffset::Single(left), LeafAtOffset::Single(right)) => {
39 LeafAtOffset::Between(left, right)
40 }
41 _ => unreachable!(),
37 } 42 }
38 } else { 43 } else {
39 find_leaf_at_offset(left, offset) 44 find_leaf_at_offset(left, offset)
@@ -44,7 +49,7 @@ pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffse
44pub enum LeafAtOffset<'a> { 49pub enum LeafAtOffset<'a> {
45 None, 50 None,
46 Single(SyntaxNodeRef<'a>), 51 Single(SyntaxNodeRef<'a>),
47 Between(SyntaxNodeRef<'a>, SyntaxNodeRef<'a>) 52 Between(SyntaxNodeRef<'a>, SyntaxNodeRef<'a>),
48} 53}
49 54
50impl<'a> LeafAtOffset<'a> { 55impl<'a> LeafAtOffset<'a> {
@@ -52,7 +57,7 @@ impl<'a> LeafAtOffset<'a> {
52 match self { 57 match self {
53 LeafAtOffset::None => None, 58 LeafAtOffset::None => None,
54 LeafAtOffset::Single(node) => Some(node), 59 LeafAtOffset::Single(node) => Some(node),
55 LeafAtOffset::Between(_, right) => Some(right) 60 LeafAtOffset::Between(_, right) => Some(right),
56 } 61 }
57 } 62 }
58 63
@@ -60,7 +65,7 @@ impl<'a> LeafAtOffset<'a> {
60 match self { 65 match self {
61 LeafAtOffset::None => None, 66 LeafAtOffset::None => None,
62 LeafAtOffset::Single(node) => Some(node), 67 LeafAtOffset::Single(node) => Some(node),
63 LeafAtOffset::Between(left, _) => Some(left) 68 LeafAtOffset::Between(left, _) => Some(left),
64 } 69 }
65 } 70 }
66} 71}
@@ -71,8 +76,14 @@ impl<'f> Iterator for LeafAtOffset<'f> {
71 fn next(&mut self) -> Option<SyntaxNodeRef<'f>> { 76 fn next(&mut self) -> Option<SyntaxNodeRef<'f>> {
72 match *self { 77 match *self {
73 LeafAtOffset::None => None, 78 LeafAtOffset::None => None,
74 LeafAtOffset::Single(node) => { *self = LeafAtOffset::None; Some(node) } 79 LeafAtOffset::Single(node) => {
75 LeafAtOffset::Between(left, right) => { *self = LeafAtOffset::Single(right); Some(left) } 80 *self = LeafAtOffset::None;
81 Some(node)
82 }
83 LeafAtOffset::Between(left, right) => {
84 *self = LeafAtOffset::Single(right);
85 Some(left)
86 }
76 } 87 }
77 } 88 }
78} 89}
@@ -81,14 +92,15 @@ pub fn find_covering_node(root: SyntaxNodeRef, range: TextRange) -> SyntaxNodeRe
81 assert!( 92 assert!(
82 is_subrange(root.range(), range), 93 is_subrange(root.range(), range),
83 "node range: {:?}, target range: {:?}", 94 "node range: {:?}, target range: {:?}",
84 root.range(), range, 95 root.range(),
96 range,
85 ); 97 );
86 let (left, right) = match ( 98 let (left, right) = match (
87 find_leaf_at_offset(root, range.start()).right_biased(), 99 find_leaf_at_offset(root, range.start()).right_biased(),
88 find_leaf_at_offset(root, range.end()).left_biased() 100 find_leaf_at_offset(root, range.end()).left_biased(),
89 ) { 101 ) {
90 (Some(l), Some(r)) => (l, r), 102 (Some(l), Some(r)) => (l, r),
91 _ => return root 103 _ => return root,
92 }; 104 };
93 105
94 common_ancestor(left, right) 106 common_ancestor(left, right)
@@ -103,7 +115,7 @@ fn common_ancestor<'a>(n1: SyntaxNodeRef<'a>, n2: SyntaxNodeRef<'a>) -> SyntaxNo
103 panic!("Can't find common ancestor of {:?} and {:?}", n1, n2) 115 panic!("Can't find common ancestor of {:?} and {:?}", n1, n2)
104} 116}
105 117
106pub fn generate<T>(seed: Option<T>, step: impl Fn(&T) -> Option<T>) -> impl Iterator<Item=T> { 118pub fn generate<T>(seed: Option<T>, step: impl Fn(&T) -> Option<T>) -> impl Iterator<Item = T> {
107 ::itertools::unfold(seed, move |slot| { 119 ::itertools::unfold(seed, move |slot| {
108 slot.take().map(|curr| { 120 slot.take().map(|curr| {
109 *slot = step(&curr); 121 *slot = step(&curr);
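
The hunks above only reformat `algo::find_leaf_at_offset`, `LeafAtOffset` and `find_covering_node`; their behaviour is unchanged. As orientation, a caller might use them roughly like this (a sketch only, assuming `algo` is public and that `File::parse`/`syntax()` work as shown in the `lib.rs` hunk later in this diff):

use ra_syntax::algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset};
use ra_syntax::{File, TextRange, TextUnit};

fn show_leaves(text: &str, offset: u32) {
    let file = File::parse(text);
    let root = file.syntax();
    match find_leaf_at_offset(root, TextUnit::from(offset)) {
        LeafAtOffset::None => println!("offset is outside the tree"),
        LeafAtOffset::Single(leaf) => println!("exactly one leaf: {:?}", leaf),
        // The Iterator impl yields the left leaf first, then the right one.
        LeafAtOffset::Between(l, r) => println!("between {:?} and {:?}", l, r),
    }
    // find_covering_node returns the smallest node whose range contains `range`.
    let node = find_covering_node(root, TextRange::from_to(0.into(), TextUnit::from(offset)));
    println!("covering node: {:?}", node);
}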
diff --git a/crates/ra_syntax/src/algo/visit.rs b/crates/ra_syntax/src/algo/visit.rs
index 1ae988a87..c021f464c 100644
--- a/crates/ra_syntax/src/algo/visit.rs
+++ b/crates/ra_syntax/src/algo/visit.rs
@@ -1,23 +1,31 @@
1use std::marker::PhantomData; 1use crate::{AstNode, SyntaxNodeRef};
2use crate::{SyntaxNodeRef, AstNode};
3 2
3use std::marker::PhantomData;
4 4
5pub fn visitor<'a, T>() -> impl Visitor<'a, Output=T> { 5pub fn visitor<'a, T>() -> impl Visitor<'a, Output = T> {
6 EmptyVisitor { ph: PhantomData } 6 EmptyVisitor { ph: PhantomData }
7} 7}
8 8
9pub fn visitor_ctx<'a, T, C>(ctx: C) -> impl VisitorCtx<'a, Output=T, Ctx=C> { 9pub fn visitor_ctx<'a, T, C>(ctx: C) -> impl VisitorCtx<'a, Output = T, Ctx = C> {
10 EmptyVisitorCtx { ph: PhantomData, ctx } 10 EmptyVisitorCtx {
11 ph: PhantomData,
12 ctx,
13 }
11} 14}
12 15
13pub trait Visitor<'a>: Sized { 16pub trait Visitor<'a>: Sized {
14 type Output; 17 type Output;
15 fn accept(self, node: SyntaxNodeRef<'a>) -> Option<Self::Output>; 18 fn accept(self, node: SyntaxNodeRef<'a>) -> Option<Self::Output>;
16 fn visit<N, F>(self, f: F) -> Vis<Self, N, F> 19 fn visit<N, F>(self, f: F) -> Vis<Self, N, F>
17 where N: AstNode<'a>, 20 where
18 F: FnOnce(N) -> Self::Output, 21 N: AstNode<'a>,
22 F: FnOnce(N) -> Self::Output,
19 { 23 {
20 Vis { inner: self, f, ph: PhantomData } 24 Vis {
25 inner: self,
26 f,
27 ph: PhantomData,
28 }
21 } 29 }
22} 30}
23 31
@@ -26,16 +34,21 @@ pub trait VisitorCtx<'a>: Sized {
26 type Ctx; 34 type Ctx;
27 fn accept(self, node: SyntaxNodeRef<'a>) -> Result<Self::Output, Self::Ctx>; 35 fn accept(self, node: SyntaxNodeRef<'a>) -> Result<Self::Output, Self::Ctx>;
28 fn visit<N, F>(self, f: F) -> VisCtx<Self, N, F> 36 fn visit<N, F>(self, f: F) -> VisCtx<Self, N, F>
29 where N: AstNode<'a>, 37 where
30 F: FnOnce(N, Self::Ctx) -> Self::Output, 38 N: AstNode<'a>,
39 F: FnOnce(N, Self::Ctx) -> Self::Output,
31 { 40 {
32 VisCtx { inner: self, f, ph: PhantomData } 41 VisCtx {
42 inner: self,
43 f,
44 ph: PhantomData,
45 }
33 } 46 }
34} 47}
35 48
36#[derive(Debug)] 49#[derive(Debug)]
37struct EmptyVisitor<T> { 50struct EmptyVisitor<T> {
38 ph: PhantomData<fn() -> T> 51 ph: PhantomData<fn() -> T>,
39} 52}
40 53
41impl<'a, T> Visitor<'a> for EmptyVisitor<T> { 54impl<'a, T> Visitor<'a> for EmptyVisitor<T> {
@@ -69,10 +82,10 @@ pub struct Vis<V, N, F> {
69} 82}
70 83
71impl<'a, V, N, F> Visitor<'a> for Vis<V, N, F> 84impl<'a, V, N, F> Visitor<'a> for Vis<V, N, F>
72 where 85where
73 V: Visitor<'a>, 86 V: Visitor<'a>,
74 N: AstNode<'a>, 87 N: AstNode<'a>,
75 F: FnOnce(N) -> <V as Visitor<'a>>::Output, 88 F: FnOnce(N) -> <V as Visitor<'a>>::Output,
76{ 89{
77 type Output = <V as Visitor<'a>>::Output; 90 type Output = <V as Visitor<'a>>::Output;
78 91
@@ -90,21 +103,19 @@ pub struct VisCtx<V, N, F> {
90} 103}
91 104
92impl<'a, V, N, F> VisitorCtx<'a> for VisCtx<V, N, F> 105impl<'a, V, N, F> VisitorCtx<'a> for VisCtx<V, N, F>
93 where 106where
94 V: VisitorCtx<'a>, 107 V: VisitorCtx<'a>,
95 N: AstNode<'a>, 108 N: AstNode<'a>,
96 F: FnOnce(N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output, 109 F: FnOnce(N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output,
97{ 110{
98 type Output = <V as VisitorCtx<'a>>::Output; 111 type Output = <V as VisitorCtx<'a>>::Output;
99 type Ctx = <V as VisitorCtx<'a>>::Ctx; 112 type Ctx = <V as VisitorCtx<'a>>::Ctx;
100 113
101 fn accept(self, node: SyntaxNodeRef<'a>) -> Result<Self::Output, Self::Ctx> { 114 fn accept(self, node: SyntaxNodeRef<'a>) -> Result<Self::Output, Self::Ctx> {
102 let VisCtx { inner, f, .. } = self; 115 let VisCtx { inner, f, .. } = self;
103 inner.accept(node).or_else(|ctx| 116 inner.accept(node).or_else(|ctx| match N::cast(node) {
104 match N::cast(node) { 117 None => Err(ctx),
105 None => Err(ctx), 118 Some(node) => Ok(f(node, ctx)),
106 Some(node) => Ok(f(node, ctx)) 119 })
107 }
108 )
109 } 120 }
110} 121}
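
The visitor combinators above keep the same shape after formatting: build a visitor, chain one `.visit` closure per AST node type of interest, then `.accept` an untyped node. A minimal usage sketch, assuming `algo::visit` is reachable from outside the crate and that `ast::FnDef`/`ast::StructDef` exist in the generated AST (as they do elsewhere in this crate):

use ra_syntax::algo::visit::{visitor, Visitor};
use ra_syntax::{ast, SyntaxNodeRef};

fn classify(node: SyntaxNodeRef) -> Option<String> {
    visitor()
        // Each .visit adds one arm; the first arm whose AstNode::cast succeeds
        // runs its closure, otherwise accept() returns None.
        .visit(|fn_def: ast::FnDef| format!("fn (test attr: {})", fn_def.has_atom_attr("test")))
        .visit(|_: ast::StructDef| "struct".to_string())
        .accept(node)
}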
diff --git a/crates/ra_syntax/src/algo/walk.rs b/crates/ra_syntax/src/algo/walk.rs
index d34415626..9afa86401 100644
--- a/crates/ra_syntax/src/algo/walk.rs
+++ b/crates/ra_syntax/src/algo/walk.rs
@@ -1,8 +1,4 @@
1use crate::{ 1use crate::{algo::generate, SyntaxNodeRef};
2 SyntaxNodeRef,
3 algo::generate,
4};
5
6 2
7#[derive(Debug, Copy, Clone)] 3#[derive(Debug, Copy, Clone)]
8pub enum WalkEvent<'a> { 4pub enum WalkEvent<'a> {
diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs
index 160d186b8..a15e00176 100644
--- a/crates/ra_syntax/src/ast/generated.rs
+++ b/crates/ra_syntax/src/ast/generated.rs
@@ -1,6 +1,8 @@
1// This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-kinds` is run 1// This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-kinds` is run
2// Do not edit manually 2// Do not edit manually
3 3
4#![cfg_attr(rustfmt, rustfmt_skip)]
5
4use crate::{ 6use crate::{
5 ast, 7 ast,
6 SyntaxNodeRef, AstNode, 8 SyntaxNodeRef, AstNode,
diff --git a/crates/ra_syntax/src/ast/generated.rs.tera b/crates/ra_syntax/src/ast/generated.rs.tera
index 5cb7a35ed..d2a281137 100644
--- a/crates/ra_syntax/src/ast/generated.rs.tera
+++ b/crates/ra_syntax/src/ast/generated.rs.tera
@@ -3,6 +3,8 @@ the below applies to the result of this template
3#}// This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-kinds` is run 3#}// This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-kinds` is run
4// Do not edit manually 4// Do not edit manually
5 5
6#![cfg_attr(rustfmt, rustfmt_skip)]
7
6use crate::{ 8use crate::{
7 ast, 9 ast,
8 SyntaxNodeRef, AstNode, 10 SyntaxNodeRef, AstNode,
diff --git a/crates/ra_syntax/src/ast/mod.rs b/crates/ra_syntax/src/ast/mod.rs
index 88193a1ed..34958b6cb 100644
--- a/crates/ra_syntax/src/ast/mod.rs
+++ b/crates/ra_syntax/src/ast/mod.rs
@@ -4,15 +4,18 @@ use std::marker::PhantomData;
4 4
5use itertools::Itertools; 5use itertools::Itertools;
6 6
7pub use self::generated::*;
7use crate::{ 8use crate::{
8 SmolStr, SyntaxNodeRef, SyntaxKind::*,
9 yellow::{RefRoot, SyntaxNodeChildren}, 9 yellow::{RefRoot, SyntaxNodeChildren},
10 SmolStr,
11 SyntaxKind::*,
12 SyntaxNodeRef,
10}; 13};
11pub use self::generated::*;
12 14
13pub trait AstNode<'a>: Clone + Copy + 'a { 15pub trait AstNode<'a>: Clone + Copy + 'a {
14 fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> 16 fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self>
15 where Self: Sized; 17 where
18 Self: Sized;
16 fn syntax(self) -> SyntaxNodeRef<'a>; 19 fn syntax(self) -> SyntaxNodeRef<'a>;
17} 20}
18 21
@@ -64,9 +67,7 @@ pub trait AttrsOwner<'a>: AstNode<'a> {
64 67
65impl<'a> FnDef<'a> { 68impl<'a> FnDef<'a> {
66 pub fn has_atom_attr(&self, atom: &str) -> bool { 69 pub fn has_atom_attr(&self, atom: &str) -> bool {
67 self.attrs() 70 self.attrs().filter_map(|x| x.as_atom()).any(|x| x == atom)
68 .filter_map(|x| x.as_atom())
69 .any(|x| x == atom)
70 } 71 }
71} 72}
72 73
@@ -135,7 +136,7 @@ pub enum CommentFlavor {
135 Line, 136 Line,
136 Doc, 137 Doc,
137 ModuleDoc, 138 ModuleDoc,
138 Multiline 139 Multiline,
139} 140}
140 141
141impl CommentFlavor { 142impl CommentFlavor {
@@ -145,7 +146,7 @@ impl CommentFlavor {
145 Line => "//", 146 Line => "//",
146 Doc => "///", 147 Doc => "///",
147 ModuleDoc => "//!", 148 ModuleDoc => "//!",
148 Multiline => "/*" 149 Multiline => "/*",
149 } 150 }
150 } 151 }
151} 152}
@@ -166,16 +167,14 @@ impl<'a> Whitespace<'a> {
166 167
167impl<'a> Name<'a> { 168impl<'a> Name<'a> {
168 pub fn text(&self) -> SmolStr { 169 pub fn text(&self) -> SmolStr {
169 let ident = self.syntax().first_child() 170 let ident = self.syntax().first_child().unwrap();
170 .unwrap();
171 ident.leaf_text().unwrap().clone() 171 ident.leaf_text().unwrap().clone()
172 } 172 }
173} 173}
174 174
175impl<'a> NameRef<'a> { 175impl<'a> NameRef<'a> {
176 pub fn text(&self) -> SmolStr { 176 pub fn text(&self) -> SmolStr {
177 let ident = self.syntax().first_child() 177 let ident = self.syntax().first_child().unwrap();
178 .unwrap();
179 ident.leaf_text().unwrap().clone() 178 ident.leaf_text().unwrap().clone()
180 } 179 }
181} 180}
@@ -241,7 +240,6 @@ fn children<'a, P: AstNode<'a>, C: AstNode<'a>>(parent: P) -> AstChildren<'a, C>
241 AstChildren::new(parent.syntax()) 240 AstChildren::new(parent.syntax())
242} 241}
243 242
244
245#[derive(Debug)] 243#[derive(Debug)]
246pub struct AstChildren<'a, N> { 244pub struct AstChildren<'a, N> {
247 inner: SyntaxNodeChildren<RefRoot<'a>>, 245 inner: SyntaxNodeChildren<RefRoot<'a>>,
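
`AstNode::cast` plus the typed accessors shown above are the intended way to go from an untyped `SyntaxNodeRef` to typed data. A small sketch; the `name()` accessor on `FnDef` is assumed here (it comes from the generated owner traits, not from this hunk):

use ra_syntax::{ast, AstNode, SyntaxNodeRef};

fn fn_name(node: SyntaxNodeRef) -> Option<String> {
    let fn_def = ast::FnDef::cast(node)?; // None unless `node` is a FN_DEF
    let name = fn_def.name()?;            // assumed NameOwner-style accessor
    Some(name.text().to_string())         // Name::text() -> SmolStr, see above
}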
diff --git a/crates/ra_syntax/src/grammar/expressions/atom.rs b/crates/ra_syntax/src/grammar/expressions/atom.rs
index e21de68c5..11f766d33 100644
--- a/crates/ra_syntax/src/grammar/expressions/atom.rs
+++ b/crates/ra_syntax/src/grammar/expressions/atom.rs
@@ -13,9 +13,18 @@ use super::*;
13// let _ = b"e"; 13// let _ = b"e";
14// let _ = br"f"; 14// let _ = br"f";
15// } 15// }
16pub(crate) const LITERAL_FIRST: TokenSet = 16pub(crate) const LITERAL_FIRST: TokenSet = token_set![
17 token_set![TRUE_KW, FALSE_KW, INT_NUMBER, FLOAT_NUMBER, BYTE, CHAR, 17 TRUE_KW,
18 STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; 18 FALSE_KW,
19 INT_NUMBER,
20 FLOAT_NUMBER,
21 BYTE,
22 CHAR,
23 STRING,
24 RAW_STRING,
25 BYTE_STRING,
26 RAW_BYTE_STRING
27];
19 28
20pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> { 29pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
21 if !p.at_ts(LITERAL_FIRST) { 30 if !p.at_ts(LITERAL_FIRST) {
@@ -26,15 +35,31 @@ pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
26 Some(m.complete(p, LITERAL)) 35 Some(m.complete(p, LITERAL))
27} 36}
28 37
29pub(super) const ATOM_EXPR_FIRST: TokenSet = 38pub(super) const ATOM_EXPR_FIRST: TokenSet = token_set_union![
30 token_set_union![ 39 LITERAL_FIRST,
31 LITERAL_FIRST, 40 token_set![
32 token_set![L_CURLY, L_PAREN, L_BRACK, PIPE, MOVE_KW, IF_KW, WHILE_KW, MATCH_KW, UNSAFE_KW, 41 L_CURLY,
33 RETURN_KW, IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLONCOLON, BREAK_KW, CONTINUE_KW, LIFETIME ], 42 L_PAREN,
34 ]; 43 L_BRACK,
44 PIPE,
45 MOVE_KW,
46 IF_KW,
47 WHILE_KW,
48 MATCH_KW,
49 UNSAFE_KW,
50 RETURN_KW,
51 IDENT,
52 SELF_KW,
53 SUPER_KW,
54 CRATE_KW,
55 COLONCOLON,
56 BREAK_KW,
57 CONTINUE_KW,
58 LIFETIME
59 ],
60];
35 61
36const EXPR_RECOVERY_SET: TokenSet = 62const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW];
37 token_set![LET_KW];
38 63
39pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<CompletedMarker> { 64pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<CompletedMarker> {
40 match literal(p) { 65 match literal(p) {
@@ -80,7 +105,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<CompletedMark
80 let m = p.start(); 105 let m = p.start();
81 p.bump(); 106 p.bump();
82 block_expr(p, Some(m)) 107 block_expr(p, Some(m))
83 }, 108 }
84 L_CURLY => block_expr(p, None), 109 L_CURLY => block_expr(p, None),
85 RETURN_KW => return_expr(p), 110 RETURN_KW => return_expr(p),
86 CONTINUE_KW => continue_expr(p), 111 CONTINUE_KW => continue_expr(p),
@@ -119,7 +144,14 @@ fn tuple_expr(p: &mut Parser) -> CompletedMarker {
119 } 144 }
120 } 145 }
121 p.expect(R_PAREN); 146 p.expect(R_PAREN);
122 m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) 147 m.complete(
148 p,
149 if saw_expr && !saw_comma {
150 PAREN_EXPR
151 } else {
152 TUPLE_EXPR
153 },
154 )
123} 155}
124 156
125// test array_expr 157// test array_expr
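
The `token_set!`/`token_set_union!` constants above are first/recovery sets that the grammar checks before committing to a node. Inside the grammar modules (where `use super::*` brings in `Parser`, `TokenSet` and the `SyntaxKind` constants) the pattern looks roughly like the hypothetical rule below, which reuses `LITERAL_FIRST` and `literal()` from this file and is not part of the diff:

// Hypothetical grammar rule, for illustration only.
const SIGNED_LITERAL_FIRST: TokenSet = token_set_union![token_set![MINUS], LITERAL_FIRST,];

fn signed_literal(p: &mut Parser) -> Option<CompletedMarker> {
    if !p.at_ts(SIGNED_LITERAL_FIRST) {
        return None; // caller can try another alternative or recover
    }
    let m = p.start();
    p.eat(MINUS); // optional leading sign
    literal(p);
    Some(m.complete(p, LITERAL))
}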
diff --git a/crates/ra_syntax/src/grammar/expressions/mod.rs b/crates/ra_syntax/src/grammar/expressions/mod.rs
index 20e0fa328..60c8602f9 100644
--- a/crates/ra_syntax/src/grammar/expressions/mod.rs
+++ b/crates/ra_syntax/src/grammar/expressions/mod.rs
@@ -1,23 +1,32 @@
1mod atom; 1mod atom;
2 2
3use super::*;
4pub(super) use self::atom::{literal, LITERAL_FIRST};
5pub(crate) use self::atom::match_arm_list; 3pub(crate) use self::atom::match_arm_list;
4pub(super) use self::atom::{literal, LITERAL_FIRST};
5use super::*;
6 6
7const EXPR_FIRST: TokenSet = LHS_FIRST; 7const EXPR_FIRST: TokenSet = LHS_FIRST;
8 8
9pub(super) fn expr(p: &mut Parser) -> BlockLike { 9pub(super) fn expr(p: &mut Parser) -> BlockLike {
10 let r = Restrictions { forbid_structs: false, prefer_stmt: false }; 10 let r = Restrictions {
11 forbid_structs: false,
12 prefer_stmt: false,
13 };
11 expr_bp(p, r, 1) 14 expr_bp(p, r, 1)
12} 15}
13 16
14pub(super) fn expr_stmt(p: &mut Parser) -> BlockLike { 17pub(super) fn expr_stmt(p: &mut Parser) -> BlockLike {
15 let r = Restrictions { forbid_structs: false, prefer_stmt: true }; 18 let r = Restrictions {
19 forbid_structs: false,
20 prefer_stmt: true,
21 };
16 expr_bp(p, r, 1) 22 expr_bp(p, r, 1)
17} 23}
18 24
19fn expr_no_struct(p: &mut Parser) { 25fn expr_no_struct(p: &mut Parser) {
20 let r = Restrictions { forbid_structs: true, prefer_stmt: false }; 26 let r = Restrictions {
27 forbid_structs: true,
28 prefer_stmt: false,
29 };
21 expr_bp(p, r, 1); 30 expr_bp(p, r, 1);
22} 31}
23 32
@@ -107,10 +116,8 @@ enum Op {
107fn current_op(p: &Parser) -> (u8, Op) { 116fn current_op(p: &Parser) -> (u8, Op) {
108 if let Some(t) = p.next3() { 117 if let Some(t) = p.next3() {
109 match t { 118 match t {
110 (L_ANGLE, L_ANGLE, EQ) => 119 (L_ANGLE, L_ANGLE, EQ) => return (1, Op::Composite(SHLEQ, 3)),
111 return (1, Op::Composite(SHLEQ, 3)), 120 (R_ANGLE, R_ANGLE, EQ) => return (1, Op::Composite(SHREQ, 3)),
112 (R_ANGLE, R_ANGLE, EQ) =>
113 return (1, Op::Composite(SHREQ, 3)),
114 _ => (), 121 _ => (),
115 } 122 }
116 } 123 }
@@ -201,11 +208,10 @@ fn is_block(kind: SyntaxKind) -> bool {
201 } 208 }
202} 209}
203 210
204const LHS_FIRST: TokenSet = 211const LHS_FIRST: TokenSet = token_set_union![
205 token_set_union![ 212 token_set![AMP, STAR, EXCL, DOTDOT, MINUS],
206 token_set![AMP, STAR, EXCL, DOTDOT, MINUS], 213 atom::ATOM_EXPR_FIRST,
207 atom::ATOM_EXPR_FIRST, 214];
208 ];
209 215
210fn lhs(p: &mut Parser, r: Restrictions) -> Option<CompletedMarker> { 216fn lhs(p: &mut Parser, r: Restrictions) -> Option<CompletedMarker> {
211 let m; 217 let m;
@@ -265,11 +271,13 @@ fn postfix_expr(p: &mut Parser, r: Restrictions, mut lhs: CompletedMarker) -> Co
265 // } 271 // }
266 L_PAREN if allow_calls => call_expr(p, lhs), 272 L_PAREN if allow_calls => call_expr(p, lhs),
267 L_BRACK if allow_calls => index_expr(p, lhs), 273 L_BRACK if allow_calls => index_expr(p, lhs),
268 DOT if p.nth(1) == IDENT => if p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON { 274 DOT if p.nth(1) == IDENT => {
269 method_call_expr(p, lhs) 275 if p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON {
270 } else { 276 method_call_expr(p, lhs)
271 field_expr(p, lhs) 277 } else {
272 }, 278 field_expr(p, lhs)
279 }
280 }
273 DOT if p.nth(1) == INT_NUMBER => field_expr(p, lhs), 281 DOT if p.nth(1) == INT_NUMBER => field_expr(p, lhs),
274 // test postfix_range 282 // test postfix_range
275 // fn foo() { let x = 1..; } 283 // fn foo() { let x = 1..; }
@@ -318,10 +326,7 @@ fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
318// y.bar::<T>(1, 2,); 326// y.bar::<T>(1, 2,);
319// } 327// }
320fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { 328fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
321 assert!( 329 assert!(p.at(DOT) && p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON));
322 p.at(DOT) && p.nth(1) == IDENT
323 && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON)
324 );
325 let m = lhs.precede(p); 330 let m = lhs.precede(p);
326 p.bump(); 331 p.bump();
327 name_ref(p); 332 name_ref(p);
@@ -410,7 +415,7 @@ fn path_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker {
410 items::macro_call_after_excl(p); 415 items::macro_call_after_excl(p);
411 m.complete(p, MACRO_CALL) 416 m.complete(p, MACRO_CALL)
412 } 417 }
413 _ => m.complete(p, PATH_EXPR) 418 _ => m.complete(p, PATH_EXPR),
414 } 419 }
415} 420}
416 421
diff --git a/crates/ra_syntax/src/grammar/items/mod.rs b/crates/ra_syntax/src/grammar/items/mod.rs
index 2567313ab..dc4742bce 100644
--- a/crates/ra_syntax/src/grammar/items/mod.rs
+++ b/crates/ra_syntax/src/grammar/items/mod.rs
@@ -1,16 +1,15 @@
1
2mod consts; 1mod consts;
3mod nominal; 2mod nominal;
4mod traits; 3mod traits;
5mod use_item; 4mod use_item;
6 5
7use super::*;
8pub(crate) use self::{ 6pub(crate) use self::{
9 expressions::{named_field_list, match_arm_list}, 7 expressions::{match_arm_list, named_field_list},
10 nominal::{enum_variant_list, named_field_def_list}, 8 nominal::{enum_variant_list, named_field_def_list},
11 traits::{trait_item_list, impl_item_list}, 9 traits::{impl_item_list, trait_item_list},
12 use_item::use_tree_list, 10 use_item::use_tree_list,
13}; 11};
12use super::*;
14 13
15// test mod_contents 14// test mod_contents
16// fn foo() {} 15// fn foo() {}
@@ -26,12 +25,14 @@ pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
26} 25}
27 26
28pub(super) enum ItemFlavor { 27pub(super) enum ItemFlavor {
29 Mod, Trait 28 Mod,
29 Trait,
30} 30}
31 31
32const ITEM_RECOVERY_SET: TokenSet = 32const ITEM_RECOVERY_SET: TokenSet = token_set![
33 token_set![FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, 33 FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, MOD_KW, PUB_KW,
34 MOD_KW, PUB_KW, CRATE_KW]; 34 CRATE_KW
35];
35 36
36pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemFlavor) { 37pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemFlavor) {
37 let m = p.start(); 38 let m = p.start();
@@ -153,10 +154,12 @@ pub(super) fn maybe_item(p: &mut Parser, flavor: ItemFlavor) -> MaybeItem {
153 traits::impl_item(p); 154 traits::impl_item(p);
154 IMPL_ITEM 155 IMPL_ITEM
155 } 156 }
156 _ => return if has_mods { 157 _ => {
157 MaybeItem::Modifiers 158 return if has_mods {
158 } else { 159 MaybeItem::Modifiers
159 MaybeItem::None 160 } else {
161 MaybeItem::None
162 }
160 } 163 }
161 }; 164 };
162 165
@@ -194,7 +197,7 @@ fn items_without_modifiers(p: &mut Parser) -> Option<SyntaxKind> {
194 if p.at(SEMI) { 197 if p.at(SEMI) {
195 p.err_and_bump( 198 p.err_and_bump(
196 "expected item, found `;`\n\ 199 "expected item, found `;`\n\
197 consider removing this semicolon" 200 consider removing this semicolon",
198 ); 201 );
199 } 202 }
200 STRUCT_DEF 203 STRUCT_DEF
@@ -227,7 +230,9 @@ fn items_without_modifiers(p: &mut Parser) -> Option<SyntaxKind> {
227 } 230 }
228 // test extern_block 231 // test extern_block
229 // extern {} 232 // extern {}
230 EXTERN_KW if la == L_CURLY || ((la == STRING || la == RAW_STRING) && p.nth(2) == L_CURLY) => { 233 EXTERN_KW
234 if la == L_CURLY || ((la == STRING || la == RAW_STRING) && p.nth(2) == L_CURLY) =>
235 {
231 abi(p); 236 abi(p);
232 extern_item_list(p); 237 extern_item_list(p);
233 EXTERN_BLOCK 238 EXTERN_BLOCK
@@ -267,10 +272,8 @@ fn fn_def(p: &mut Parser, flavor: ItemFlavor) {
267 272
268 if p.at(L_PAREN) { 273 if p.at(L_PAREN) {
269 match flavor { 274 match flavor {
270 ItemFlavor::Mod => 275 ItemFlavor::Mod => params::param_list(p),
271 params::param_list(p), 276 ItemFlavor::Trait => params::param_list_opt_patterns(p),
272 ItemFlavor::Trait =>
273 params::param_list_opt_patterns(p),
274 } 277 }
275 } else { 278 } else {
276 p.error("expected function arguments"); 279 p.error("expected function arguments");
@@ -361,7 +364,7 @@ pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike {
361 _ => { 364 _ => {
362 p.error("expected `{`, `[`, `(`"); 365 p.error("expected `{`, `[`, `(`");
363 BlockLike::NotBlock 366 BlockLike::NotBlock
364 }, 367 }
365 }; 368 };
366 369
367 flavor 370 flavor
@@ -385,9 +388,9 @@ pub(crate) fn token_tree(p: &mut Parser) {
385 return; 388 return;
386 } 389 }
387 R_PAREN | R_BRACK => p.err_and_bump("unmatched brace"), 390 R_PAREN | R_BRACK => p.err_and_bump("unmatched brace"),
388 _ => p.bump() 391 _ => p.bump(),
389 } 392 }
390 }; 393 }
391 p.expect(closing_paren_kind); 394 p.expect(closing_paren_kind);
392 m.complete(p, TOKEN_TREE); 395 m.complete(p, TOKEN_TREE);
393} 396}
diff --git a/crates/ra_syntax/src/grammar/items/traits.rs b/crates/ra_syntax/src/grammar/items/traits.rs
index 5dfdb470c..31258c253 100644
--- a/crates/ra_syntax/src/grammar/items/traits.rs
+++ b/crates/ra_syntax/src/grammar/items/traits.rs
@@ -128,4 +128,3 @@ pub(crate) fn impl_type(p: &mut Parser) {
128 } 128 }
129 types::type_(p); 129 types::type_(p);
130} 130}
131
diff --git a/crates/ra_syntax/src/grammar/mod.rs b/crates/ra_syntax/src/grammar/mod.rs
index 1199ba230..c87564073 100644
--- a/crates/ra_syntax/src/grammar/mod.rs
+++ b/crates/ra_syntax/src/grammar/mod.rs
@@ -31,28 +31,18 @@ mod type_args;
31mod type_params; 31mod type_params;
32mod types; 32mod types;
33 33
34use crate::{
35 token_set::TokenSet,
36 parser_api::{Marker, CompletedMarker, Parser},
37 SyntaxKind::{self, *},
38};
39pub(crate) use self::{ 34pub(crate) use self::{
40 expressions::{ 35 expressions::block,
41 block,
42 },
43 items::{ 36 items::{
44 enum_variant_list, 37 enum_variant_list, extern_item_list, impl_item_list, match_arm_list, mod_item_list,
45 extern_item_list, 38 named_field_def_list, named_field_list, token_tree, trait_item_list, use_tree_list,
46 impl_item_list,
47 match_arm_list,
48 mod_item_list,
49 named_field_def_list,
50 named_field_list,
51 token_tree,
52 trait_item_list,
53 use_tree_list,
54 }, 39 },
55}; 40};
41use crate::{
42 parser_api::{CompletedMarker, Marker, Parser},
43 token_set::TokenSet,
44 SyntaxKind::{self, *},
45};
56 46
57pub(crate) fn root(p: &mut Parser) { 47pub(crate) fn root(p: &mut Parser) {
58 let m = p.start(); 48 let m = p.start();
@@ -61,7 +51,6 @@ pub(crate) fn root(p: &mut Parser) {
61 m.complete(p, ROOT); 51 m.complete(p, ROOT);
62} 52}
63 53
64
65#[derive(Clone, Copy, PartialEq, Eq)] 54#[derive(Clone, Copy, PartialEq, Eq)]
66enum BlockLike { 55enum BlockLike {
67 Block, 56 Block,
@@ -69,7 +58,9 @@ enum BlockLike {
69} 58}
70 59
71impl BlockLike { 60impl BlockLike {
72 fn is_block(self) -> bool { self == BlockLike::Block } 61 fn is_block(self) -> bool {
62 self == BlockLike::Block
63 }
73} 64}
74 65
75fn opt_visibility(p: &mut Parser) { 66fn opt_visibility(p: &mut Parser) {
diff --git a/crates/ra_syntax/src/grammar/params.rs b/crates/ra_syntax/src/grammar/params.rs
index 903c25939..b71a72ca3 100644
--- a/crates/ra_syntax/src/grammar/params.rs
+++ b/crates/ra_syntax/src/grammar/params.rs
@@ -61,12 +61,8 @@ fn list_(p: &mut Parser, flavor: Flavor) {
61 m.complete(p, PARAM_LIST); 61 m.complete(p, PARAM_LIST);
62} 62}
63 63
64
65const VALUE_PARAMETER_FIRST: TokenSet = 64const VALUE_PARAMETER_FIRST: TokenSet =
66 token_set_union![ 65 token_set_union![patterns::PATTERN_FIRST, types::TYPE_FIRST,];
67 patterns::PATTERN_FIRST,
68 types::TYPE_FIRST,
69 ];
70 66
71fn value_parameter(p: &mut Parser, flavor: Flavor) { 67fn value_parameter(p: &mut Parser, flavor: Flavor) {
72 let m = p.start(); 68 let m = p.start();
@@ -76,7 +72,7 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
76 if p.at(COLON) || flavor.type_required() { 72 if p.at(COLON) || flavor.type_required() {
77 types::ascription(p) 73 types::ascription(p)
78 } 74 }
79 }, 75 }
80 // test value_parameters_no_patterns 76 // test value_parameters_no_patterns
81 // type F = Box<Fn(a: i32, &b: &i32, &mut c: &i32, ())>; 77 // type F = Box<Fn(a: i32, &b: &i32, &mut c: &i32, ())>;
82 Flavor::OptionalPattern => { 78 Flavor::OptionalPattern => {
@@ -86,13 +82,14 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
86 let la3 = p.nth(3); 82 let la3 = p.nth(3);
87 if la0 == IDENT && la1 == COLON 83 if la0 == IDENT && la1 == COLON
88 || la0 == AMP && la1 == IDENT && la2 == COLON 84 || la0 == AMP && la1 == IDENT && la2 == COLON
89 || la0 == AMP && la1 == MUT_KW && la2 == IDENT && la3 == COLON { 85 || la0 == AMP && la1 == MUT_KW && la2 == IDENT && la3 == COLON
86 {
90 patterns::pattern(p); 87 patterns::pattern(p);
91 types::ascription(p); 88 types::ascription(p);
92 } else { 89 } else {
93 types::type_(p); 90 types::type_(p);
94 } 91 }
95 }, 92 }
96 } 93 }
97 m.complete(p, PARAM); 94 m.complete(p, PARAM);
98} 95}
diff --git a/crates/ra_syntax/src/grammar/paths.rs b/crates/ra_syntax/src/grammar/paths.rs
index b6d44d53a..a35a339cc 100644
--- a/crates/ra_syntax/src/grammar/paths.rs
+++ b/crates/ra_syntax/src/grammar/paths.rs
@@ -97,7 +97,7 @@ fn opt_path_type_args(p: &mut Parser, mode: Mode) {
97 } else { 97 } else {
98 type_args::opt_type_arg_list(p, false) 98 type_args::opt_type_arg_list(p, false)
99 } 99 }
100 }, 100 }
101 Mode::Expr => type_args::opt_type_arg_list(p, true), 101 Mode::Expr => type_args::opt_type_arg_list(p, true),
102 } 102 }
103} 103}
diff --git a/crates/ra_syntax/src/grammar/patterns.rs b/crates/ra_syntax/src/grammar/patterns.rs
index 420bae7a7..9d35dbb3d 100644
--- a/crates/ra_syntax/src/grammar/patterns.rs
+++ b/crates/ra_syntax/src/grammar/patterns.rs
@@ -1,11 +1,10 @@
1use super::*; 1use super::*;
2 2
3pub(super) const PATTERN_FIRST: TokenSet = 3pub(super) const PATTERN_FIRST: TokenSet = token_set_union![
4 token_set_union![ 4 token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE],
5 token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE], 5 expressions::LITERAL_FIRST,
6 expressions::LITERAL_FIRST, 6 paths::PATH_FIRST,
7 paths::PATH_FIRST, 7];
8 ];
9 8
10pub(super) fn pattern(p: &mut Parser) { 9pub(super) fn pattern(p: &mut Parser) {
11 pattern_r(p, PAT_RECOVERY_SET) 10 pattern_r(p, PAT_RECOVERY_SET)
@@ -29,12 +28,13 @@ pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) {
29const PAT_RECOVERY_SET: TokenSet = 28const PAT_RECOVERY_SET: TokenSet =
30 token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]; 29 token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA];
31 30
32
33fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> { 31fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
34 let la0 = p.nth(0); 32 let la0 = p.nth(0);
35 let la1 = p.nth(1); 33 let la1 = p.nth(1);
36 if la0 == REF_KW || la0 == MUT_KW 34 if la0 == REF_KW
37 || (la0 == IDENT && !(la1 == COLONCOLON || la1 == L_PAREN || la1 == L_CURLY)) { 35 || la0 == MUT_KW
36 || (la0 == IDENT && !(la1 == COLONCOLON || la1 == L_PAREN || la1 == L_CURLY))
37 {
38 return Some(bind_pat(p, true)); 38 return Some(bind_pat(p, true));
39 } 39 }
40 if paths::is_path_start(p) { 40 if paths::is_path_start(p) {
@@ -87,7 +87,7 @@ fn path_pat(p: &mut Parser) -> CompletedMarker {
87 field_pat_list(p); 87 field_pat_list(p);
88 STRUCT_PAT 88 STRUCT_PAT
89 } 89 }
90 _ => PATH_PAT 90 _ => PATH_PAT,
91 }; 91 };
92 m.complete(p, kind) 92 m.complete(p, kind)
93} 93}
@@ -195,7 +195,7 @@ fn pat_list(p: &mut Parser, ket: SyntaxKind) {
195 break; 195 break;
196 } 196 }
197 pattern(p) 197 pattern(p)
198 }, 198 }
199 } 199 }
200 if !p.at(ket) { 200 if !p.at(ket) {
201 p.expect(COMMA); 201 p.expect(COMMA);
diff --git a/crates/ra_syntax/src/grammar/type_params.rs b/crates/ra_syntax/src/grammar/type_params.rs
index 79bc95ce4..79f5036b4 100644
--- a/crates/ra_syntax/src/grammar/type_params.rs
+++ b/crates/ra_syntax/src/grammar/type_params.rs
@@ -72,12 +72,8 @@ pub(super) fn bounds_without_colon(p: &mut Parser) {
72 p.eat(QUESTION); 72 p.eat(QUESTION);
73 match p.current() { 73 match p.current() {
74 LIFETIME => p.bump(), 74 LIFETIME => p.bump(),
75 FOR_KW => { 75 FOR_KW => types::for_type(p),
76 types::for_type(p) 76 _ if paths::is_path_start(p) => types::path_type(p),
77 }
78 _ if paths::is_path_start(p) => {
79 types::path_type(p)
80 }
81 _ => break, 77 _ => break,
82 } 78 }
83 if has_paren { 79 if has_paren {
@@ -104,7 +100,7 @@ pub(super) fn opt_where_clause(p: &mut Parser) {
104 p.bump(); 100 p.bump();
105 loop { 101 loop {
106 if !(paths::is_path_start(p) || p.current() == LIFETIME) { 102 if !(paths::is_path_start(p) || p.current() == LIFETIME) {
107 break 103 break;
108 } 104 }
109 where_predicate(p); 105 where_predicate(p);
110 if p.current() != L_CURLY && p.current() != SEMI { 106 if p.current() != L_CURLY && p.current() != SEMI {
@@ -130,7 +126,6 @@ fn where_predicate(p: &mut Parser) {
130 } else { 126 } else {
131 p.error("expected colon") 127 p.error("expected colon")
132 } 128 }
133
134 } 129 }
135 m.complete(p, WHERE_PRED); 130 m.complete(p, WHERE_PRED);
136} 131}
diff --git a/crates/ra_syntax/src/grammar/types.rs b/crates/ra_syntax/src/grammar/types.rs
index 27e5b086e..f308aef89 100644
--- a/crates/ra_syntax/src/grammar/types.rs
+++ b/crates/ra_syntax/src/grammar/types.rs
@@ -1,15 +1,14 @@
1use super::*; 1use super::*;
2 2
3pub(super) const TYPE_FIRST: TokenSet = 3pub(super) const TYPE_FIRST: TokenSet = token_set_union![
4 token_set_union![ 4 token_set![
5 token_set![ 5 L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW,
6 L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW, IMPL_KW, DYN_KW, L_ANGLE, 6 IMPL_KW, DYN_KW, L_ANGLE,
7 ], 7 ],
8 paths::PATH_FIRST, 8 paths::PATH_FIRST,
9 ]; 9];
10 10
11const TYPE_RECOVERY_SET: TokenSet = 11const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA];
12 token_set![R_PAREN, COMMA];
13 12
14pub(super) fn type_(p: &mut Parser) { 13pub(super) fn type_(p: &mut Parser) {
15 match p.current() { 14 match p.current() {
@@ -200,7 +199,6 @@ pub(super) fn for_type(p: &mut Parser) {
200 FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), 199 FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p),
201 _ if paths::is_path_start(p) => path_type_(p, false), 200 _ if paths::is_path_start(p) => path_type_(p, false),
202 _ => p.error("expected a path"), 201 _ => p.error("expected a path"),
203
204 } 202 }
205 m.complete(p, FOR_TYPE); 203 m.complete(p, FOR_TYPE);
206} 204}
diff --git a/crates/ra_syntax/src/lexer/mod.rs b/crates/ra_syntax/src/lexer/mod.rs
index 9dc0b63d6..f388da273 100644
--- a/crates/ra_syntax/src/lexer/mod.rs
+++ b/crates/ra_syntax/src/lexer/mod.rs
@@ -58,12 +58,16 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
58 } 58 }
59 59
60 match c { 60 match c {
61 '#' => if scan_shebang(ptr) { 61 '#' => {
62 return SHEBANG; 62 if scan_shebang(ptr) {
63 }, 63 return SHEBANG;
64 '/' => if let Some(kind) = scan_comment(ptr) { 64 }
65 return kind; 65 }
66 }, 66 '/' => {
67 if let Some(kind) = scan_comment(ptr) {
68 return kind;
69 }
70 }
67 _ => (), 71 _ => (),
68 } 72 }
69 73
diff --git a/crates/ra_syntax/src/lexer/ptr.rs b/crates/ra_syntax/src/lexer/ptr.rs
index c4708cb1c..fa79d8862 100644
--- a/crates/ra_syntax/src/lexer/ptr.rs
+++ b/crates/ra_syntax/src/lexer/ptr.rs
@@ -134,10 +134,10 @@ mod tests {
134 #[test] 134 #[test]
135 fn test_nth_is_p() { 135 fn test_nth_is_p() {
136 let ptr = Ptr::new("test"); 136 let ptr = Ptr::new("test");
137 assert!(ptr.nth_is_p(0,|c| c == 't')); 137 assert!(ptr.nth_is_p(0, |c| c == 't'));
138 assert!(!ptr.nth_is_p(1,|c| c == 't')); 138 assert!(!ptr.nth_is_p(1, |c| c == 't'));
139 assert!(ptr.nth_is_p(3,|c| c == 't')); 139 assert!(ptr.nth_is_p(3, |c| c == 't'));
140 assert!(!ptr.nth_is_p(150,|c| c == 't')); 140 assert!(!ptr.nth_is_p(150, |c| c == 't'));
141 } 141 }
142 142
143 #[test] 143 #[test]
diff --git a/crates/ra_syntax/src/lexer/strings.rs b/crates/ra_syntax/src/lexer/strings.rs
index bceacdcac..5090feae6 100644
--- a/crates/ra_syntax/src/lexer/strings.rs
+++ b/crates/ra_syntax/src/lexer/strings.rs
@@ -71,7 +71,7 @@ pub(crate) fn scan_string(ptr: &mut Ptr) {
71 } 71 }
72 _ => { 72 _ => {
73 ptr.bump(); 73 ptr.bump();
74 }, 74 }
75 } 75 }
76 } 76 }
77} 77}
@@ -90,7 +90,7 @@ pub(crate) fn scan_raw_string(ptr: &mut Ptr) {
90 while let Some(c) = ptr.bump() { 90 while let Some(c) = ptr.bump() {
91 if c == '"' { 91 if c == '"' {
92 let mut hashes_left = hashes; 92 let mut hashes_left = hashes;
93 while ptr.at('#') && hashes_left > 0{ 93 while ptr.at('#') && hashes_left > 0 {
94 hashes_left -= 1; 94 hashes_left -= 1;
95 ptr.bump(); 95 ptr.bump();
96 } 96 }
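
The lexer touched above is exposed through `tokenize`, re-exported from the crate root (see the `lib.rs` hunk below). A small sanity-check sketch of that API, assuming `Token`'s `kind` and `len` fields are public as they are used elsewhere in the workspace:

use ra_syntax::{tokenize, TextUnit};

fn count_trivia(text: &str) -> usize {
    let tokens = tokenize(text);
    // The token lengths partition the input text.
    assert_eq!(
        tokens.iter().map(|t| t.len).sum::<TextUnit>(),
        TextUnit::of_str(text)
    );
    tokens.iter().filter(|t| t.kind.is_trivia()).count()
}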
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs
index 7eba5ee61..7a9718aad 100644
--- a/crates/ra_syntax/src/lib.rs
+++ b/crates/ra_syntax/src/lib.rs
@@ -20,11 +20,11 @@
20#![allow(missing_docs)] 20#![allow(missing_docs)]
21//#![warn(unreachable_pub)] // rust-lang/rust#47816 21//#![warn(unreachable_pub)] // rust-lang/rust#47816
22 22
23extern crate itertools;
24extern crate unicode_xid;
25extern crate drop_bomb; 23extern crate drop_bomb;
24extern crate itertools;
26extern crate parking_lot; 25extern crate parking_lot;
27extern crate rowan; 26extern crate rowan;
27extern crate unicode_xid;
28 28
29#[cfg(test)] 29#[cfg(test)]
30#[macro_use] 30#[macro_use]
@@ -35,33 +35,31 @@ pub mod ast;
35mod lexer; 35mod lexer;
36#[macro_use] 36#[macro_use]
37mod token_set; 37mod token_set;
38mod parser_api;
39mod grammar; 38mod grammar;
39mod parser_api;
40mod parser_impl; 40mod parser_impl;
41mod reparsing; 41mod reparsing;
42 42
43mod syntax_kinds; 43mod syntax_kinds;
44mod yellow; 44pub mod text_utils;
45/// Utilities for simple uses of the parser. 45/// Utilities for simple uses of the parser.
46pub mod utils; 46pub mod utils;
47pub mod text_utils; 47mod yellow;
48 48
49pub use crate::{ 49pub use crate::{
50 rowan::{SmolStr, TextRange, TextUnit},
51 ast::AstNode, 50 ast::AstNode,
52 lexer::{tokenize, Token}, 51 lexer::{tokenize, Token},
53 syntax_kinds::SyntaxKind,
54 yellow::{SyntaxNode, SyntaxNodeRef, OwnedRoot, RefRoot, TreeRoot, SyntaxError, Direction},
55 reparsing::AtomEdit, 52 reparsing::AtomEdit,
53 rowan::{SmolStr, TextRange, TextUnit},
54 syntax_kinds::SyntaxKind,
55 yellow::{Direction, OwnedRoot, RefRoot, SyntaxError, SyntaxNode, SyntaxNodeRef, TreeRoot},
56}; 56};
57 57
58use crate::{ 58use crate::yellow::GreenNode;
59 yellow::{GreenNode},
60};
61 59
62#[derive(Clone, Debug, Hash, PartialEq, Eq)] 60#[derive(Clone, Debug, Hash, PartialEq, Eq)]
63pub struct File { 61pub struct File {
64 root: SyntaxNode 62 root: SyntaxNode,
65} 63}
66 64
67impl File { 65impl File {
@@ -74,21 +72,21 @@ impl File {
74 } 72 }
75 pub fn parse(text: &str) -> File { 73 pub fn parse(text: &str) -> File {
76 let tokens = tokenize(&text); 74 let tokens = tokenize(&text);
77 let (green, errors) = parser_impl::parse_with( 75 let (green, errors) =
78 yellow::GreenBuilder::new(), 76 parser_impl::parse_with(yellow::GreenBuilder::new(), text, &tokens, grammar::root);
79 text, &tokens, grammar::root,
80 );
81 File::new(green, errors) 77 File::new(green, errors)
82 } 78 }
83 pub fn reparse(&self, edit: &AtomEdit) -> File { 79 pub fn reparse(&self, edit: &AtomEdit) -> File {
84 self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit)) 80 self.incremental_reparse(edit)
81 .unwrap_or_else(|| self.full_reparse(edit))
85 } 82 }
86 pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> { 83 pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> {
87 reparsing::incremental_reparse(self.syntax(), edit, self.errors()) 84 reparsing::incremental_reparse(self.syntax(), edit, self.errors())
88 .map(|(green_node, errors)| File::new(green_node, errors)) 85 .map(|(green_node, errors)| File::new(green_node, errors))
89 } 86 }
90 fn full_reparse(&self, edit: &AtomEdit) -> File { 87 fn full_reparse(&self, edit: &AtomEdit) -> File {
91 let text = text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert); 88 let text =
89 text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
92 File::parse(&text) 90 File::parse(&text)
93 } 91 }
94 pub fn ast(&self) -> ast::Root { 92 pub fn ast(&self) -> ast::Root {
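
The `File` API reformatted above is the crate's main entry point: `parse` for a full parse, `reparse` with an `AtomEdit` for the incremental path with a full-reparse fallback. A usage sketch (the byte offsets are specific to this example string, and `errors()` is assumed to return the collected syntax errors):

use ra_syntax::{AtomEdit, File, TextRange, TextUnit};

fn edit_and_reparse() {
    let file = File::parse("fn foo() { 1 + 1 }");
    // Replace the `1 + 1` expression (bytes 11..16 of the text above).
    let edit = AtomEdit::replace(
        TextRange::from_to(TextUnit::from(11), TextUnit::from(16)),
        "2 * 3".to_string(),
    );
    // reparse() tries incremental_reparse() first, then falls back to File::parse.
    let new_file = file.reparse(&edit);
    assert!(new_file.errors().is_empty());
}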
diff --git a/crates/ra_syntax/src/parser_api.rs b/crates/ra_syntax/src/parser_api.rs
index cc23bb75e..42046d36f 100644
--- a/crates/ra_syntax/src/parser_api.rs
+++ b/crates/ra_syntax/src/parser_api.rs
@@ -1,8 +1,8 @@
1use crate::{ 1use crate::{
2 token_set::TokenSet, 2 drop_bomb::DropBomb,
3 parser_impl::ParserImpl, 3 parser_impl::ParserImpl,
4 token_set::TokenSet,
4 SyntaxKind::{self, ERROR}, 5 SyntaxKind::{self, ERROR},
5 drop_bomb::DropBomb,
6}; 6};
7 7
8/// `Parser` struct provides the low-level API for 8/// `Parser` struct provides the low-level API for
@@ -116,9 +116,7 @@ impl<'t> Parser<'t> {
116 116
117 /// Create an error node and consume the next token. 117 /// Create an error node and consume the next token.
118 pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) { 118 pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) {
119 if self.at(SyntaxKind::L_CURLY) 119 if self.at(SyntaxKind::L_CURLY) || self.at(SyntaxKind::R_CURLY) || self.at_ts(recovery) {
120 || self.at(SyntaxKind::R_CURLY)
121 || self.at_ts(recovery) {
122 self.error(message); 120 self.error(message);
123 } else { 121 } else {
124 let m = self.start(); 122 let m = self.start();
diff --git a/crates/ra_syntax/src/parser_impl/event.rs b/crates/ra_syntax/src/parser_impl/event.rs
index 928d2cc7a..79fa21389 100644
--- a/crates/ra_syntax/src/parser_impl/event.rs
+++ b/crates/ra_syntax/src/parser_impl/event.rs
@@ -7,14 +7,14 @@
7//! tree builder: the parser produces a stream of events like 7//! tree builder: the parser produces a stream of events like
8//! `start node`, `finish node`, and `FileBuilder` converts 8//! `start node`, `finish node`, and `FileBuilder` converts
9//! this stream to a real tree. 9//! this stream to a real tree.
10use std::mem;
11use crate::{ 10use crate::{
12 TextUnit, TextRange, SmolStr,
13 lexer::Token, 11 lexer::Token,
14 parser_impl::Sink, 12 parser_impl::Sink,
13 SmolStr,
15 SyntaxKind::{self, *}, 14 SyntaxKind::{self, *},
15 TextRange, TextUnit,
16}; 16};
17 17use std::mem;
18 18
19/// `Parser` produces a flat list of `Event`s. 19/// `Parser` produces a flat list of `Event`s.
20/// They are converted to a tree-structure in 20/// They are converted to a tree-structure in
@@ -89,20 +89,28 @@ pub(super) struct EventProcessor<'a, S: Sink> {
89} 89}
90 90
91impl<'a, S: Sink> EventProcessor<'a, S> { 91impl<'a, S: Sink> EventProcessor<'a, S> {
92 pub(super) fn new(sink: S, text: &'a str, tokens: &'a[Token], events: &'a mut [Event]) -> EventProcessor<'a, S> { 92 pub(super) fn new(
93 sink: S,
94 text: &'a str,
95 tokens: &'a [Token],
96 events: &'a mut [Event],
97 ) -> EventProcessor<'a, S> {
93 EventProcessor { 98 EventProcessor {
94 sink, 99 sink,
95 text_pos: 0.into(), 100 text_pos: 0.into(),
96 text, 101 text,
97 token_pos: 0, 102 token_pos: 0,
98 tokens, 103 tokens,
99 events 104 events,
100 } 105 }
101 } 106 }
102 107
103 pub(super) fn process(mut self) -> S { 108 pub(super) fn process(mut self) -> S {
104 fn tombstone() -> Event { 109 fn tombstone() -> Event {
105 Event::Start { kind: TOMBSTONE, forward_parent: None } 110 Event::Start {
111 kind: TOMBSTONE,
112 forward_parent: None,
113 }
106 } 114 }
107 let mut forward_parents = Vec::new(); 115 let mut forward_parents = Vec::new();
108 116
@@ -112,7 +120,10 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
112 kind: TOMBSTONE, .. 120 kind: TOMBSTONE, ..
113 } => (), 121 } => (),
114 122
115 Event::Start { kind, forward_parent } => { 123 Event::Start {
124 kind,
125 forward_parent,
126 } => {
116 forward_parents.push(kind); 127 forward_parents.push(kind);
117 let mut idx = i; 128 let mut idx = i;
118 let mut fp = forward_parent; 129 let mut fp = forward_parent;
@@ -125,7 +136,7 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
125 } => { 136 } => {
126 forward_parents.push(kind); 137 forward_parents.push(kind);
127 forward_parent 138 forward_parent
128 }, 139 }
129 _ => unreachable!(), 140 _ => unreachable!(),
130 }; 141 };
131 } 142 }
@@ -136,7 +147,7 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
136 Event::Finish => { 147 Event::Finish => {
137 let last = i == self.events.len() - 1; 148 let last = i == self.events.len() - 1;
138 self.finish(last); 149 self.finish(last);
139 }, 150 }
140 Event::Token { kind, n_raw_tokens } => { 151 Event::Token { kind, n_raw_tokens } => {
141 self.eat_ws(); 152 self.eat_ws();
142 let n_raw_tokens = n_raw_tokens as usize; 153 let n_raw_tokens = n_raw_tokens as usize;
@@ -162,19 +173,16 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
162 .take_while(|it| it.kind.is_trivia()) 173 .take_while(|it| it.kind.is_trivia())
163 .count(); 174 .count();
164 let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias]; 175 let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias];
165 let mut trivia_end = self.text_pos + leading_trivias 176 let mut trivia_end =
166 .iter() 177 self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextUnit>();
167 .map(|it| it.len)
168 .sum::<TextUnit>();
169 178
170 let n_attached_trivias = { 179 let n_attached_trivias = {
171 let leading_trivias = leading_trivias.iter().rev() 180 let leading_trivias = leading_trivias.iter().rev().map(|it| {
172 .map(|it| { 181 let next_end = trivia_end - it.len;
173 let next_end = trivia_end - it.len; 182 let range = TextRange::from_to(next_end, trivia_end);
174 let range = TextRange::from_to(next_end, trivia_end); 183 trivia_end = next_end;
175 trivia_end = next_end; 184 (it.kind, &self.text[range])
176 (it.kind, &self.text[range]) 185 });
177 });
178 n_attached_trivias(kind, leading_trivias) 186 n_attached_trivias(kind, leading_trivias)
179 }; 187 };
180 self.eat_n_trivias(n_trivias - n_attached_trivias); 188 self.eat_n_trivias(n_trivias - n_attached_trivias);
@@ -215,7 +223,10 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
215 } 223 }
216} 224}
217 225
218fn n_attached_trivias<'a>(kind: SyntaxKind, trivias: impl Iterator<Item=(SyntaxKind, &'a str)>) -> usize { 226fn n_attached_trivias<'a>(
227 kind: SyntaxKind,
228 trivias: impl Iterator<Item = (SyntaxKind, &'a str)>,
229) -> usize {
219 match kind { 230 match kind {
220 STRUCT_DEF | ENUM_DEF | FN_DEF | TRAIT_DEF | MODULE => { 231 STRUCT_DEF | ENUM_DEF | FN_DEF | TRAIT_DEF | MODULE => {
221 let mut res = 0; 232 let mut res = 0;
@@ -236,5 +247,4 @@ fn n_attached_trivias<'a>(kind: SyntaxKind, trivias: impl Iterator<Item=(SyntaxK
236 } 247 }
237 _ => 0, 248 _ => 0,
238 } 249 }
239
240} 250}
diff --git a/crates/ra_syntax/src/parser_impl/mod.rs b/crates/ra_syntax/src/parser_impl/mod.rs
index c2a6448e7..2b026d61e 100644
--- a/crates/ra_syntax/src/parser_impl/mod.rs
+++ b/crates/ra_syntax/src/parser_impl/mod.rs
@@ -4,13 +4,13 @@ mod input;
4use std::cell::Cell; 4use std::cell::Cell;
5 5
6use crate::{ 6use crate::{
7 TextUnit, SmolStr,
8 lexer::Token, 7 lexer::Token,
9 parser_api::Parser, 8 parser_api::Parser,
10 parser_impl::{ 9 parser_impl::{
11 event::{EventProcessor, Event}, 10 event::{Event, EventProcessor},
12 input::{InputPosition, ParserInput}, 11 input::{InputPosition, ParserInput},
13 }, 12 },
13 SmolStr, TextUnit,
14}; 14};
15 15
16use crate::SyntaxKind::{self, EOF, TOMBSTONE}; 16use crate::SyntaxKind::{self, EOF, TOMBSTONE};
@@ -86,7 +86,9 @@ impl<'t> ParserImpl<'t> {
86 let c2 = self.inp.kind(self.pos + 1); 86 let c2 = self.inp.kind(self.pos + 1);
87 let c3 = self.inp.kind(self.pos + 2); 87 let c3 = self.inp.kind(self.pos + 2);
88 if self.inp.start(self.pos + 1) == self.inp.start(self.pos) + self.inp.len(self.pos) 88 if self.inp.start(self.pos + 1) == self.inp.start(self.pos) + self.inp.len(self.pos)
89 && self.inp.start(self.pos + 2) == self.inp.start(self.pos + 1) + self.inp.len(self.pos + 1){ 89 && self.inp.start(self.pos + 2)
90 == self.inp.start(self.pos + 1) + self.inp.len(self.pos + 1)
91 {
90 Some((c1, c2, c3)) 92 Some((c1, c2, c3))
91 } else { 93 } else {
92 None 94 None
@@ -138,10 +140,7 @@ impl<'t> ParserImpl<'t> {
138 140
139 fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) { 141 fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
140 self.pos += u32::from(n_raw_tokens); 142 self.pos += u32::from(n_raw_tokens);
141 self.event(Event::Token { 143 self.event(Event::Token { kind, n_raw_tokens });
142 kind,
143 n_raw_tokens,
144 });
145 } 144 }
146 145
147 pub(super) fn error(&mut self, msg: String) { 146 pub(super) fn error(&mut self, msg: String) {
diff --git a/crates/ra_syntax/src/reparsing.rs b/crates/ra_syntax/src/reparsing.rs
index 16272fe88..a0014e016 100644
--- a/crates/ra_syntax/src/reparsing.rs
+++ b/crates/ra_syntax/src/reparsing.rs
@@ -1,14 +1,11 @@
1use crate::algo; 1use crate::algo;
2use crate::grammar; 2use crate::grammar;
3use crate::lexer::{tokenize, Token}; 3use crate::lexer::{tokenize, Token};
4use crate::yellow::{self, GreenNode, SyntaxNodeRef, SyntaxError};
5use crate::parser_impl;
6use crate::parser_api::Parser; 4use crate::parser_api::Parser;
7use crate::{ 5use crate::parser_impl;
8 TextUnit, TextRange,
9 SyntaxKind::*,
10};
11use crate::text_utils::replace_range; 6use crate::text_utils::replace_range;
7use crate::yellow::{self, GreenNode, SyntaxError, SyntaxNodeRef};
8use crate::{SyntaxKind::*, TextRange, TextUnit};
12 9
13#[derive(Debug, Clone)] 10#[derive(Debug, Clone)]
14pub struct AtomEdit { 11pub struct AtomEdit {
@@ -18,7 +15,10 @@ pub struct AtomEdit {
18 15
19impl AtomEdit { 16impl AtomEdit {
20 pub fn replace(range: TextRange, replace_with: String) -> AtomEdit { 17 pub fn replace(range: TextRange, replace_with: String) -> AtomEdit {
21 AtomEdit { delete: range, insert: replace_with } 18 AtomEdit {
19 delete: range,
20 insert: replace_with,
21 }
22 } 22 }
23 23
24 pub fn delete(range: TextRange) -> AtomEdit { 24 pub fn delete(range: TextRange) -> AtomEdit {
@@ -48,12 +48,7 @@ fn reparse_leaf<'node>(
48) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> { 48) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> {
49 let node = algo::find_covering_node(node, edit.delete); 49 let node = algo::find_covering_node(node, edit.delete);
50 match node.kind() { 50 match node.kind() {
51 | WHITESPACE 51 WHITESPACE | COMMENT | DOC_COMMENT | IDENT | STRING | RAW_STRING => {
52 | COMMENT
53 | DOC_COMMENT
54 | IDENT
55 | STRING
56 | RAW_STRING => {
57 let text = get_text_after_edit(node, &edit); 52 let text = get_text_after_edit(node, &edit);
58 let tokens = tokenize(&text); 53 let tokens = tokenize(&text);
59 let token = match tokens[..] { 54 let token = match tokens[..] {
@@ -84,10 +79,7 @@ fn reparse_block<'node>(
84 return None; 79 return None;
85 } 80 }
86 let (green, new_errors) = 81 let (green, new_errors) =
87 parser_impl::parse_with( 82 parser_impl::parse_with(yellow::GreenBuilder::new(), &text, &tokens, reparser);
88 yellow::GreenBuilder::new(),
89 &text, &tokens, reparser,
90 );
91 Some((node, green, new_errors)) 83 Some((node, green, new_errors))
92} 84}
93 85
@@ -101,9 +93,7 @@ fn get_text_after_edit(node: SyntaxNodeRef, edit: &AtomEdit) -> String {
101 93
102fn is_contextual_kw(text: &str) -> bool { 94fn is_contextual_kw(text: &str) -> bool {
103 match text { 95 match text {
104 | "auto" 96 "auto" | "default" | "union" => true,
105 | "default"
106 | "union" => true,
107 _ => false, 97 _ => false,
108 } 98 }
109} 99}
@@ -113,7 +103,8 @@ fn find_reparsable_node<'node>(
113 range: TextRange, 103 range: TextRange,
114) -> Option<(SyntaxNodeRef<'node>, fn(&mut Parser))> { 104) -> Option<(SyntaxNodeRef<'node>, fn(&mut Parser))> {
115 let node = algo::find_covering_node(node, range); 105 let node = algo::find_covering_node(node, range);
116 return node.ancestors() 106 return node
107 .ancestors()
117 .filter_map(|node| reparser(node).map(|r| (node, r))) 108 .filter_map(|node| reparser(node).map(|r| (node, r)))
118 .next(); 109 .next();
119 110
@@ -145,17 +136,20 @@ fn find_reparsable_node<'node>(
145fn is_balanced(tokens: &[Token]) -> bool { 136fn is_balanced(tokens: &[Token]) -> bool {
146 if tokens.len() == 0 137 if tokens.len() == 0
147 || tokens.first().unwrap().kind != L_CURLY 138 || tokens.first().unwrap().kind != L_CURLY
148 || tokens.last().unwrap().kind != R_CURLY { 139 || tokens.last().unwrap().kind != R_CURLY
140 {
149 return false; 141 return false;
150 } 142 }
151 let mut balance = 0usize; 143 let mut balance = 0usize;
152 for t in tokens.iter() { 144 for t in tokens.iter() {
153 match t.kind { 145 match t.kind {
154 L_CURLY => balance += 1, 146 L_CURLY => balance += 1,
155 R_CURLY => balance = match balance.checked_sub(1) { 147 R_CURLY => {
156 Some(b) => b, 148 balance = match balance.checked_sub(1) {
157 None => return false, 149 Some(b) => b,
158 }, 150 None => return false,
151 }
152 }
159 _ => (), 153 _ => (),
160 } 154 }
161 } 155 }
@@ -191,24 +185,14 @@ fn merge_errors(
191#[cfg(test)] 185#[cfg(test)]
192mod tests { 186mod tests {
193 use super::{ 187 use super::{
194 super::{ 188 super::{test_utils::extract_range, text_utils::replace_range, utils::dump_tree, File},
195 File, 189 reparse_block, reparse_leaf, AtomEdit, GreenNode, SyntaxError, SyntaxNodeRef,
196 test_utils::extract_range,
197 text_utils::replace_range,
198 utils::dump_tree,
199 },
200 reparse_leaf, reparse_block, AtomEdit, GreenNode, SyntaxError, SyntaxNodeRef,
201 }; 190 };
202 191
203 fn do_check<F>( 192 fn do_check<F>(before: &str, replace_with: &str, reparser: F)
204 before: &str, 193 where
205 replace_with: &str, 194 for<'a> F: Fn(SyntaxNodeRef<'a>, &AtomEdit)
206 reparser: F, 195 -> Option<(SyntaxNodeRef<'a>, GreenNode, Vec<SyntaxError>)>,
207 ) where
208 for<'a> F: Fn(
209 SyntaxNodeRef<'a>,
210 &AtomEdit,
211 ) -> Option<(SyntaxNodeRef<'a>, GreenNode, Vec<SyntaxError>)>
212 { 196 {
213 let (range, before) = extract_range(before); 197 let (range, before) = extract_range(before);
214 let after = replace_range(before.clone(), range, replace_with); 198 let after = replace_range(before.clone(), range, replace_with);
@@ -216,7 +200,10 @@ mod tests {
216 let fully_reparsed = File::parse(&after); 200 let fully_reparsed = File::parse(&after);
217 let incrementally_reparsed = { 201 let incrementally_reparsed = {
218 let f = File::parse(&before); 202 let f = File::parse(&before);
219 let edit = AtomEdit { delete: range, insert: replace_with.to_string() }; 203 let edit = AtomEdit {
204 delete: range,
205 insert: replace_with.to_string(),
206 };
220 let (node, green, new_errors) = 207 let (node, green, new_errors) =
221 reparser(f.syntax(), &edit).expect("cannot incrementally reparse"); 208 reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
222 let green_root = node.replace_with(green); 209 let green_root = node.replace_with(green);
@@ -232,113 +219,183 @@ mod tests {
232 219
233 #[test] 220 #[test]
234 fn reparse_block_tests() { 221 fn reparse_block_tests() {
235 let do_check = |before, replace_to| 222 let do_check = |before, replace_to| do_check(before, replace_to, reparse_block);
236 do_check(before, replace_to, reparse_block);
237 223
238 do_check(r" 224 do_check(
225 r"
239fn foo() { 226fn foo() {
240 let x = foo + <|>bar<|> 227 let x = foo + <|>bar<|>
241} 228}
242", "baz"); 229",
243 do_check(r" 230 "baz",
231 );
232 do_check(
233 r"
244fn foo() { 234fn foo() {
245 let x = foo<|> + bar<|> 235 let x = foo<|> + bar<|>
246} 236}
247", "baz"); 237",
248 do_check(r" 238 "baz",
239 );
240 do_check(
241 r"
249struct Foo { 242struct Foo {
250 f: foo<|><|> 243 f: foo<|><|>
251} 244}
252", ",\n g: (),"); 245",
253 do_check(r" 246 ",\n g: (),",
247 );
248 do_check(
249 r"
254fn foo { 250fn foo {
255 let; 251 let;
256 1 + 1; 252 1 + 1;
257 <|>92<|>; 253 <|>92<|>;
258} 254}
259", "62"); 255",
260 do_check(r" 256 "62",
257 );
258 do_check(
259 r"
261mod foo { 260mod foo {
262 fn <|><|> 261 fn <|><|>
263} 262}
264", "bar"); 263",
265 do_check(r" 264 "bar",
265 );
266 do_check(
267 r"
266trait Foo { 268trait Foo {
267 type <|>Foo<|>; 269 type <|>Foo<|>;
268} 270}
269", "Output"); 271",
270 do_check(r" 272 "Output",
273 );
274 do_check(
275 r"
271impl IntoIterator<Item=i32> for Foo { 276impl IntoIterator<Item=i32> for Foo {
272 f<|><|> 277 f<|><|>
273} 278}
274", "n next("); 279",
275 do_check(r" 280 "n next(",
281 );
282 do_check(
283 r"
276use a::b::{foo,<|>,bar<|>}; 284use a::b::{foo,<|>,bar<|>};
277 ", "baz"); 285 ",
278 do_check(r" 286 "baz",
287 );
288 do_check(
289 r"
279pub enum A { 290pub enum A {
280 Foo<|><|> 291 Foo<|><|>
281} 292}
282", "\nBar;\n"); 293",
283 do_check(r" 294 "\nBar;\n",
295 );
296 do_check(
297 r"
284foo!{a, b<|><|> d} 298foo!{a, b<|><|> d}
285", ", c[3]"); 299",
286 do_check(r" 300 ", c[3]",
301 );
302 do_check(
303 r"
287fn foo() { 304fn foo() {
288 vec![<|><|>] 305 vec![<|><|>]
289} 306}
290", "123"); 307",
291 do_check(r" 308 "123",
309 );
310 do_check(
311 r"
292extern { 312extern {
293 fn<|>;<|> 313 fn<|>;<|>
294} 314}
295", " exit(code: c_int)"); 315",
316 " exit(code: c_int)",
317 );
296 } 318 }
297 319
298 #[test] 320 #[test]
299 fn reparse_leaf_tests() { 321 fn reparse_leaf_tests() {
300 let do_check = |before, replace_to| 322 let do_check = |before, replace_to| do_check(before, replace_to, reparse_leaf);
301 do_check(before, replace_to, reparse_leaf);
302 323
303 do_check(r"<|><|> 324 do_check(
325 r"<|><|>
304fn foo() -> i32 { 1 } 326fn foo() -> i32 { 1 }
305", "\n\n\n \n"); 327",
306 do_check(r" 328 "\n\n\n \n",
329 );
330 do_check(
331 r"
307fn foo() -> <|><|> {} 332fn foo() -> <|><|> {}
308", " \n"); 333",
309 do_check(r" 334 " \n",
335 );
336 do_check(
337 r"
310fn <|>foo<|>() -> i32 { 1 } 338fn <|>foo<|>() -> i32 { 1 }
311", "bar"); 339",
312 do_check(r" 340 "bar",
341 );
342 do_check(
343 r"
313fn foo<|><|>foo() { } 344fn foo<|><|>foo() { }
314", "bar"); 345",
315 do_check(r" 346 "bar",
347 );
348 do_check(
349 r"
316fn foo /* <|><|> */ () {} 350fn foo /* <|><|> */ () {}
317", "some comment"); 351",
318 do_check(r" 352 "some comment",
353 );
354 do_check(
355 r"
319fn baz <|><|> () {} 356fn baz <|><|> () {}
320", " \t\t\n\n"); 357",
321 do_check(r" 358 " \t\t\n\n",
359 );
360 do_check(
361 r"
322fn baz <|><|> () {} 362fn baz <|><|> () {}
323", " \t\t\n\n"); 363",
324 do_check(r" 364 " \t\t\n\n",
365 );
366 do_check(
367 r"
325/// foo <|><|>omment 368/// foo <|><|>omment
326mod { } 369mod { }
327", "c"); 370",
328 do_check(r#" 371 "c",
372 );
373 do_check(
374 r#"
329fn -> &str { "Hello<|><|>" } 375fn -> &str { "Hello<|><|>" }
330"#, ", world"); 376"#,
331 do_check(r#" 377 ", world",
378 );
379 do_check(
380 r#"
332fn -> &str { // "Hello<|><|>" 381fn -> &str { // "Hello<|><|>"
333"#, ", world"); 382"#,
334 do_check(r##" 383 ", world",
384 );
385 do_check(
386 r##"
335fn -> &str { r#"Hello<|><|>"# 387fn -> &str { r#"Hello<|><|>"#
336"##, ", world"); 388"##,
337 do_check(r" 389 ", world",
390 );
391 do_check(
392 r"
338#[derive(<|>Copy<|>)] 393#[derive(<|>Copy<|>)]
339enum Foo { 394enum Foo {
340 395
341} 396}
342", "Clone"); 397",
398 "Clone",
399 );
343 } 400 }
344} 401}
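
For orientation, the round trip exercised by `do_check` in the reparsing tests above looks roughly like the sketch below. It is reconstructed from the diff and assumes the test-module imports shown earlier in the hunk (`extract_range`, `replace_range`, `dump_tree`, `File`, `reparse_block`, `AtomEdit`); the step that wraps the spliced green root back into a `File` falls outside the lines shown, so it is only indicated in a comment.

    fn check_block_reparse(before_with_markers: &str, replace_with: &str) {
        // The `<|> ... <|>` markers in the fixture delimit the range to replace;
        // extract_range strips them and returns that range plus the clean text.
        let (range, before) = extract_range(before_with_markers);
        let after = replace_range(before.clone(), range, replace_with);

        // Reference result: reparse the whole edited text from scratch.
        let fully_reparsed = File::parse(&after);

        // Incremental path: parse the original once, describe the edit, and ask
        // the block reparser for a fresh green subtree covering only that block.
        let f = File::parse(&before);
        let edit = AtomEdit {
            delete: range,
            insert: replace_with.to_string(),
        };
        let (node, green, _new_errors) =
            reparse_block(f.syntax(), &edit).expect("cannot incrementally reparse");
        let _green_root = node.replace_with(green);

        // ...rebuild a File from _green_root (outside the hunk shown), then assert
        // that dump_tree of both trees is byte-for-byte identical, as the test does.
        let _ = dump_tree(fully_reparsed.syntax());
    }
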
diff --git a/crates/ra_syntax/src/syntax_kinds/mod.rs b/crates/ra_syntax/src/syntax_kinds/mod.rs
index 3041e5633..0fcd07cbf 100644
--- a/crates/ra_syntax/src/syntax_kinds/mod.rs
+++ b/crates/ra_syntax/src/syntax_kinds/mod.rs
@@ -1,7 +1,7 @@
1mod generated; 1mod generated;
2 2
3use std::fmt;
4use crate::SyntaxKind::*; 3use crate::SyntaxKind::*;
4use std::fmt;
5 5
6pub use self::generated::SyntaxKind; 6pub use self::generated::SyntaxKind;
7 7
diff --git a/crates/ra_syntax/src/text_utils.rs b/crates/ra_syntax/src/text_utils.rs
index adf26ef30..abda5ec39 100644
--- a/crates/ra_syntax/src/text_utils.rs
+++ b/crates/ra_syntax/src/text_utils.rs
@@ -23,4 +23,4 @@ pub fn replace_range(mut text: String, range: TextRange, replace_with: &str) ->
23 let end = u32::from(range.end()) as usize; 23 let end = u32::from(range.end()) as usize;
24 text.replace_range(start..end, replace_with); 24 text.replace_range(start..end, replace_with);
25 text 25 text
26} \ No newline at end of file 26}
diff --git a/crates/ra_syntax/src/utils.rs b/crates/ra_syntax/src/utils.rs
index df1f4b372..27248ff32 100644
--- a/crates/ra_syntax/src/utils.rs
+++ b/crates/ra_syntax/src/utils.rs
@@ -1,8 +1,8 @@
1use std::fmt::Write;
2use crate::{ 1use crate::{
3 algo::walk::{walk, WalkEvent}, 2 algo::walk::{walk, WalkEvent},
4 SyntaxKind, File, SyntaxNodeRef 3 File, SyntaxKind, SyntaxNodeRef,
5}; 4};
5use std::fmt::Write;
6 6
7/// Parse a file and create a string representation of the resulting parse tree. 7/// Parse a file and create a string representation of the resulting parse tree.
8pub fn dump_tree(syntax: SyntaxNodeRef) -> String { 8pub fn dump_tree(syntax: SyntaxNodeRef) -> String {
@@ -58,9 +58,7 @@ pub(crate) fn validate_block_structure(root: SyntaxNodeRef) {
58 let mut stack = Vec::new(); 58 let mut stack = Vec::new();
59 for node in root.descendants() { 59 for node in root.descendants() {
60 match node.kind() { 60 match node.kind() {
61 SyntaxKind::L_CURLY => { 61 SyntaxKind::L_CURLY => stack.push(node),
62 stack.push(node)
63 }
64 SyntaxKind::R_CURLY => { 62 SyntaxKind::R_CURLY => {
65 if let Some(pair) = stack.pop() { 63 if let Some(pair) = stack.pop() {
66 assert_eq!( 64 assert_eq!(
diff --git a/crates/ra_syntax/src/yellow/builder.rs b/crates/ra_syntax/src/yellow/builder.rs
index 67a1a382b..d64053409 100644
--- a/crates/ra_syntax/src/yellow/builder.rs
+++ b/crates/ra_syntax/src/yellow/builder.rs
@@ -1,10 +1,9 @@
1use rowan::GreenNodeBuilder;
2use crate::{ 1use crate::{
3 TextUnit, SmolStr,
4 parser_impl::Sink, 2 parser_impl::Sink,
5 yellow::{GreenNode, SyntaxError, RaTypes}, 3 yellow::{GreenNode, RaTypes, SyntaxError},
6 SyntaxKind, 4 SmolStr, SyntaxKind, TextUnit,
7}; 5};
6use rowan::GreenNodeBuilder;
8 7
9pub(crate) struct GreenBuilder { 8pub(crate) struct GreenBuilder {
10 errors: Vec<SyntaxError>, 9 errors: Vec<SyntaxError>,
@@ -36,7 +35,10 @@ impl Sink for GreenBuilder {
36 } 35 }
37 36
38 fn error(&mut self, message: String, offset: TextUnit) { 37 fn error(&mut self, message: String, offset: TextUnit) {
39 let error = SyntaxError { msg: message, offset }; 38 let error = SyntaxError {
39 msg: message,
40 offset,
41 };
40 self.errors.push(error) 42 self.errors.push(error)
41 } 43 }
42 44
diff --git a/crates/ra_syntax/src/yellow/mod.rs b/crates/ra_syntax/src/yellow/mod.rs
index ab9bca0f0..b5c9da813 100644
--- a/crates/ra_syntax/src/yellow/mod.rs
+++ b/crates/ra_syntax/src/yellow/mod.rs
@@ -1,16 +1,16 @@
1mod builder; 1mod builder;
2mod syntax_text; 2mod syntax_text;
3 3
4use self::syntax_text::SyntaxText;
5use crate::{SmolStr, SyntaxKind, TextRange, TextUnit};
6use rowan::Types;
4use std::{ 7use std::{
5 fmt, 8 fmt,
6 hash::{Hash, Hasher}, 9 hash::{Hash, Hasher},
7}; 10};
8use rowan::Types;
9use crate::{SyntaxKind, TextUnit, TextRange, SmolStr};
10use self::syntax_text::SyntaxText;
11 11
12pub use rowan::{TreeRoot};
13pub(crate) use self::builder::GreenBuilder; 12pub(crate) use self::builder::GreenBuilder;
13pub use rowan::TreeRoot;
14 14
15#[derive(Debug, Clone, Copy)] 15#[derive(Debug, Clone, Copy)]
16pub enum RaTypes {} 16pub enum RaTypes {}
@@ -31,9 +31,7 @@ pub struct SyntaxError {
31} 31}
32 32
33#[derive(Clone, Copy)] 33#[derive(Clone, Copy)]
34pub struct SyntaxNode<R: TreeRoot<RaTypes> = OwnedRoot>( 34pub struct SyntaxNode<R: TreeRoot<RaTypes> = OwnedRoot>(::rowan::SyntaxNode<RaTypes, R>);
35 ::rowan::SyntaxNode<RaTypes, R>,
36);
37pub type SyntaxNodeRef<'a> = SyntaxNode<RefRoot<'a>>; 35pub type SyntaxNodeRef<'a> = SyntaxNode<RefRoot<'a>>;
38 36
39impl<R1, R2> PartialEq<SyntaxNode<R1>> for SyntaxNode<R2> 37impl<R1, R2> PartialEq<SyntaxNode<R1>> for SyntaxNode<R2>
@@ -69,16 +67,16 @@ impl<'a> SyntaxNodeRef<'a> {
69 pub fn leaf_text(self) -> Option<&'a SmolStr> { 67 pub fn leaf_text(self) -> Option<&'a SmolStr> {
70 self.0.leaf_text() 68 self.0.leaf_text()
71 } 69 }
72 pub fn ancestors(self) -> impl Iterator<Item=SyntaxNodeRef<'a>> { 70 pub fn ancestors(self) -> impl Iterator<Item = SyntaxNodeRef<'a>> {
73 crate::algo::generate(Some(self), |&node| node.parent()) 71 crate::algo::generate(Some(self), |&node| node.parent())
74 } 72 }
75 pub fn descendants(self) -> impl Iterator<Item=SyntaxNodeRef<'a>> { 73 pub fn descendants(self) -> impl Iterator<Item = SyntaxNodeRef<'a>> {
76 crate::algo::walk::walk(self).filter_map(|event| match event { 74 crate::algo::walk::walk(self).filter_map(|event| match event {
77 crate::algo::walk::WalkEvent::Enter(node) => Some(node), 75 crate::algo::walk::WalkEvent::Enter(node) => Some(node),
78 crate::algo::walk::WalkEvent::Exit(_) => None, 76 crate::algo::walk::WalkEvent::Exit(_) => None,
79 }) 77 })
80 } 78 }
81 pub fn siblings(self, direction: Direction) -> impl Iterator<Item=SyntaxNodeRef<'a>> { 79 pub fn siblings(self, direction: Direction) -> impl Iterator<Item = SyntaxNodeRef<'a>> {
82 crate::algo::generate(Some(self), move |&node| match direction { 80 crate::algo::generate(Some(self), move |&node| match direction {
83 Direction::Next => node.next_sibling(), 81 Direction::Next => node.next_sibling(),
84 Direction::Prev => node.prev_sibling(), 82 Direction::Prev => node.prev_sibling(),
@@ -142,9 +140,7 @@ impl<R: TreeRoot<RaTypes>> fmt::Debug for SyntaxNode<R> {
142} 140}
143 141
144#[derive(Debug)] 142#[derive(Debug)]
145pub struct SyntaxNodeChildren<R: TreeRoot<RaTypes>>( 143pub struct SyntaxNodeChildren<R: TreeRoot<RaTypes>>(::rowan::SyntaxNodeChildren<RaTypes, R>);
146 ::rowan::SyntaxNodeChildren<RaTypes, R>
147);
148 144
149impl<R: TreeRoot<RaTypes>> Iterator for SyntaxNodeChildren<R> { 145impl<R: TreeRoot<RaTypes>> Iterator for SyntaxNodeChildren<R> {
150 type Item = SyntaxNode<R>; 146 type Item = SyntaxNode<R>;
@@ -154,7 +150,6 @@ impl<R: TreeRoot<RaTypes>> Iterator for SyntaxNodeChildren<R> {
154 } 150 }
155} 151}
156 152
157
158fn has_short_text(kind: SyntaxKind) -> bool { 153fn has_short_text(kind: SyntaxKind) -> bool {
159 use crate::SyntaxKind::*; 154 use crate::SyntaxKind::*;
160 match kind { 155 match kind {
diff --git a/crates/ra_syntax/src/yellow/syntax_text.rs b/crates/ra_syntax/src/yellow/syntax_text.rs
index ae33b993d..5395ca90b 100644
--- a/crates/ra_syntax/src/yellow/syntax_text.rs
+++ b/crates/ra_syntax/src/yellow/syntax_text.rs
@@ -1,10 +1,8 @@
1use std::{ 1use std::{fmt, ops};
2 fmt, ops,
3};
4 2
5use crate::{ 3use crate::{
4 text_utils::{contains_offset_nonstrict, intersect},
6 SyntaxNodeRef, TextRange, TextUnit, 5 SyntaxNodeRef, TextRange, TextUnit,
7 text_utils::{intersect, contains_offset_nonstrict},
8}; 6};
9 7
10#[derive(Clone)] 8#[derive(Clone)]
@@ -17,19 +15,17 @@ impl<'a> SyntaxText<'a> {
17 pub(crate) fn new(node: SyntaxNodeRef<'a>) -> SyntaxText<'a> { 15 pub(crate) fn new(node: SyntaxNodeRef<'a>) -> SyntaxText<'a> {
18 SyntaxText { 16 SyntaxText {
19 node, 17 node,
20 range: node.range() 18 range: node.range(),
21 } 19 }
22 } 20 }
23 pub fn chunks(&self) -> impl Iterator<Item=&'a str> { 21 pub fn chunks(&self) -> impl Iterator<Item = &'a str> {
24 let range = self.range; 22 let range = self.range;
25 self.node 23 self.node.descendants().filter_map(move |node| {
26 .descendants() 24 let text = node.leaf_text()?;
27 .filter_map(move |node| { 25 let range = intersect(range, node.range())?;
28 let text = node.leaf_text()?; 26 let range = range - node.range().start();
29 let range = intersect(range, node.range())?; 27 Some(&text[range])
30 let range = range - node.range().start(); 28 })
31 Some(&text[range])
32 })
33 } 29 }
34 pub fn push_to(&self, buf: &mut String) { 30 pub fn push_to(&self, buf: &mut String) {
35 self.chunks().for_each(|it| buf.push_str(it)); 31 self.chunks().for_each(|it| buf.push_str(it));
@@ -55,11 +51,13 @@ impl<'a> SyntaxText<'a> {
55 self.range.len() 51 self.range.len()
56 } 52 }
57 pub fn slice(&self, range: impl SyntaxTextSlice) -> SyntaxText<'a> { 53 pub fn slice(&self, range: impl SyntaxTextSlice) -> SyntaxText<'a> {
58 let range = range.restrict(self.range) 54 let range = range.restrict(self.range).unwrap_or_else(|| {
59 .unwrap_or_else(|| { 55 panic!("invalid slice, range: {:?}, slice: {:?}", self.range, range)
60 panic!("invalid slice, range: {:?}, slice: {:?}", self.range, range) 56 });
61 }); 57 SyntaxText {
62 SyntaxText { node: self.node, range } 58 node: self.node,
59 range,
60 }
63 } 61 }
64 pub fn char_at(&self, offset: TextUnit) -> Option<char> { 62 pub fn char_at(&self, offset: TextUnit) -> Option<char> {
65 let mut start: TextUnit = 0.into(); 63 let mut start: TextUnit = 0.into();
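
The reformatted `chunks` closure above implements one idea: for each leaf under the node, keep only the part of its text that overlaps the query range, rebased to the leaf's own start offset. A minimal sketch of that single step with plain `usize` offsets (not the crate's `TextRange`/`intersect` API) is shown below.

    // One step of SyntaxText::chunks, in isolation: `query` is the half-open byte
    // range being read, `leaf_start` the leaf's offset in the file, `leaf_text` its text.
    fn chunk(query: (usize, usize), leaf_start: usize, leaf_text: &str) -> Option<&str> {
        let leaf_end = leaf_start + leaf_text.len();
        // Intersect the query range with this leaf's range.
        let start = query.0.max(leaf_start);
        let end = query.1.min(leaf_end);
        if start >= end {
            return None; // no overlap: this leaf contributes nothing
        }
        // Rebase to the leaf's local coordinates and slice its text.
        Some(&leaf_text[start - leaf_start..end - leaf_start])
    }

    // e.g. chunk((2, 9), 5, "world") == Some("worl")
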
diff --git a/crates/ra_syntax/tests/test.rs b/crates/ra_syntax/tests/test.rs
index 27380efef..9d1ded093 100644
--- a/crates/ra_syntax/tests/test.rs
+++ b/crates/ra_syntax/tests/test.rs
@@ -4,14 +4,14 @@ extern crate test_utils;
4extern crate walkdir; 4extern crate walkdir;
5 5
6use std::{ 6use std::{
7 fmt::Write,
7 fs, 8 fs,
8 path::{Path, PathBuf}, 9 path::{Path, PathBuf},
9 fmt::Write,
10}; 10};
11 11
12use ra_syntax::{ 12use ra_syntax::{
13 utils::{check_fuzz_invariants, dump_tree},
13 File, 14 File,
14 utils::{dump_tree, check_fuzz_invariants},
15}; 15};
16 16
17#[test] 17#[test]
@@ -37,7 +37,6 @@ fn parser_fuzz_tests() {
37 } 37 }
38} 38}
39 39
40
41/// Read file and normalize newlines. 40/// Read file and normalize newlines.
42/// 41///
43/// `rustc` seems to always normalize `\r\n` newlines to `\n`: 42/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
@@ -54,8 +53,8 @@ fn read_text(path: &Path) -> String {
54} 53}
55 54
56pub fn dir_tests<F>(paths: &[&str], f: F) 55pub fn dir_tests<F>(paths: &[&str], f: F)
57 where 56where
58 F: Fn(&str) -> String, 57 F: Fn(&str) -> String,
59{ 58{
60 for (path, input_code) in collect_tests(paths) { 59 for (path, input_code) in collect_tests(paths) {
61 let parse_tree = f(&input_code); 60 let parse_tree = f(&input_code);