Diffstat (limited to 'crates/ra_syntax')
26 files changed, 95 insertions, 314 deletions
diff --git a/crates/ra_syntax/src/algo/visit.rs b/crates/ra_syntax/src/algo/visit.rs
index 38f21594c..81a99228f 100644
--- a/crates/ra_syntax/src/algo/visit.rs
+++ b/crates/ra_syntax/src/algo/visit.rs
@@ -7,10 +7,7 @@ pub fn visitor<'a, T>() -> impl Visitor<'a, Output = T> {
 }
 
 pub fn visitor_ctx<'a, T, C>(ctx: C) -> impl VisitorCtx<'a, Output = T, Ctx = C> {
-    EmptyVisitorCtx {
-        ph: PhantomData,
-        ctx,
-    }
+    EmptyVisitorCtx { ph: PhantomData, ctx }
 }
 
 pub trait Visitor<'a>: Sized {
@@ -21,11 +18,7 @@ pub trait Visitor<'a>: Sized {
         N: AstNode + 'a,
         F: FnOnce(&'a N) -> Self::Output,
     {
-        Vis {
-            inner: self,
-            f,
-            ph: PhantomData,
-        }
+        Vis { inner: self, f, ph: PhantomData }
     }
 }
 
@@ -38,11 +31,7 @@ pub trait VisitorCtx<'a>: Sized {
         N: AstNode + 'a,
         F: FnOnce(&'a N, Self::Ctx) -> Self::Output,
     {
-        VisCtx {
-            inner: self,
-            f,
-            ph: PhantomData,
-        }
+        VisCtx { inner: self, f, ph: PhantomData }
     }
 }
 
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs
index d6237532b..cf5cfecc2 100644
--- a/crates/ra_syntax/src/ast.rs
+++ b/crates/ra_syntax/src/ast.rs
@@ -127,16 +127,12 @@ pub trait DocCommentsOwner: AstNode {
                 let line = comment.text().as_str();
 
                 // Determine if the prefix or prefix + 1 char is stripped
-                let pos = if line
-                    .chars()
-                    .nth(prefix_len)
-                    .map(|c| c.is_whitespace())
-                    .unwrap_or(false)
-                {
-                    prefix_len + 1
-                } else {
-                    prefix_len
-                };
+                let pos =
+                    if line.chars().nth(prefix_len).map(|c| c.is_whitespace()).unwrap_or(false) {
+                        prefix_len + 1
+                    } else {
+                        prefix_len
+                    };
 
                 line[pos..].to_owned()
             })
@@ -357,10 +353,7 @@ pub enum PathSegmentKind<'a> {
 
 impl PathSegment {
     pub fn parent_path(&self) -> &Path {
-        self.syntax()
-            .parent()
-            .and_then(Path::cast)
-            .expect("segments are always nested in paths")
+        self.syntax().parent().and_then(Path::cast).expect("segments are always nested in paths")
     }
 
     pub fn kind(&self) -> Option<PathSegmentKind> {
@@ -428,10 +421,7 @@ pub struct AstChildren<'a, N> {
 
 impl<'a, N> AstChildren<'a, N> {
     fn new(parent: &'a SyntaxNode) -> Self {
-        AstChildren {
-            inner: parent.children(),
-            ph: PhantomData,
-        }
+        AstChildren { inner: parent.children(), ph: PhantomData }
     }
 }
 
@@ -658,11 +648,7 @@ impl SelfParam {
         let borrowed = self.syntax().children().any(|n| n.kind() == AMP);
         if borrowed {
             // check for a `mut` coming after the & -- `mut &self` != `&mut self`
-            if self
-                .syntax()
-                .children()
-                .skip_while(|n| n.kind() != AMP)
-                .any(|n| n.kind() == MUT_KW)
+            if self.syntax().children().skip_while(|n| n.kind() != AMP).any(|n| n.kind() == MUT_KW)
             {
                 SelfParamFlavor::MutRef
             } else {
@@ -769,8 +755,5 @@ fn test_doc_comment_preserves_indents() {
         "#,
     );
     let module = file.syntax().descendants().find_map(Module::cast).unwrap();
-    assert_eq!(
-        "doc1\n```\nfn foo() {\n // ...\n}\n```",
-        module.doc_comment_text().unwrap()
-    );
+    assert_eq!("doc1\n```\nfn foo() {\n // ...\n}\n```", module.doc_comment_text().unwrap());
 }
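For context on the DocCommentsOwner hunk above: the reformatted logic strips the doc-comment prefix, plus one extra character when the prefix is immediately followed by whitespace. A minimal standalone sketch of that rule; the strip_doc_prefix helper and the example values are assumptions for illustration, not code from this crate:

    // Sketch only: mirrors the prefix-stripping rule in the hunk above.
    fn strip_doc_prefix(line: &str, prefix_len: usize) -> String {
        // If the character right after the prefix is whitespace, strip it too,
        // so "/// text" yields "text" rather than " text".
        let pos = if line.chars().nth(prefix_len).map(|c| c.is_whitespace()).unwrap_or(false) {
            prefix_len + 1
        } else {
            prefix_len
        };
        line[pos..].to_owned()
    }

    fn main() {
        assert_eq!(strip_doc_prefix("/// doc1", 3), "doc1");
        assert_eq!(strip_doc_prefix("///doc1", 3), "doc1");
    }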
diff --git a/crates/ra_syntax/src/grammar/expressions.rs b/crates/ra_syntax/src/grammar/expressions.rs
index 6b88c5685..28fcb1f7d 100644
--- a/crates/ra_syntax/src/grammar/expressions.rs
+++ b/crates/ra_syntax/src/grammar/expressions.rs
@@ -7,26 +7,17 @@ use super::*;
 const EXPR_FIRST: TokenSet = LHS_FIRST;
 
 pub(super) fn expr(p: &mut Parser) -> BlockLike {
-    let r = Restrictions {
-        forbid_structs: false,
-        prefer_stmt: false,
-    };
+    let r = Restrictions { forbid_structs: false, prefer_stmt: false };
     expr_bp(p, r, 1)
 }
 
 pub(super) fn expr_stmt(p: &mut Parser) -> BlockLike {
-    let r = Restrictions {
-        forbid_structs: false,
-        prefer_stmt: true,
-    };
+    let r = Restrictions { forbid_structs: false, prefer_stmt: true };
     expr_bp(p, r, 1)
 }
 
 fn expr_no_struct(p: &mut Parser) {
-    let r = Restrictions {
-        forbid_structs: true,
-        prefer_stmt: false,
-    };
+    let r = Restrictions { forbid_structs: true, prefer_stmt: false };
     expr_bp(p, r, 1);
 }
 
diff --git a/crates/ra_syntax/src/grammar/expressions/atom.rs b/crates/ra_syntax/src/grammar/expressions/atom.rs
index 600774afd..27ba87657 100644
--- a/crates/ra_syntax/src/grammar/expressions/atom.rs
+++ b/crates/ra_syntax/src/grammar/expressions/atom.rs
@@ -141,14 +141,7 @@ fn tuple_expr(p: &mut Parser) -> CompletedMarker {
         }
     }
     p.expect(R_PAREN);
-    m.complete(
-        p,
-        if saw_expr && !saw_comma {
-            PAREN_EXPR
-        } else {
-            TUPLE_EXPR
-        },
-    )
+    m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR })
 }
 
 // test array_expr
diff --git a/crates/ra_syntax/src/grammar/items.rs b/crates/ra_syntax/src/grammar/items.rs
index 84c18a293..a61f260cf 100644
--- a/crates/ra_syntax/src/grammar/items.rs
+++ b/crates/ra_syntax/src/grammar/items.rs
@@ -155,11 +155,7 @@ pub(super) fn maybe_item(p: &mut Parser, flavor: ItemFlavor) -> MaybeItem {
             IMPL_BLOCK
         }
         _ => {
-            return if has_mods {
-                MaybeItem::Modifiers
-            } else {
-                MaybeItem::None
-            };
+            return if has_mods { MaybeItem::Modifiers } else { MaybeItem::None };
         }
     };
 
diff --git a/crates/ra_syntax/src/grammar/params.rs b/crates/ra_syntax/src/grammar/params.rs
index 13158429a..185386569 100644
--- a/crates/ra_syntax/src/grammar/params.rs
+++ b/crates/ra_syntax/src/grammar/params.rs
@@ -36,11 +36,7 @@ impl Flavor {
 }
 
 fn list_(p: &mut Parser, flavor: Flavor) {
-    let (bra, ket) = if flavor.type_required() {
-        (L_PAREN, R_PAREN)
-    } else {
-        (PIPE, PIPE)
-    };
+    let (bra, ket) = if flavor.type_required() { (L_PAREN, R_PAREN) } else { (PIPE, PIPE) };
     assert!(p.at(bra));
     let m = p.start();
     p.bump();
diff --git a/crates/ra_syntax/src/grammar/patterns.rs b/crates/ra_syntax/src/grammar/patterns.rs
index 1ac5efdf6..f3f400ae0 100644
--- a/crates/ra_syntax/src/grammar/patterns.rs
+++ b/crates/ra_syntax/src/grammar/patterns.rs
@@ -2,9 +2,7 @@ use super::*;
 
 pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
     .union(paths::PATH_FIRST)
-    .union(token_set![
-        REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE
-    ]);
+    .union(token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE]);
 
 pub(super) fn pattern(p: &mut Parser) {
     pattern_r(p, PAT_RECOVERY_SET)
diff --git a/crates/ra_syntax/src/lexer/ptr.rs b/crates/ra_syntax/src/lexer/ptr.rs
index 0a473c991..c341c4176 100644
--- a/crates/ra_syntax/src/lexer/ptr.rs
+++ b/crates/ra_syntax/src/lexer/ptr.rs
@@ -11,10 +11,7 @@ pub(crate) struct Ptr<'s> {
 impl<'s> Ptr<'s> {
     /// Creates a new `Ptr` from a string.
     pub fn new(text: &'s str) -> Ptr<'s> {
-        Ptr {
-            text,
-            len: 0.into(),
-        }
+        Ptr { text, len: 0.into() }
     }
 
     /// Gets the length of the remaining string.
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs
index 104f32851..088b2f5d7 100644
--- a/crates/ra_syntax/src/lib.rs
+++ b/crates/ra_syntax/src/lib.rs
@@ -11,11 +11,7 @@
 //! [rfc#2256]: <https://github.com/rust-lang/rfcs/pull/2256>
 //! [RFC.md]: <https://github.com/matklad/libsyntax2/blob/master/docs/RFC.md>
 
-#![forbid(
-    missing_debug_implementations,
-    unconditional_recursion,
-    future_incompatible
-)]
+#![forbid(missing_debug_implementations, unconditional_recursion, future_incompatible)]
 #![deny(bad_style, missing_docs)]
 #![allow(missing_docs)]
 //#![warn(unreachable_pub)] // rust-lang/rust#47816
@@ -70,8 +66,7 @@ impl SourceFile {
     }
 
     pub fn reparse(&self, edit: &AtomTextEdit) -> TreeArc<SourceFile> {
-        self.incremental_reparse(edit)
-            .unwrap_or_else(|| self.full_reparse(edit))
+        self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
     }
 
     pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<TreeArc<SourceFile>> {
diff --git a/crates/ra_syntax/src/parser_api.rs b/crates/ra_syntax/src/parser_api.rs
index 3148371c5..504df753e 100644
--- a/crates/ra_syntax/src/parser_api.rs
+++ b/crates/ra_syntax/src/parser_api.rs
@@ -136,10 +136,7 @@ pub(crate) struct Marker {
 
 impl Marker {
     fn new(pos: u32) -> Marker {
-        Marker {
-            pos,
-            bomb: DropBomb::new("Marker must be either completed or abandoned"),
-        }
+        Marker { pos, bomb: DropBomb::new("Marker must be either completed or abandoned") }
     }
 
     /// Finishes the syntax tree node and assigns `kind` to it,
diff --git a/crates/ra_syntax/src/parser_impl.rs b/crates/ra_syntax/src/parser_impl.rs
index 01a51cd8d..f255dc23b 100644
--- a/crates/ra_syntax/src/parser_impl.rs
+++ b/crates/ra_syntax/src/parser_impl.rs
@@ -54,9 +54,7 @@ pub(crate) fn parse_with<S: Sink>(
         parser(&mut parser_api);
         parser_api.0.into_events()
     };
-    EventProcessor::new(sink, text, tokens, &mut events)
-        .process()
-        .finish()
+    EventProcessor::new(sink, text, tokens, &mut events).process().finish()
 }
 
 /// Implementation details of `Parser`, extracted
@@ -160,17 +158,13 @@ impl<'t> ParserImpl<'t> {
 
     /// Append one Error event to the back of events.
     pub(super) fn error(&mut self, msg: String) {
-        self.push_event(Event::Error {
-            msg: ParseError(msg),
-        })
+        self.push_event(Event::Error { msg: ParseError(msg) })
     }
 
     /// Complete an event with appending a `Finish` event.
    pub(super) fn complete(&mut self, pos: u32, kind: SyntaxKind) {
         match self.events[pos as usize] {
-            Event::Start {
-                kind: ref mut slot, ..
-            } => {
+            Event::Start { kind: ref mut slot, .. } => {
                 *slot = kind;
             }
             _ => unreachable!(),
@@ -183,10 +177,7 @@ impl<'t> ParserImpl<'t> {
         let idx = pos as usize;
         if idx == self.events.len() - 1 {
             match self.events.pop() {
-                Some(Event::Start {
-                    kind: TOMBSTONE,
-                    forward_parent: None,
-                }) => (),
+                Some(Event::Start { kind: TOMBSTONE, forward_parent: None }) => (),
                 _ => unreachable!(),
             }
         }
@@ -196,10 +187,7 @@ impl<'t> ParserImpl<'t> {
     pub(super) fn precede(&mut self, pos: u32) -> u32 {
         let new_pos = self.start();
         match self.events[pos as usize] {
-            Event::Start {
-                ref mut forward_parent,
-                ..
-            } => {
+            Event::Start { ref mut forward_parent, .. } => {
                 *forward_parent = Some(new_pos - pos);
             }
             _ => unreachable!(),
diff --git a/crates/ra_syntax/src/parser_impl/event.rs b/crates/ra_syntax/src/parser_impl/event.rs
index 33e10ef85..677876ab5 100644
--- a/crates/ra_syntax/src/parser_impl/event.rs
+++ b/crates/ra_syntax/src/parser_impl/event.rs
@@ -86,10 +86,7 @@ pub(crate) enum Event {
 
 impl Event {
     pub(crate) fn tombstone() -> Self {
-        Event::Start {
-            kind: TOMBSTONE,
-            forward_parent: None,
-        }
+        Event::Start { kind: TOMBSTONE, forward_parent: None }
     }
 }
 
@@ -109,14 +106,7 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
         tokens: &'a [Token],
         events: &'a mut [Event],
     ) -> EventProcessor<'a, S> {
-        EventProcessor {
-            sink,
-            text_pos: 0.into(),
-            text,
-            token_pos: 0,
-            tokens,
-            events,
-        }
+        EventProcessor { sink, text_pos: 0.into(), text, token_pos: 0, tokens, events }
     }
 
     /// Generate the syntax tree with the control of events.
@@ -125,14 +115,9 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
 
         for i in 0..self.events.len() {
             match mem::replace(&mut self.events[i], Event::tombstone()) {
-                Event::Start {
-                    kind: TOMBSTONE, ..
-                } => (),
+                Event::Start { kind: TOMBSTONE, .. } => (),
 
-                Event::Start {
-                    kind,
-                    forward_parent,
-                } => {
+                Event::Start { kind, forward_parent } => {
                     // For events[A, B, C], B is A's forward_parent, C is B's forward_parent,
                     // in the normal control flow, the parent-child relation: `A -> B -> C`,
                     // while with the magic forward_parent, it writes: `C <- B <- A`.
@@ -145,10 +130,7 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
                         idx += fwd as usize;
                         // append `A`'s forward_parent `B`
                        fp = match mem::replace(&mut self.events[idx], Event::tombstone()) {
-                            Event::Start {
-                                kind,
-                                forward_parent,
-                            } => {
+                            Event::Start { kind, forward_parent } => {
                                 forward_parents.push(kind);
                                 forward_parent
                             }
@@ -174,10 +156,9 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
                         .sum::<TextUnit>();
                     self.leaf(kind, len, n_raw_tokens);
                 }
-                Event::Error { msg } => self.sink.error(SyntaxError::new(
-                    SyntaxErrorKind::ParseError(msg),
-                    self.text_pos,
-                )),
+                Event::Error { msg } => self
+                    .sink
+                    .error(SyntaxError::new(SyntaxErrorKind::ParseError(msg), self.text_pos)),
             }
         }
         self.sink
@@ -189,10 +170,8 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
             self.sink.start_branch(kind);
             return;
         }
-        let n_trivias = self.tokens[self.token_pos..]
-            .iter()
-            .take_while(|it| it.kind.is_trivia())
-            .count();
+        let n_trivias =
+            self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count();
         let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias];
         let mut trivia_end =
             self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextUnit>();
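For context on the EventProcessor::process hunks above: forward_parent stores a relative offset from a Start event to the event that should become its parent; the processor follows that chain, collects the kinds, and opens branches outermost-first. A simplified, self-contained sketch of that walk; the Event enum and collect_forward_parents here are illustrative stand-ins, not the crate's real types:

    // Sketch only: follow the forward_parent chain starting at `start`,
    // tombstoning visited events so they are not processed twice.
    enum Event {
        Start { kind: &'static str, forward_parent: Option<u32> },
        Tombstone,
    }

    fn collect_forward_parents(events: &mut Vec<Event>, start: usize) -> Vec<&'static str> {
        let mut kinds = Vec::new();
        let mut idx = start;
        loop {
            match std::mem::replace(&mut events[idx], Event::Tombstone) {
                Event::Start { kind, forward_parent } => {
                    kinds.push(kind);
                    match forward_parent {
                        // Offsets are relative, as in the `idx += fwd as usize` line above.
                        Some(fwd) => idx += fwd as usize,
                        None => break,
                    }
                }
                Event::Tombstone => break,
            }
        }
        // Open the outermost parent first: for the chain A -> B -> C, start C, then B, then A.
        kinds.reverse();
        kinds
    }

    fn main() {
        // events[0] is A whose forward parent B sits one slot ahead; B's forward parent is C.
        let mut events = vec![
            Event::Start { kind: "A", forward_parent: Some(1) },
            Event::Start { kind: "B", forward_parent: Some(1) },
            Event::Start { kind: "C", forward_parent: None },
        ];
        assert_eq!(collect_forward_parents(&mut events, 0), vec!["C", "B", "A"]);
    }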
diff --git a/crates/ra_syntax/src/parser_impl/input.rs b/crates/ra_syntax/src/parser_impl/input.rs
index 7fde5b3ab..616a26fdc 100644
--- a/crates/ra_syntax/src/parser_impl/input.rs
+++ b/crates/ra_syntax/src/parser_impl/input.rs
@@ -36,11 +36,7 @@ impl<'t> ParserInput<'t> {
             len += token.len;
         }
 
-        ParserInput {
-            text,
-            start_offsets,
-            tokens,
-        }
+        ParserInput { text, start_offsets, tokens }
     }
 
     /// Get the syntax kind of token at given input position.
diff --git a/crates/ra_syntax/src/ptr.rs b/crates/ra_syntax/src/ptr.rs
index 13ee1305f..aae590cb6 100644
--- a/crates/ra_syntax/src/ptr.rs
+++ b/crates/ra_syntax/src/ptr.rs
@@ -15,16 +15,12 @@ pub struct SyntaxNodePtr {
 
 impl SyntaxNodePtr {
     pub fn new(node: &SyntaxNode) -> SyntaxNodePtr {
-        SyntaxNodePtr {
-            range: node.range(),
-            kind: node.kind(),
-        }
+        SyntaxNodePtr { range: node.range(), kind: node.kind() }
     }
 
     pub fn to_node(self, source_file: &SourceFile) -> &SyntaxNode {
         generate(Some(source_file.syntax()), |&node| {
-            node.children()
-                .find(|it| self.range.is_subrange(&it.range()))
+            node.children().find(|it| self.range.is_subrange(&it.range()))
         })
         .find(|it| it.range() == self.range && it.kind() == self.kind)
         .unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self))
@@ -55,10 +51,7 @@ impl<N: AstNode> Clone for AstPtr<N> {
 
 impl<N: AstNode> AstPtr<N> {
     pub fn new(node: &N) -> AstPtr<N> {
-        AstPtr {
-            raw: SyntaxNodePtr::new(node.syntax()),
-            _ty: PhantomData,
-        }
+        AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData }
     }
 
     pub fn to_node(self, source_file: &SourceFile) -> &N {
@@ -76,11 +69,7 @@ fn test_local_syntax_ptr() {
     use crate::{ast, AstNode};
 
     let file = SourceFile::parse("struct Foo { f: u32, }");
-    let field = file
-        .syntax()
-        .descendants()
-        .find_map(ast::NamedFieldDef::cast)
-        .unwrap();
+    let field = file.syntax().descendants().find_map(ast::NamedFieldDef::cast).unwrap();
     let ptr = SyntaxNodePtr::new(field.syntax());
     let field_syntax = ptr.to_node(&file);
     assert_eq!(field.syntax(), &*field_syntax);
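For context on the SyntaxNodePtr::to_node hunk above: a pointer is just a (range, kind) pair, resolved by walking down from the root and repeatedly stepping into the child whose range covers the target range. A rough sketch of that idea under invented types; Node, resolve, and the example tree are not the crate's API:

    // Sketch only: resolve a (range, kind) "pointer" by descending the tree.
    struct Node {
        kind: &'static str,
        range: (u32, u32), // (start, end), end exclusive
        children: Vec<Node>,
    }

    fn contains(outer: (u32, u32), inner: (u32, u32)) -> bool {
        outer.0 <= inner.0 && inner.1 <= outer.1
    }

    fn resolve<'a>(root: &'a Node, range: (u32, u32), kind: &str) -> Option<&'a Node> {
        let mut node = root;
        loop {
            if node.range == range && node.kind == kind {
                return Some(node);
            }
            // Step into the child covering the target range, like the generate(..) walk above.
            match node.children.iter().find(|c| contains(c.range, range)) {
                Some(child) => node = child,
                None => return None,
            }
        }
    }

    fn main() {
        let root = Node {
            kind: "SOURCE_FILE",
            range: (0, 22),
            children: vec![Node {
                kind: "STRUCT_DEF",
                range: (0, 22),
                children: vec![Node { kind: "NAMED_FIELD_DEF", range: (13, 19), children: vec![] }],
            }],
        };
        let field = resolve(&root, (13, 19), "NAMED_FIELD_DEF").unwrap();
        assert_eq!(field.kind, "NAMED_FIELD_DEF");
    }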
diff --git a/crates/ra_syntax/src/reparsing.rs b/crates/ra_syntax/src/reparsing.rs
index 2f1de6b02..c5c609ad5 100644
--- a/crates/ra_syntax/src/reparsing.rs
+++ b/crates/ra_syntax/src/reparsing.rs
@@ -75,10 +75,7 @@ fn is_contextual_kw(text: &str) -> bool {
 type ParseFn = fn(&mut Parser);
 fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, ParseFn)> {
     let node = algo::find_covering_node(node, range);
-    return node
-        .ancestors()
-        .filter_map(|node| reparser(node).map(|r| (node, r)))
-        .next();
+    return node.ancestors().filter_map(|node| reparser(node).map(|r| (node, r))).next();
 
     fn reparser(node: &SyntaxNode) -> Option<ParseFn> {
         let res = match node.kind() {
@@ -169,10 +166,7 @@ mod tests {
         let fully_reparsed = SourceFile::parse(&after);
         let incrementally_reparsed = {
             let f = SourceFile::parse(&before);
-            let edit = AtomTextEdit {
-                delete: range,
-                insert: replace_with.to_string(),
-            };
+            let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() };
             let (node, green, new_errors) =
                 reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
             let green_root = node.replace_with(green);
diff --git a/crates/ra_syntax/src/string_lexing/parser.rs b/crates/ra_syntax/src/string_lexing/parser.rs
index e835382fc..7469eb903 100644
--- a/crates/ra_syntax/src/string_lexing/parser.rs
+++ b/crates/ra_syntax/src/string_lexing/parser.rs
@@ -24,9 +24,7 @@ impl<'a> Parser<'a> {
     }
 
     pub fn advance(&mut self) -> char {
-        let next = self
-            .peek()
-            .expect("cannot advance if end of input is reached");
+        let next = self.peek().expect("cannot advance if end of input is reached");
         self.pos += next.len_utf8();
         next
     }
@@ -133,10 +131,7 @@ impl<'a> Parser<'a> {
             Some(self.parse_escape(start))
         } else {
             let end = self.get_pos();
-            Some(StringComponent::new(
-                TextRange::from_to(start, end),
-                CodePoint,
-            ))
+            Some(StringComponent::new(TextRange::from_to(start, end), CodePoint))
         }
     }
 
diff --git a/crates/ra_syntax/src/string_lexing/string.rs b/crates/ra_syntax/src/string_lexing/string.rs
index 064f08544..a4742a0d1 100644
--- a/crates/ra_syntax/src/string_lexing/string.rs
+++ b/crates/ra_syntax/src/string_lexing/string.rs
@@ -120,12 +120,7 @@ mod tests {
     fn closed_char_component(src: &str) -> StringComponent {
         let (has_closing_quote, components) = parse(src);
         assert!(has_closing_quote, "char should have closing quote");
-        assert!(
-            components.len() == 1,
-            "Literal: {}\nComponents: {:#?}",
-            src,
-            components
-        );
+        assert!(components.len() == 1, "Literal: {}\nComponents: {:#?}", src, components);
         components[0].clone()
     }
 
diff --git a/crates/ra_syntax/src/validation/block.rs b/crates/ra_syntax/src/validation/block.rs
index 9e1949124..4e77c15b6 100644
--- a/crates/ra_syntax/src/validation/block.rs
+++ b/crates/ra_syntax/src/validation/block.rs
@@ -17,8 +17,6 @@ pub(crate) fn validate_block_node(node: &ast::Block, errors: &mut Vec<SyntaxErro
             _ => {}
         }
     }
-    errors.extend(
-        node.attrs()
-            .map(|attr| SyntaxError::new(InvalidBlockAttr, attr.syntax().range())),
-    )
+    errors
+        .extend(node.attrs().map(|attr| SyntaxError::new(InvalidBlockAttr, attr.syntax().range())))
 }
diff --git a/crates/ra_syntax/src/validation/byte.rs b/crates/ra_syntax/src/validation/byte.rs
index 9bddabc80..d51fabcf9 100644
--- a/crates/ra_syntax/src/validation/byte.rs
+++ b/crates/ra_syntax/src/validation/byte.rs
@@ -28,10 +28,7 @@ pub(super) fn validate_byte_node(node: &ast::Byte, errors: &mut Vec<SyntaxError>
     }
 
     if let Some(range) = components.suffix {
-        errors.push(SyntaxError::new(
-            InvalidSuffix,
-            range + literal_range.start(),
-        ));
+        errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start()));
     }
 
     if len == 0 {
@@ -55,10 +52,7 @@ pub(super) fn validate_byte_component(
         AsciiCodeEscape => validate_byte_code_escape(text, range, errors),
         UnicodeEscape => errors.push(SyntaxError::new(UnicodeEscapeForbidden, range)),
         CodePoint => {
-            let c = text
-                .chars()
-                .next()
-                .expect("Code points should be one character long");
+            let c = text.chars().next().expect("Code points should be one character long");
 
             // These bytes must always be escaped
             if c == '\t' || c == '\r' || c == '\n' {
@@ -93,10 +87,7 @@ fn validate_byte_code_escape(text: &str, range: TextRange, errors: &mut Vec<Synt
     } else if text.chars().count() < 4 {
         errors.push(SyntaxError::new(TooShortByteCodeEscape, range));
     } else {
-        assert!(
-            text.chars().count() == 4,
-            "ByteCodeEscape cannot be longer than 4 chars"
-        );
+        assert!(text.chars().count() == 4, "ByteCodeEscape cannot be longer than 4 chars");
 
         if u8::from_str_radix(&text[2..], 16).is_err() {
             errors.push(SyntaxError::new(MalformedByteCodeEscape, range));
@@ -115,12 +106,7 @@ mod test {
 
     fn assert_valid_byte(literal: &str) {
         let file = build_file(literal);
-        assert!(
-            file.errors().len() == 0,
-            "Errors for literal '{}': {:?}",
-            literal,
-            file.errors()
-        );
+        assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors());
     }
 
     fn assert_invalid_byte(literal: &str) {
@@ -193,13 +179,7 @@ mod test {
 
     #[test]
     fn test_invalid_unicode_escape() {
-        let well_formed = [
-            r"\u{FF}",
-            r"\u{0}",
-            r"\u{F}",
-            r"\u{10FFFF}",
-            r"\u{1_0__FF___FF_____}",
-        ];
+        let well_formed = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"];
         for c in &well_formed {
             assert_invalid_byte(c);
         }
diff --git a/crates/ra_syntax/src/validation/byte_string.rs b/crates/ra_syntax/src/validation/byte_string.rs
index bdb147545..7abe8f330 100644
--- a/crates/ra_syntax/src/validation/byte_string.rs
+++ b/crates/ra_syntax/src/validation/byte_string.rs
@@ -34,10 +34,7 @@ pub(crate) fn validate_byte_string_node(node: &ast::ByteString, errors: &mut Vec
     }
 
     if let Some(range) = components.suffix {
-        errors.push(SyntaxError::new(
-            InvalidSuffix,
-            range + literal_range.start(),
-        ));
+        errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start()));
     }
 }
 
@@ -53,12 +50,7 @@ mod test {
 
     fn assert_valid_str(literal: &str) {
         let file = build_file(literal);
-        assert!(
-            file.errors().len() == 0,
-            "Errors for literal '{}': {:?}",
-            literal,
-            file.errors()
-        );
+        assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors());
     }
 
     fn assert_invalid_str(literal: &str) {
@@ -130,13 +122,7 @@ mod test {
 
     #[test]
     fn test_invalid_unicode_escape() {
-        let well_formed = [
-            r"\u{FF}",
-            r"\u{0}",
-            r"\u{F}",
-            r"\u{10FFFF}",
-            r"\u{1_0__FF___FF_____}",
-        ];
+        let well_formed = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"];
         for c in &well_formed {
             assert_invalid_str(c);
         }
diff --git a/crates/ra_syntax/src/validation/char.rs b/crates/ra_syntax/src/validation/char.rs
index e3ac5836b..012594db3 100644
--- a/crates/ra_syntax/src/validation/char.rs
+++ b/crates/ra_syntax/src/validation/char.rs
@@ -31,10 +31,7 @@ pub(super) fn validate_char_node(node: &ast::Char, errors: &mut Vec<SyntaxError>
     }
 
     if let Some(range) = components.suffix {
-        errors.push(SyntaxError::new(
-            InvalidSuffix,
-            range + literal_range.start(),
-        ));
+        errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start()));
     }
 
     if len == 0 {
@@ -184,12 +181,7 @@ mod test {
 
     fn assert_valid_char(literal: &str) {
         let file = build_file(literal);
-        assert!(
-            file.errors().len() == 0,
-            "Errors for literal '{}': {:?}",
-            literal,
-            file.errors()
-        );
+        assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors());
     }
 
     fn assert_invalid_char(literal: &str) {
@@ -258,13 +250,7 @@ mod test {
 
     #[test]
    fn test_valid_unicode_escape() {
-        let valid = [
-            r"\u{FF}",
-            r"\u{0}",
-            r"\u{F}",
-            r"\u{10FFFF}",
-            r"\u{1_0__FF___FF_____}",
-        ];
+        let valid = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"];
         for c in &valid {
             assert_valid_char(c);
         }
diff --git a/crates/ra_syntax/src/validation/string.rs b/crates/ra_syntax/src/validation/string.rs
index 365fe8d2d..4fd7fffdf 100644
--- a/crates/ra_syntax/src/validation/string.rs
+++ b/crates/ra_syntax/src/validation/string.rs
@@ -29,10 +29,7 @@ pub(crate) fn validate_string_node(node: &ast::String, errors: &mut Vec<SyntaxEr
     }
 
     if let Some(range) = components.suffix {
-        errors.push(SyntaxError::new(
-            InvalidSuffix,
-            range + literal_range.start(),
-        ));
+        errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start()));
     }
 }
 
@@ -48,12 +45,7 @@ mod test {
 
     fn assert_valid_str(literal: &str) {
         let file = build_file(literal);
-        assert!(
-            file.errors().len() == 0,
-            "Errors for literal '{}': {:?}",
-            literal,
-            file.errors()
-        );
+        assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors());
     }
 
     fn assert_invalid_str(literal: &str) {
@@ -121,13 +113,7 @@ mod test {
 
     #[test]
     fn test_valid_unicode_escape() {
-        let valid = [
-            r"\u{FF}",
-            r"\u{0}",
-            r"\u{F}",
-            r"\u{10FFFF}",
-            r"\u{1_0__FF___FF_____}",
-        ];
+        let valid = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"];
         for c in &valid {
             assert_valid_str(c);
         }
diff --git a/crates/ra_syntax/src/yellow/builder.rs b/crates/ra_syntax/src/yellow/builder.rs
index 37ae6329b..e8b9112d4 100644
--- a/crates/ra_syntax/src/yellow/builder.rs
+++ b/crates/ra_syntax/src/yellow/builder.rs
@@ -12,10 +12,7 @@ pub(crate) struct GreenBuilder {
 
 impl GreenBuilder {
     pub(crate) fn new() -> GreenBuilder {
-        GreenBuilder {
-            errors: Vec::new(),
-            inner: GreenNodeBuilder::new(),
-        }
+        GreenBuilder { errors: Vec::new(), inner: GreenNodeBuilder::new() }
    }
 }
 
diff --git a/crates/ra_syntax/src/yellow/syntax_error.rs b/crates/ra_syntax/src/yellow/syntax_error.rs
index c52c44cc3..412cf82cc 100644
--- a/crates/ra_syntax/src/yellow/syntax_error.rs
+++ b/crates/ra_syntax/src/yellow/syntax_error.rs
@@ -28,10 +28,7 @@ impl Into<Location> for TextRange {
 
 impl SyntaxError {
     pub fn new<L: Into<Location>>(kind: SyntaxErrorKind, loc: L) -> SyntaxError {
-        SyntaxError {
-            kind,
-            location: loc.into(),
-        }
+        SyntaxError { kind, location: loc.into() }
     }
 
     pub fn kind(&self) -> SyntaxErrorKind {
@@ -119,10 +116,9 @@ impl fmt::Display for SyntaxErrorKind {
             InvalidByteEscape => write!(f, "Invalid escape sequence"),
             TooShortByteCodeEscape => write!(f, "Escape sequence should have two digits"),
             MalformedByteCodeEscape => write!(f, "Escape sequence should be a hexadecimal number"),
-            UnicodeEscapeForbidden => write!(
-                f,
-                "Unicode escapes are not allowed in byte literals or byte strings"
-            ),
+            UnicodeEscapeForbidden => {
+                write!(f, "Unicode escapes are not allowed in byte literals or byte strings")
+            }
             TooShortAsciiCodeEscape => write!(f, "Escape sequence should have two digits"),
             AsciiCodeEscapeOutOfRange => {
                 write!(f, "Escape sequence should be between \\x00 and \\x7F")
diff --git a/crates/ra_syntax/src/yellow/syntax_text.rs b/crates/ra_syntax/src/yellow/syntax_text.rs
index 378cd1b2e..84e5b231a 100644
--- a/crates/ra_syntax/src/yellow/syntax_text.rs
+++ b/crates/ra_syntax/src/yellow/syntax_text.rs
@@ -10,10 +10,7 @@ pub struct SyntaxText<'a> {
 
 impl<'a> SyntaxText<'a> {
     pub(crate) fn new(node: &'a SyntaxNode) -> SyntaxText<'a> {
-        SyntaxText {
-            node,
-            range: node.range(),
-        }
+        SyntaxText { node, range: node.range() }
     }
 
     pub fn chunks(&self) -> impl Iterator<Item = &'a str> {
@@ -58,10 +55,7 @@ impl<'a> SyntaxText<'a> {
         let range = range.restrict(self.range).unwrap_or_else(|| {
             panic!("invalid slice, range: {:?}, slice: {:?}", self.range, range)
         });
-        SyntaxText {
-            node: self.node,
-            range,
-        }
+        SyntaxText { node: self.node, range }
     }
 
     pub fn char_at(&self, offset: impl Into<TextUnit>) -> Option<char> {
diff --git a/crates/ra_syntax/tests/test.rs b/crates/ra_syntax/tests/test.rs
index 3243b27ae..168d0623d 100644
--- a/crates/ra_syntax/tests/test.rs
+++ b/crates/ra_syntax/tests/test.rs
@@ -23,36 +23,28 @@ fn lexer_tests() {
 
 #[test]
 fn parser_tests() {
-    dir_tests(
-        &test_data_dir(),
-        &["parser/inline/ok", "parser/ok"],
-        |text, path| {
-            let file = SourceFile::parse(text);
-            let errors = file.errors();
-            assert_eq!(
-                &*errors,
-                &[] as &[ra_syntax::SyntaxError],
-                "There should be no errors in the file {:?}",
-                path.display()
-            );
-            dump_tree(file.syntax())
-        },
-    );
-    dir_tests(
-        &test_data_dir(),
-        &["parser/err", "parser/inline/err"],
-        |text, path| {
-            let file = SourceFile::parse(text);
-            let errors = file.errors();
-            assert_ne!(
-                &*errors,
-                &[] as &[ra_syntax::SyntaxError],
-                "There should be errors in the file {:?}",
-                path.display()
-            );
-            dump_tree(file.syntax())
-        },
-    );
+    dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
+        let file = SourceFile::parse(text);
+        let errors = file.errors();
+        assert_eq!(
+            &*errors,
+            &[] as &[ra_syntax::SyntaxError],
+            "There should be no errors in the file {:?}",
+            path.display()
+        );
+        dump_tree(file.syntax())
+    });
+    dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
+        let file = SourceFile::parse(text);
+        let errors = file.errors();
+        assert_ne!(
+            &*errors,
+            &[] as &[ra_syntax::SyntaxError],
+            "There should be errors in the file {:?}",
+            path.display()
+        );
+        dump_tree(file.syntax())
+    });
 }
 
 #[test]
@@ -87,12 +79,7 @@ fn self_hosting_parsing() {
         let text = read_text(entry.path());
         let node = SourceFile::parse(&text);
         let errors = node.errors();
-        assert_eq!(
-            &*errors,
-            &[],
-            "There should be no errors in the file {:?}",
-            entry
-        );
+        assert_eq!(&*errors, &[], "There should be no errors in the file {:?}", entry);
     }
     assert!(
         count > 30,