author    Christopher Durham <[email protected]>  2018-01-27 23:51:12 +0000
committer GitHub <[email protected]>  2018-01-27 23:51:12 +0000
commit    e6e61251abb68b3e6a47fd3708f4dea6059a9ec3 (patch)
tree      c22cfa06a26add11f3aefca3806f0009dd25d923 /src
parent    357cd3358167daa38f3ff34d225e1501faff6015 (diff)
parent    9140b3e7286efae57ee8c49c7bfcad8737f9c6fc (diff)
Merge pull request #12 from CAD97/enforce-fmt
Enforce rustfmt format
Diffstat (limited to 'src')
-rw-r--r--  src/bin/gen.rs | 21
-rw-r--r--  src/bin/parse-rust.rs | 2
-rw-r--r--  src/lexer/classes.rs | 9
-rw-r--r--  src/lexer/comments.rs | 5
-rw-r--r--  src/lexer/mod.rs | 145
-rw-r--r--  src/lexer/numbers.rs | 6
-rw-r--r--  src/lexer/ptr.rs | 11
-rw-r--r--  src/lexer/strings.rs | 33
-rw-r--r--  src/lib.rs | 8
-rw-r--r--  src/parser/event_parser/grammar/attributes.rs | 15
-rw-r--r--  src/parser/event_parser/grammar/expressions.rs | 8
-rw-r--r--  src/parser/event_parser/grammar/items.rs | 38
-rw-r--r--  src/parser/event_parser/grammar/mod.rs | 25
-rw-r--r--  src/parser/event_parser/grammar/paths.rs | 4
-rw-r--r--  src/parser/event_parser/grammar/types.rs | 2
-rw-r--r--  src/parser/event_parser/mod.rs | 2
-rw-r--r--  src/parser/event_parser/parser.rs | 46
-rw-r--r--  src/parser/mod.rs | 30
-rw-r--r--  src/text.rs | 9
-rw-r--r--  src/tree/file_builder.rs | 32
-rw-r--r--  src/tree/mod.rs | 44
21 files changed, 257 insertions, 238 deletions
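The commit message above says the format is enforced, but the diff below contains only the mechanical rustfmt changes themselves, plus a #[cfg_attr(rustfmt, rustfmt_skip)] added in src/lib.rs so the generated syntax_kinds module is left alone. How the format actually stays enforced is not shown in this commit; a common way to wire it up, purely a sketch assuming a toolchain where "cargo fmt -- --check" is available and not something taken from this PR, is a test that fails whenever the tree is not rustfmt-clean:

    // Hypothetical enforcement test, not part of this commit: `cargo test`
    // fails if rustfmt would change any file in the workspace.
    #[test]
    fn code_is_rustfmt_clean() {
        let status = std::process::Command::new("cargo")
            .args(&["fmt", "--all", "--", "--check"])
            .status()
            .expect("failed to spawn cargo fmt");
        assert!(status.success(), "source is not rustfmt-clean; run `cargo fmt`");
    }

With a check like this in CI, any later change that does not match the formatting established by this merge is rejected.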
diff --git a/src/bin/gen.rs b/src/bin/gen.rs
index 5ebf3e2e8..8f6402f5c 100644
--- a/src/bin/gen.rs
+++ b/src/bin/gen.rs
@@ -2,8 +2,8 @@ extern crate serde;
 #[macro_use]
 extern crate serde_derive;
 
-extern crate ron;
 extern crate file;
+extern crate ron;
 
 use std::path::PathBuf;
 use std::fmt::Write;
@@ -33,11 +33,12 @@ impl Grammar {
         acc.push_str("use tree::{SyntaxKind, SyntaxInfo};\n");
         acc.push_str("\n");
 
-        let syntax_kinds: Vec<String> =
-            self.keywords.iter().map(|kw| kw_token(kw))
-            .chain(self.tokens.iter().cloned())
-            .chain(self.nodes.iter().cloned())
-            .collect();
+        let syntax_kinds: Vec<String> = self.keywords
+            .iter()
+            .map(|kw| kw_token(kw))
+            .chain(self.tokens.iter().cloned())
+            .chain(self.nodes.iter().cloned())
+            .collect();
 
         for (idx, kind) in syntax_kinds.iter().enumerate() {
             let sname = scream(kind);
@@ -48,7 +49,11 @@ impl Grammar {
             ).unwrap();
         }
         acc.push_str("\n");
-        write!(acc, "static INFOS: [SyntaxInfo; {}] = [\n", syntax_kinds.len()).unwrap();
+        write!(
+            acc,
+            "static INFOS: [SyntaxInfo; {}] = [\n",
+            syntax_kinds.len()
+        ).unwrap();
         for kind in syntax_kinds.iter() {
             let sname = scream(kind);
             write!(
@@ -91,4 +96,4 @@ fn scream(word: &str) -> String {
 
 fn kw_token(keyword: &str) -> String {
     format!("{}_KW", scream(keyword))
-}
\ No newline at end of file
+}
diff --git a/src/bin/parse-rust.rs b/src/bin/parse-rust.rs
index 3c13e732e..af1325bfc 100644
--- a/src/bin/parse-rust.rs
+++ b/src/bin/parse-rust.rs
@@ -2,7 +2,7 @@ extern crate libsyntax2;
 
 use std::io::Read;
 
-use libsyntax2::{tokenize, parse};
+use libsyntax2::{parse, tokenize};
 use libsyntax2::utils::dump_tree;
 
 fn main() {
diff --git a/src/lexer/classes.rs b/src/lexer/classes.rs
index 4235d2648..7fed008af 100644
--- a/src/lexer/classes.rs
+++ b/src/lexer/classes.rs
@@ -1,17 +1,12 @@
 use unicode_xid::UnicodeXID;
 
 pub fn is_ident_start(c: char) -> bool {
-    (c >= 'a' && c <= 'z')
-        || (c >= 'A' && c <= 'Z')
-        || c == '_'
+    (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_'
         || (c > '\x7f' && UnicodeXID::is_xid_start(c))
 }
 
 pub fn is_ident_continue(c: char) -> bool {
-    (c >= 'a' && c <= 'z')
-        || (c >= 'A' && c <= 'Z')
-        || (c >= '0' && c <= '9')
-        || c == '_'
+    (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_'
         || (c > '\x7f' && UnicodeXID::is_xid_continue(c))
 }
 
diff --git a/src/lexer/comments.rs b/src/lexer/comments.rs
index 79782cc5b..b70f2c6c6 100644
--- a/src/lexer/comments.rs
+++ b/src/lexer/comments.rs
@@ -1,6 +1,6 @@
 use lexer::ptr::Ptr;
 
-use {SyntaxKind};
+use SyntaxKind;
 use syntax_kinds::*;
 
 pub(crate) fn scan_shebang(ptr: &mut Ptr) -> bool {
@@ -23,7 +23,6 @@ pub(crate) fn scan_comment(ptr: &mut Ptr) -> Option<SyntaxKind> {
     }
 }
 
-
 fn bump_until_eol(ptr: &mut Ptr) {
     loop {
         if ptr.next_is('\n') || ptr.next_is('\r') && ptr.nnext_is('\n') {
@@ -33,4 +32,4 @@ fn bump_until_eol(ptr: &mut Ptr) {
             break;
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/lexer/mod.rs b/src/lexer/mod.rs
index 842059a42..2f8d3a402 100644
--- a/src/lexer/mod.rs
+++ b/src/lexer/mod.rs
@@ -1,4 +1,4 @@
-use {Token, SyntaxKind};
+use {SyntaxKind, Token};
 use syntax_kinds::*;
 
 mod ptr;
@@ -11,10 +11,11 @@ mod numbers;
 use self::numbers::scan_number;
 
 mod strings;
-use self::strings::{is_string_literal_start, scan_char, scan_byte_char_or_string, scan_string, scan_raw_string};
+use self::strings::{is_string_literal_start, scan_byte_char_or_string, scan_char, scan_raw_string,
+                    scan_string};
 
 mod comments;
-use self::comments::{scan_shebang, scan_comment};
+use self::comments::{scan_comment, scan_shebang};
 
 pub fn tokenize(text: &str) -> Vec<Token> {
     let mut text = text;
@@ -45,10 +46,10 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
     match c {
         '#' => if scan_shebang(ptr) {
             return SHEBANG;
-        }
+        },
         '/' => if let Some(kind) = scan_comment(ptr) {
             return kind;
-        }
+        },
         _ => (),
     }
 
@@ -89,79 +90,91 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
         '%' => return PERCENT,
 
         // Multi-byte tokens.
-        '.' => return match (ptr.next(), ptr.nnext()) {
-            (Some('.'), Some('.')) => {
-                ptr.bump();
-                ptr.bump();
-                DOTDOTDOT
-            },
-            (Some('.'), Some('=')) => {
-                ptr.bump();
-                ptr.bump();
-                DOTDOTEQ
-            },
-            (Some('.'), _) => {
-                ptr.bump();
-                DOTDOT
-            },
-            _ => DOT
-        },
-        ':' => return match ptr.next() {
-            Some(':') => {
-                ptr.bump();
-                COLONCOLON
-            }
-            _ => COLON
-        },
-        '=' => return match ptr.next() {
-            Some('=') => {
-                ptr.bump();
-                EQEQ
-            }
-            Some('>') => {
-                ptr.bump();
-                FAT_ARROW
-            }
-            _ => EQ,
-        },
-        '!' => return match ptr.next() {
-            Some('=') => {
-                ptr.bump();
-                NEQ
-            }
-            _ => EXCL,
-        },
-        '-' => return if ptr.next_is('>') {
-            ptr.bump();
-            THIN_ARROW
-        } else {
-            MINUS
-        },
+        '.' => {
+            return match (ptr.next(), ptr.nnext()) {
+                (Some('.'), Some('.')) => {
+                    ptr.bump();
+                    ptr.bump();
+                    DOTDOTDOT
+                }
+                (Some('.'), Some('=')) => {
+                    ptr.bump();
+                    ptr.bump();
+                    DOTDOTEQ
+                }
+                (Some('.'), _) => {
+                    ptr.bump();
+                    DOTDOT
+                }
+                _ => DOT,
+            }
+        }
+        ':' => {
+            return match ptr.next() {
+                Some(':') => {
+                    ptr.bump();
+                    COLONCOLON
+                }
+                _ => COLON,
+            }
+        }
+        '=' => {
+            return match ptr.next() {
+                Some('=') => {
+                    ptr.bump();
+                    EQEQ
+                }
+                Some('>') => {
+                    ptr.bump();
+                    FAT_ARROW
+                }
+                _ => EQ,
+            }
+        }
+        '!' => {
+            return match ptr.next() {
+                Some('=') => {
+                    ptr.bump();
+                    NEQ
+                }
+                _ => EXCL,
+            }
+        }
+        '-' => {
+            return if ptr.next_is('>') {
+                ptr.bump();
+                THIN_ARROW
+            } else {
+                MINUS
+            }
+        }
 
         // If the character is an ident start not followed by another single
         // quote, then this is a lifetime name:
-        '\'' => return if ptr.next_is_p(is_ident_start) && !ptr.nnext_is('\'') {
-            ptr.bump();
-            while ptr.next_is_p(is_ident_continue) {
-                ptr.bump();
-            }
-            // lifetimes shouldn't end with a single quote
-            // if we find one, then this is an invalid character literal
-            if ptr.next_is('\'') {
-                ptr.bump();
-                return CHAR; // TODO: error reporting
-            }
-            LIFETIME
-        } else {
-            scan_char(ptr);
-            scan_literal_suffix(ptr);
-            CHAR
-        },
+        '\'' => {
+            return if ptr.next_is_p(is_ident_start) && !ptr.nnext_is('\'') {
+                ptr.bump();
+                while ptr.next_is_p(is_ident_continue) {
+                    ptr.bump();
+                }
+                // lifetimes shouldn't end with a single quote
+                // if we find one, then this is an invalid character literal
+                if ptr.next_is('\'') {
+                    ptr.bump();
+                    return CHAR; // TODO: error reporting
+                }
+                LIFETIME
+            } else {
+                scan_char(ptr);
+                scan_literal_suffix(ptr);
+                CHAR
+            };
+        }
         'b' => {
             let kind = scan_byte_char_or_string(ptr);
             scan_literal_suffix(ptr);
-            return kind
-        },
+            return kind;
+        }
         '"' => {
             scan_string(ptr);
             scan_literal_suffix(ptr);
diff --git a/src/lexer/numbers.rs b/src/lexer/numbers.rs
index 4c7edfe1c..95e42246f 100644
--- a/src/lexer/numbers.rs
+++ b/src/lexer/numbers.rs
@@ -1,7 +1,7 @@
 use lexer::ptr::Ptr;
 use lexer::classes::*;
 
-use {SyntaxKind};
+use SyntaxKind;
 use syntax_kinds::*;
 
 pub(crate) fn scan_number(c: char, ptr: &mut Ptr) -> SyntaxKind {
@@ -49,10 +49,10 @@ fn scan_digits(ptr: &mut Ptr, allow_hex: bool) {
             '_' | '0'...'9' => {
                 ptr.bump();
             }
-            'a'...'f' | 'A' ... 'F' if allow_hex => {
+            'a'...'f' | 'A'...'F' if allow_hex => {
                 ptr.bump();
             }
-            _ => return
+            _ => return,
         }
     }
 }
diff --git a/src/lexer/ptr.rs b/src/lexer/ptr.rs
index ff6ef11fc..99d55b283 100644
--- a/src/lexer/ptr.rs
+++ b/src/lexer/ptr.rs
@@ -1,4 +1,4 @@
-use {TextUnit};
+use TextUnit;
 
 use std::str::Chars;
 
@@ -9,7 +9,10 @@ pub(crate) struct Ptr<'s> {
 
 impl<'s> Ptr<'s> {
     pub fn new(text: &'s str) -> Ptr<'s> {
-        Ptr { text, len: TextUnit::new(0) }
+        Ptr {
+            text,
+            len: TextUnit::new(0),
+        }
     }
 
     pub fn into_len(self) -> TextUnit {
@@ -53,7 +56,7 @@ impl<'s> Ptr<'s> {
             match self.next() {
                 Some(c) if pred(c) => {
                     self.bump();
-                },
+                }
                 _ => return,
             }
         }
@@ -66,6 +69,6 @@ impl<'s> Ptr<'s> {
 
     fn chars(&self) -> Chars {
         let len: u32 = self.len.into();
-        self.text[len as usize ..].chars()
+        self.text[len as usize..].chars()
     }
 }
diff --git a/src/lexer/strings.rs b/src/lexer/strings.rs
index 116d31760..00a84ec85 100644
--- a/src/lexer/strings.rs
+++ b/src/lexer/strings.rs
@@ -1,17 +1,17 @@
-use {SyntaxKind};
+use SyntaxKind;
 use syntax_kinds::*;
 
 use lexer::ptr::Ptr;
 
 pub(crate) fn is_string_literal_start(c: char, c1: Option<char>, c2: Option<char>) -> bool {
     match (c, c1, c2) {
-        ('r', Some('"'), _) |
-        ('r', Some('#'), _) |
-        ('b', Some('"'), _) |
-        ('b', Some('\''), _) |
-        ('b', Some('r'), Some('"')) |
-        ('b', Some('r'), Some('#')) => true,
-        _ => false
+        ('r', Some('"'), _)
+        | ('r', Some('#'), _)
+        | ('b', Some('"'), _)
+        | ('b', Some('\''), _)
+        | ('b', Some('r'), Some('"'))
+        | ('b', Some('r'), Some('#')) => true,
+        _ => false,
     }
 }
 
@@ -50,20 +50,20 @@ pub(crate) fn scan_byte_char_or_string(ptr: &mut Ptr) -> SyntaxKind {
 pub(crate) fn scan_string(ptr: &mut Ptr) {
     while let Some(c) = ptr.bump() {
         if c == '"' {
-            return
+            return;
         }
     }
 }
 
 pub(crate) fn scan_raw_string(ptr: &mut Ptr) {
     if !ptr.next_is('"') {
-        return
+        return;
     }
     ptr.bump();
 
     while let Some(c) = ptr.bump() {
         if c == '"' {
-            return
+            return;
         }
     }
 }
@@ -71,32 +71,32 @@ pub(crate) fn scan_raw_string(ptr: &mut Ptr) {
 fn scan_byte(ptr: &mut Ptr) {
     if ptr.next_is('\'') {
         ptr.bump();
-        return
+        return;
     }
     ptr.bump();
     if ptr.next_is('\'') {
         ptr.bump();
-        return
+        return;
     }
 }
 
 fn scan_byte_string(ptr: &mut Ptr) {
     while let Some(c) = ptr.bump() {
         if c == '"' {
-            return
+            return;
         }
     }
 }
 
 fn scan_raw_byte_string(ptr: &mut Ptr) {
     if !ptr.next_is('"') {
-        return
+        return;
     }
     ptr.bump();
 
     while let Some(c) = ptr.bump() {
         if c == '"' {
-            return
+            return;
         }
     }
 }
@@ -105,4 +105,3 @@ fn scan_char_or_byte(ptr: &mut Ptr) {
     //FIXME: deal with escape sequencies
     ptr.bump();
 }
-
diff --git a/src/lib.rs b/src/lib.rs
index 7fd9e547a..39b01a1cb 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -5,9 +5,10 @@ mod tree;
 mod lexer;
 mod parser;
 
+#[cfg_attr(rustfmt, rustfmt_skip)]
 pub mod syntax_kinds;
-pub use text::{TextUnit, TextRange};
-pub use tree::{SyntaxKind, Token, FileBuilder, Sink, File, Node};
+pub use text::{TextRange, TextUnit};
+pub use tree::{File, FileBuilder, Node, Sink, SyntaxKind, Token};
 pub use lexer::{next_token, tokenize};
 pub use parser::parse;
 
@@ -25,7 +26,8 @@ pub mod utils {
             buff.push_str(&String::from(" ").repeat(level));
             write!(buff, "{:?}\n", node).unwrap();
             let my_errors = node.errors().filter(|e| e.after_child().is_none());
-            let parent_errors = node.parent().into_iter()
+            let parent_errors = node.parent()
+                .into_iter()
                 .flat_map(|n| n.errors())
                 .filter(|e| e.after_child() == Some(node));
 
diff --git a/src/parser/event_parser/grammar/attributes.rs b/src/parser/event_parser/grammar/attributes.rs
index 045840059..8bf04afce 100644
--- a/src/parser/event_parser/grammar/attributes.rs
+++ b/src/parser/event_parser/grammar/attributes.rs
@@ -12,8 +12,7 @@ pub(super) fn outer_attributes(p: &mut Parser) {
     }
 }
 
-
-fn attribute(p: &mut Parser, inner: bool){
+fn attribute(p: &mut Parser, inner: bool) {
     let attr = p.start();
     assert!(p.at(POUND));
     p.bump();
@@ -38,9 +37,7 @@ fn meta_item(p: &mut Parser) {
             EQ => {
                 p.bump();
                 if !expressions::literal(p) {
-                    p.error()
-                        .message("expected literal")
-                        .emit();
+                    p.error().message("expected literal").emit();
                 }
             }
             L_PAREN => meta_item_arg_list(p),
@@ -48,9 +45,7 @@ fn meta_item(p: &mut Parser) {
         }
         meta_item.complete(p, META_ITEM);
     } else {
-        p.error()
-            .message("expected attribute value")
-            .emit()
+        p.error().message("expected attribute value").emit()
     }
 }
 
@@ -73,8 +68,8 @@ fn meta_item_arg_list(p: &mut Parser) {
                 p.error().message(message).emit();
                 p.bump();
                 err.complete(p, ERROR);
-                continue
-            }
+                continue;
+            },
         }
         if !p.at(R_PAREN) {
             p.expect(COMMA);
diff --git a/src/parser/event_parser/grammar/expressions.rs b/src/parser/event_parser/grammar/expressions.rs
index a943b8c81..c81dc6c35 100644
--- a/src/parser/event_parser/grammar/expressions.rs
+++ b/src/parser/event_parser/grammar/expressions.rs
@@ -2,15 +2,13 @@ use super::*;
 
 pub(super) fn literal(p: &mut Parser) -> bool {
     match p.current() {
-        TRUE_KW | FALSE_KW |
-        INT_NUMBER | FLOAT_NUMBER |
-        BYTE | CHAR |
-        STRING | RAW_STRING | BYTE_STRING | RAW_BYTE_STRING => {
+        TRUE_KW | FALSE_KW | INT_NUMBER | FLOAT_NUMBER | BYTE | CHAR | STRING | RAW_STRING
+        | BYTE_STRING | RAW_BYTE_STRING => {
             let lit = p.start();
             p.bump();
             lit.complete(p, LITERAL);
             true
         }
-        _ => false
+        _ => false,
     }
 }
diff --git a/src/parser/event_parser/grammar/items.rs b/src/parser/event_parser/grammar/items.rs
index 7706690cc..e569e5047 100644
--- a/src/parser/event_parser/grammar/items.rs
+++ b/src/parser/event_parser/grammar/items.rs
@@ -7,15 +7,8 @@ pub(super) fn mod_contents(p: &mut Parser) {
     }
 }
 
-pub(super) const ITEM_FIRST: TokenSet = token_set![
-    EXTERN_KW,
-    MOD_KW,
-    USE_KW,
-    STRUCT_KW,
-    FN_KW,
-    PUB_KW,
-    POUND,
-];
+pub(super) const ITEM_FIRST: TokenSet =
+    token_set![EXTERN_KW, MOD_KW, USE_KW, STRUCT_KW, FN_KW, PUB_KW, POUND,];
 
 fn item(p: &mut Parser) {
     let item = p.start();
@@ -48,7 +41,7 @@ fn item(p: &mut Parser) {
             let message = if err_token == SEMI {
                 //TODO: if the item is incomplete, this message is misleading
                 "expected item, found `;`\n\
-                consider removing this semicolon"
+                 consider removing this semicolon"
             } else {
                 "expected item"
             };
@@ -76,10 +69,9 @@ fn struct_item(p: &mut Parser) {
             return;
         }
         L_CURLY => named_fields(p),
-        _ => { //TODO: special case `(` error message
-            p.error()
-                .message("expected `;` or `{`")
-                .emit();
+        _ => {
+            //TODO: special case `(` error message
+            p.error().message("expected `;` or `{`").emit();
             return;
         }
     }
@@ -94,9 +86,7 @@ fn struct_item(p: &mut Parser) {
             p.expect(SEMI);
         }
         _ => {
-            p.error()
-                .message("expected `;`, `{`, or `(`")
-                .emit();
+            p.error().message("expected `;`, `{`, or `(`").emit();
             return;
         }
     }
@@ -177,7 +167,7 @@ fn use_item(p: &mut Parser) {
     use_tree(p);
     p.expect(SEMI);
 
-    fn use_tree(p: &mut Parser){
+    fn use_tree(p: &mut Parser) {
         let la = p.raw_lookahead(1);
         let m = p.start();
         match (p.current(), la) {
@@ -209,9 +199,7 @@ fn use_item(p: &mut Parser) {
                             L_CURLY => nested_trees(p),
                             _ => {
                                 // is this unreachable?
-                                p.error()
-                                    .message("expected `{` or `*`")
-                                    .emit();
+                                p.error().message("expected `{` or `*`").emit();
                             }
                         }
                     }
@@ -222,7 +210,7 @@ fn use_item(p: &mut Parser) {
                 m.abandon(p);
                 p.err_and_bump("expected one of `*`, `::`, `{`, `self`, `super`, `indent`");
                 return;
-            },
+            }
         }
         m.complete(p, USE_TREE);
     }
@@ -240,13 +228,9 @@ fn use_item(p: &mut Parser) {
     }
 }
 
-
 fn fn_item(p: &mut Parser) {
     assert!(p.at(FN_KW));
     p.bump();
 
-    p.expect(IDENT) && p.expect(L_PAREN) && p.expect(R_PAREN)
-        && p.curly_block(|_| ());
+    p.expect(IDENT) && p.expect(L_PAREN) && p.expect(R_PAREN) && p.curly_block(|_| ());
 }
-
-
diff --git a/src/parser/event_parser/grammar/mod.rs b/src/parser/event_parser/grammar/mod.rs
index 6e4f72096..c6ab1fbe2 100644
--- a/src/parser/event_parser/grammar/mod.rs
+++ b/src/parser/event_parser/grammar/mod.rs
@@ -1,5 +1,5 @@
 use super::parser::{Parser, TokenSet};
-use {SyntaxKind};
+use SyntaxKind;
 use tree::EOF;
 use syntax_kinds::*;
 
@@ -29,7 +29,7 @@ fn visibility(p: &mut Parser) {
                     }
                     p.expect(R_PAREN);
                 }
-                _ => ()
+                _ => (),
             }
         }
         vis.complete(p, VISIBILITY);
@@ -53,9 +53,7 @@ impl<'p> Parser<'p> {
 
     fn err_and_bump(&mut self, message: &str) {
         let err = self.start();
-        self.error()
-            .message(message)
-            .emit();
+        self.error().message(message).emit();
         self.bump();
         err.complete(self, ERROR);
     }
@@ -65,15 +63,16 @@ impl<'p> Parser<'p> {
             self.bump();
             true
         } else {
-            self.error()
-                .message(format!("expected {:?}", kind))
-                .emit();
+            self.error().message(format!("expected {:?}", kind)).emit();
             false
         }
     }
 
     fn eat(&mut self, kind: SyntaxKind) -> bool {
-        self.current() == kind && { self.bump(); true }
+        self.current() == kind && {
+            self.bump();
+            true
+        }
     }
 }
 
@@ -94,8 +93,7 @@ impl Lookahead for SyntaxKind {
 
 impl Lookahead for [SyntaxKind; 2] {
     fn is_ahead(self, p: &Parser) -> bool {
-        p.current() == self[0]
-            && p.raw_lookahead(1) == self[1]
+        p.current() == self[0] && p.raw_lookahead(1) == self[1]
     }
 
     fn consume(p: &mut Parser) {
@@ -106,9 +104,7 @@ impl Lookahead for [SyntaxKind; 2] {
 
 impl Lookahead for [SyntaxKind; 3] {
     fn is_ahead(self, p: &Parser) -> bool {
-        p.current() == self[0]
-            && p.raw_lookahead(1) == self[1]
-            && p.raw_lookahead(2) == self[2]
+        p.current() == self[0] && p.raw_lookahead(1) == self[1] && p.raw_lookahead(2) == self[2]
     }
 
     fn consume(p: &mut Parser) {
@@ -130,5 +126,4 @@ impl<'a> Lookahead for AnyOf<'a> {
     fn consume(p: &mut Parser) {
         p.bump();
     }
-
 }
diff --git a/src/parser/event_parser/grammar/paths.rs b/src/parser/event_parser/grammar/paths.rs
index b58c59aef..4e028073a 100644
--- a/src/parser/event_parser/grammar/paths.rs
+++ b/src/parser/event_parser/grammar/paths.rs
@@ -34,9 +34,7 @@ fn path_segment(p: &mut Parser, first: bool) {
             p.bump();
         }
         _ => {
-            p.error()
-                .message("expected identifier")
-                .emit();
+            p.error().message("expected identifier").emit();
         }
     };
     segment.complete(p, PATH_SEGMENT);
diff --git a/src/parser/event_parser/grammar/types.rs b/src/parser/event_parser/grammar/types.rs
index c431643d7..1a3d44a0a 100644
--- a/src/parser/event_parser/grammar/types.rs
+++ b/src/parser/event_parser/grammar/types.rs
@@ -2,4 +2,4 @@ use super::*;
 
 pub(super) fn type_ref(p: &mut Parser) {
     p.expect(IDENT);
-}
\ No newline at end of file
+}
diff --git a/src/parser/event_parser/mod.rs b/src/parser/event_parser/mod.rs
index b9ffded9d..65aea017b 100644
--- a/src/parser/event_parser/mod.rs
+++ b/src/parser/event_parser/mod.rs
@@ -1,4 +1,4 @@
-use {Token, SyntaxKind};
+use {SyntaxKind, Token};
 
 #[macro_use]
 mod parser;
diff --git a/src/parser/event_parser/parser.rs b/src/parser/event_parser/parser.rs
index 18231e493..5ba3071cb 100644
--- a/src/parser/event_parser/parser.rs
+++ b/src/parser/event_parser/parser.rs
@@ -1,17 +1,19 @@
-use {Token, SyntaxKind, TextUnit};
+use {SyntaxKind, TextUnit, Token};
 use super::Event;
 use super::super::is_insignificant;
-use syntax_kinds::{L_CURLY, R_CURLY, ERROR};
+use syntax_kinds::{ERROR, L_CURLY, R_CURLY};
 use tree::{EOF, TOMBSTONE};
 
 pub(crate) struct Marker {
-    pos: u32
+    pos: u32,
 }
 
 impl Marker {
     pub fn complete(self, p: &mut Parser, kind: SyntaxKind) -> CompleteMarker {
         match self.event(p) {
-            &mut Event::Start { kind: ref mut slot, ..} => {
+            &mut Event::Start {
+                kind: ref mut slot, ..
+            } => {
                 *slot = kind;
             }
             _ => unreachable!(),
@@ -26,8 +28,11 @@ impl Marker {
         let idx = self.pos as usize;
         if idx == p.events.len() - 1 {
             match p.events.pop() {
-                Some(Event::Start { kind: TOMBSTONE, forward_parent: None }) => (),
-                _ => unreachable!()
+                Some(Event::Start {
+                    kind: TOMBSTONE,
+                    forward_parent: None,
+                }) => (),
+                _ => unreachable!(),
             }
         }
         ::std::mem::forget(self);
@@ -51,14 +56,17 @@ impl Drop for Marker {
 }
 
 pub(crate) struct CompleteMarker {
-    pos: u32
+    pos: u32,
 }
 
 impl CompleteMarker {
     pub(crate) fn precede(self, p: &mut Parser) -> Marker {
         let m = p.start();
         match p.events[self.pos as usize] {
-            Event::Start { ref mut forward_parent, ..} => {
+            Event::Start {
+                ref mut forward_parent,
+                ..
+            } => {
                 *forward_parent = Some(m.pos - self.pos);
             }
             _ => unreachable!(),
@@ -68,7 +76,7 @@ impl CompleteMarker {
 }
 
 pub(crate) struct TokenSet {
-    pub tokens: &'static [SyntaxKind]
+    pub tokens: &'static [SyntaxKind],
 }
 
 impl TokenSet {
@@ -90,7 +98,6 @@ macro_rules! token_set {
     };
 }
 
-
 pub(crate) struct Parser<'t> {
     #[allow(unused)]
     text: &'t str,
@@ -150,8 +157,13 @@ impl<'t> Parser<'t> {
     }
 
     pub(crate) fn start(&mut self) -> Marker {
-        let m = Marker { pos: self.events.len() as u32 };
-        self.event(Event::Start { kind: TOMBSTONE, forward_parent: None });
+        let m = Marker {
+            pos: self.events.len() as u32,
+        };
+        self.event(Event::Start {
+            kind: TOMBSTONE,
+            forward_parent: None,
+        });
         m
     }
 
@@ -168,7 +180,10 @@ impl<'t> Parser<'t> {
             _ => (),
         }
         self.pos += 1;
-        self.event(Event::Token { kind, n_raw_tokens: 1 });
+        self.event(Event::Token {
+            kind,
+            n_raw_tokens: 1,
+        });
         kind
     }
 
@@ -210,7 +225,10 @@ pub(crate) struct ErrorBuilder<'p, 't: 'p> {
 
 impl<'t, 'p> ErrorBuilder<'p, 't> {
     fn new(parser: &'p mut Parser<'t>) -> Self {
-        ErrorBuilder { message: None, parser }
+        ErrorBuilder {
+            message: None,
+            parser,
+        }
     }
 
     pub fn message<M: Into<String>>(mut self, m: M) -> Self {
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index 5ec4b8e93..d04ed1e75 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -1,4 +1,4 @@
-use {Token, File, FileBuilder, Sink, SyntaxKind};
+use {File, FileBuilder, Sink, SyntaxKind, Token};
 
 use syntax_kinds::*;
 use tree::TOMBSTONE;
@@ -6,17 +6,12 @@ use tree::TOMBSTONE;
 mod event_parser;
 use self::event_parser::Event;
 
-
 pub fn parse(text: String, tokens: &[Token]) -> File {
     let events = event_parser::parse(&text, tokens);
     from_events_to_file(text, tokens, events)
 }
 
-fn from_events_to_file(
-    text: String,
-    tokens: &[Token],
-    events: Vec<Event>,
-) -> File {
+fn from_events_to_file(text: String, tokens: &[Token], events: Vec<Event>) -> File {
     let mut builder = FileBuilder::new(text);
     let mut idx = 0;
 
@@ -26,18 +21,23 @@ fn from_events_to_file(
     for (i, event) in events.iter().enumerate() {
         if holes.last() == Some(&i) {
             holes.pop();
-            continue
+            continue;
         }
 
         match event {
-            &Event::Start { kind: TOMBSTONE, .. } => (),
+            &Event::Start {
+                kind: TOMBSTONE, ..
+            } => (),
 
             &Event::Start { .. } => {
                 forward_parents.clear();
                 let mut idx = i;
                 loop {
                     let (kind, fwd) = match events[idx] {
-                        Event::Start { kind, forward_parent } => (kind, forward_parent),
+                        Event::Start {
+                            kind,
+                            forward_parent,
+                        } => (kind, forward_parent),
                         _ => unreachable!(),
                     };
                     forward_parents.push((idx, kind));
@@ -64,8 +64,11 @@ fn from_events_to_file(
                     }
                 }
                 builder.finish_internal()
-            },
-            &Event::Token { kind: _, mut n_raw_tokens } => loop {
+            }
+            &Event::Token {
+                kind: _,
+                mut n_raw_tokens,
+            } => loop {
                 let token = tokens[idx];
                 if !is_insignificant(token.kind) {
                     n_raw_tokens -= 1;
@@ -76,8 +79,7 @@ fn from_events_to_file(
                     break;
                 }
             },
-            &Event::Error { ref message } =>
-                builder.error().message(message.clone()).emit(),
+            &Event::Error { ref message } => builder.error().message(message.clone()).emit(),
         }
     }
     builder.finish()
diff --git a/src/text.rs b/src/text.rs
index af0a4a9e7..ac1a54a75 100644
--- a/src/text.rs
+++ b/src/text.rs
@@ -64,7 +64,6 @@ impl ops::SubAssign<TextUnit> for TextUnit {
     }
 }
 
-
 #[derive(Clone, Copy, PartialEq, Eq)]
 pub struct TextRange {
     start: TextUnit,
@@ -83,7 +82,6 @@ impl fmt::Display for TextRange {
     }
 }
 
-
 impl TextRange {
     pub fn empty() -> TextRange {
         TextRange::from_to(TextUnit::new(0), TextUnit::new(0))
@@ -91,7 +89,10 @@ impl TextRange {
 
     pub fn from_to(from: TextUnit, to: TextUnit) -> TextRange {
         assert!(from <= to, "Invalid text range [{}; {})", from, to);
-        TextRange { start: from, end: to }
+        TextRange {
+            start: from,
+            end: to,
+        }
     }
 
     pub fn from_len(from: TextUnit, len: TextUnit) -> TextRange {
@@ -121,4 +122,4 @@ impl ops::Index<TextRange> for str {
     fn index(&self, index: TextRange) -> &str {
         &self[index.start().0 as usize..index.end().0 as usize]
     }
-}
\ No newline at end of file
+}
diff --git a/src/tree/file_builder.rs b/src/tree/file_builder.rs
index 35702ddd7..939922cb2 100644
--- a/src/tree/file_builder.rs
+++ b/src/tree/file_builder.rs
@@ -1,5 +1,5 @@
-use {SyntaxKind, TextUnit, TextRange};
-use super::{NodeData, SyntaxErrorData, NodeIdx, File};
+use {SyntaxKind, TextRange, TextUnit};
+use super::{File, NodeData, NodeIdx, SyntaxErrorData};
 
 pub trait Sink {
     fn leaf(&mut self, kind: SyntaxKind, len: TextUnit);
@@ -8,7 +8,6 @@ pub trait Sink {
     fn error(&mut self) -> ErrorBuilder;
 }
 
-
 pub struct FileBuilder {
     text: String,
     nodes: Vec<NodeData>,
@@ -48,9 +47,9 @@ impl Sink for FileBuilder {
     }
 
     fn finish_internal(&mut self) {
-        let (id, _) = self.in_progress.pop().expect(
-            "trying to complete a node, but there are no in-progress nodes"
-        );
+        let (id, _) = self.in_progress
+            .pop()
+            .expect("trying to complete a node, but there are no in-progress nodes");
         if !self.in_progress.is_empty() {
             self.add_len(id);
         }
@@ -76,11 +75,14 @@ impl FileBuilder {
         assert!(
             self.in_progress.is_empty(),
             "some nodes in FileBuilder are unfinished: {:?}",
-            self.in_progress.iter().map(|&(idx, _)| self.nodes[idx].kind)
+            self.in_progress
+                .iter()
+                .map(|&(idx, _)| self.nodes[idx].kind)
                 .collect::<Vec<_>>()
         );
         assert_eq!(
-            self.pos, (self.text.len() as u32).into(),
+            self.pos,
+            (self.text.len() as u32).into(),
             "nodes in FileBuilder do not cover the whole file"
         );
         File {
@@ -100,7 +102,6 @@ impl FileBuilder {
         child.parent = Some(self.current_id());
         let id = self.new_node(child);
         {
-
             let (parent, sibling) = *self.in_progress.last().unwrap();
             let slot = if let Some(idx) = sibling {
                 &mut self.nodes[idx].next_sibling
@@ -140,12 +141,15 @@ fn grow(left: &mut TextRange, right: TextRange) {
 
 pub struct ErrorBuilder<'f> {
     message: Option<String>,
-    builder: &'f mut FileBuilder
+    builder: &'f mut FileBuilder,
 }
 
 impl<'f> ErrorBuilder<'f> {
     fn new(builder: &'f mut FileBuilder) -> Self {
-        ErrorBuilder { message: None, builder }
+        ErrorBuilder {
+            message: None,
+            builder,
+        }
     }
 
     pub fn message<M: Into<String>>(mut self, m: M) -> Self {
@@ -156,6 +160,10 @@ impl<'f> ErrorBuilder<'f> {
     pub fn emit(self) {
         let message = self.message.expect("Error message not set");
         let &(node, after_child) = self.builder.in_progress.last().unwrap();
-        self.builder.errors.push(SyntaxErrorData { node, message, after_child })
+        self.builder.errors.push(SyntaxErrorData {
+            node,
+            message,
+            after_child,
+        })
     }
 }
diff --git a/src/tree/mod.rs b/src/tree/mod.rs
index 3315b926e..a330caf54 100644
--- a/src/tree/mod.rs
+++ b/src/tree/mod.rs
@@ -1,4 +1,4 @@
-use text::{TextUnit, TextRange};
+use text::{TextRange, TextUnit};
 use syntax_kinds::syntax_info;
 
 use std::fmt;
@@ -11,15 +11,10 @@ pub use self::file_builder::{FileBuilder, Sink};
 pub struct SyntaxKind(pub(crate) u32);
 
 pub(crate) const EOF: SyntaxKind = SyntaxKind(!0);
-pub(crate) const EOF_INFO: SyntaxInfo = SyntaxInfo {
-    name: "EOF"
-};
+pub(crate) const EOF_INFO: SyntaxInfo = SyntaxInfo { name: "EOF" };
 
 pub(crate) const TOMBSTONE: SyntaxKind = SyntaxKind(!0 - 1);
-pub(crate) const TOMBSTONE_INFO: SyntaxInfo = SyntaxInfo {
-    name: "TOMBSTONE"
-};
-
+pub(crate) const TOMBSTONE_INFO: SyntaxInfo = SyntaxInfo { name: "TOMBSTONE" };
 
 impl SyntaxKind {
     fn info(self) -> &'static SyntaxInfo {
@@ -38,7 +33,6 @@ impl fmt::Debug for SyntaxKind {
     }
 }
 
-
 pub(crate) struct SyntaxInfo {
     pub name: &'static str,
 }
@@ -58,7 +52,10 @@ pub struct File {
 impl File {
     pub fn root<'f>(&'f self) -> Node<'f> {
         assert!(!self.nodes.is_empty());
-        Node { file: self, idx: NodeIdx(0) }
+        Node {
+            file: self,
+            idx: NodeIdx(0),
+        }
     }
 }
 
@@ -86,14 +83,17 @@ impl<'f> Node<'f> {
     }
 
     pub fn children(&self) -> Children<'f> {
-        Children { next: self.as_node(self.data().first_child) }
+        Children {
+            next: self.as_node(self.data().first_child),
+        }
     }
 
     pub fn errors(&self) -> SyntaxErrors<'f> {
         let pos = self.file.errors.iter().position(|e| e.node == self.idx);
-        let next = pos
-            .map(|i| ErrorIdx(i as u32))
-            .map(|idx| SyntaxError { file: self.file, idx });
+        let next = pos.map(|i| ErrorIdx(i as u32)).map(|idx| SyntaxError {
+            file: self.file,
+            idx,
+        });
         SyntaxErrors { next }
     }
 
@@ -102,7 +102,10 @@ impl<'f> Node<'f> {
     }
 
     fn as_node(&self, idx: Option<NodeIdx>) -> Option<Node<'f>> {
-        idx.map(|idx| Node { file: self.file, idx })
+        idx.map(|idx| Node {
+            file: self.file,
+            idx,
+        })
     }
 }
 
@@ -118,8 +121,7 @@ impl<'f> cmp::PartialEq<Node<'f>> for Node<'f> {
     }
 }
 
-impl<'f> cmp::Eq for Node<'f> {
-}
+impl<'f> cmp::Eq for Node<'f> {}
 
 #[derive(Clone, Copy)]
 pub struct SyntaxError<'f> {
@@ -134,7 +136,10 @@ impl<'f> SyntaxError<'f> {
 
     pub fn after_child(&self) -> Option<Node<'f>> {
         let idx = self.data().after_child?;
-        Some(Node { file: self.file, idx })
+        Some(Node {
+            file: self.file,
+            idx,
+        })
     }
 
     fn data(&self) -> &'f SyntaxErrorData {
@@ -148,7 +153,7 @@ impl<'f> SyntaxError<'f> {
         }
         let result = SyntaxError {
             file: self.file,
-            idx: ErrorIdx(next_idx)
+            idx: ErrorIdx(next_idx),
         };
         if result.data().node != self.data().node {
             return None;
@@ -185,7 +190,6 @@ impl<'f> Iterator for SyntaxErrors<'f> {
     }
 }
 
-
 #[derive(Clone, Copy, PartialEq, Eq)]
 struct NodeIdx(u32);
 