Diffstat (limited to 'src')
-rw-r--r--  src/lexer/comments.rs                                              |  3
-rw-r--r--  src/lexer/mod.rs                                                   | 50
-rw-r--r--  src/lexer/numbers.rs                                               |  3
-rw-r--r--  src/lexer/strings.rs                                               |  3
-rw-r--r--  src/lib.rs                                                         | 37
-rw-r--r--  src/parser/event.rs                                                | 33
-rw-r--r--  src/parser/grammar/mod.rs                                          | 13
-rw-r--r--  src/parser/input.rs                                                | 10
-rw-r--r--  src/parser/mod.rs                                                  | 26
-rw-r--r--  src/parser/parser/imp.rs                                           |  3
-rw-r--r--  src/parser/parser/mod.rs                                           |  3
-rw-r--r--  src/syntax_kinds/generated.rs (renamed from src/syntax_kinds.rs)   | 70
-rw-r--r--  src/syntax_kinds/mod.rs                                            | 27
-rw-r--r--  src/tree/mod.rs                                                    | 27
-rw-r--r--  src/yellow/builder.rs (renamed from src/tree/file_builder.rs)      | 49
-rw-r--r--  src/yellow/mod.rs                                                  |  4
-rw-r--r--  src/yellow/syntax.rs                                               | 10
17 files changed, 193 insertions, 178 deletions
diff --git a/src/lexer/comments.rs b/src/lexer/comments.rs
index d1e958817..01acb6515 100644
--- a/src/lexer/comments.rs
+++ b/src/lexer/comments.rs
@@ -1,7 +1,6 @@
 use lexer::ptr::Ptr;
 
-use SyntaxKind;
-use syntax_kinds::*;
+use SyntaxKind::{self, *};
 
 pub(crate) fn scan_shebang(ptr: &mut Ptr) -> bool {
     if ptr.next_is('!') && ptr.nnext_is('/') {
diff --git a/src/lexer/mod.rs b/src/lexer/mod.rs
index 65a994327..69cab5b57 100644
--- a/src/lexer/mod.rs
+++ b/src/lexer/mod.rs
@@ -1,21 +1,32 @@
-use {SyntaxKind, Token};
-use syntax_kinds::*;
-
 mod ptr;
-use self::ptr::Ptr;
-
+mod comments;
+mod strings;
+mod numbers;
 mod classes;
-use self::classes::*;
 
-mod numbers;
-use self::numbers::scan_number;
+use {
+    TextUnit,
+    SyntaxKind::{self, *},
+};
 
-mod strings;
-use self::strings::{is_string_literal_start, scan_byte_char_or_string, scan_char, scan_raw_string,
-                    scan_string};
+use self::{
+    ptr::Ptr,
+    classes::*,
+    numbers::scan_number,
+    strings::{
+        is_string_literal_start, scan_byte_char_or_string, scan_char,
+        scan_raw_string, scan_string},
+    comments::{scan_comment, scan_shebang},
+};
 
-mod comments;
-use self::comments::{scan_comment, scan_shebang};
+/// A token of Rust source.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Token {
+    /// The kind of token.
+    pub kind: SyntaxKind,
+    /// The length of the token.
+    pub len: TextUnit,
+}
 
 /// Break a string up into its component tokens
 pub fn tokenize(text: &str) -> Vec<Token> {
@@ -29,6 +40,7 @@ pub fn tokenize(text: &str) -> Vec<Token> {
     }
     acc
 }
+
 /// Get the next token from a string
 pub fn next_token(text: &str) -> Token {
     assert!(!text.is_empty());
@@ -109,7 +121,7 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
                     DOTDOT
                 }
                 _ => DOT,
-            }
+            };
         }
         ':' => {
             return match ptr.next() {
@@ -118,7 +130,7 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
                     COLONCOLON
                 }
                 _ => COLON,
-            }
+            };
         }
         '=' => {
             return match ptr.next() {
@@ -131,7 +143,7 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
                     FAT_ARROW
                 }
                 _ => EQ,
-            }
+            };
         }
         '!' => {
             return match ptr.next() {
@@ -140,7 +152,7 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
                     NEQ
                 }
                 _ => EXCL,
-            }
+            };
         }
         '-' => {
             return if ptr.next_is('>') {
@@ -148,7 +160,7 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
                 THIN_ARROW
             } else {
                 MINUS
-            }
+            };
         }
 
         // If the character is an ident start not followed by another single
@@ -202,7 +214,7 @@ fn scan_ident(c: char, ptr: &mut Ptr) -> SyntaxKind {
         return if c == '_' { UNDERSCORE } else { IDENT };
     }
     ptr.bump_while(is_ident_continue);
-    if let Some(kind) = ident_to_keyword(ptr.current_token_text()) {
+    if let Some(kind) = SyntaxKind::from_keyword(ptr.current_token_text()) {
         return kind;
     }
     IDENT
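
Note on the `Token` move: the struct now lives next to `tokenize`, so lexer clients pull both from `lexer` (or via the crate-root re-exports added in `lib.rs` below). A minimal in-crate sketch of the intended shape; `total_len` is a hypothetical helper, not part of this commit:

    // Hypothetical helper, using the 2015-edition crate-internal paths seen elsewhere in this tree.
    use lexer::{tokenize, Token};
    use TextUnit;

    /// The token lengths of a lex always add back up to the input length.
    fn total_len(text: &str) -> TextUnit {
        let mut len: TextUnit = 0.into();
        for token in tokenize(text) {
            // `Token` is `Copy`, so iterating the Vec by value is fine; `+=` on
            // `TextUnit` is the same operation `GreenBuilder` uses to advance its position.
            len += token.len;
        }
        len
    }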
diff --git a/src/lexer/numbers.rs b/src/lexer/numbers.rs
index 95e42246f..38eac9212 100644
--- a/src/lexer/numbers.rs
+++ b/src/lexer/numbers.rs
@@ -1,8 +1,7 @@
 use lexer::ptr::Ptr;
 use lexer::classes::*;
 
-use SyntaxKind;
-use syntax_kinds::*;
+use SyntaxKind::{self, *};
 
 pub(crate) fn scan_number(c: char, ptr: &mut Ptr) -> SyntaxKind {
     if c == '0' {
diff --git a/src/lexer/strings.rs b/src/lexer/strings.rs
index 00a84ec85..e3704fbb3 100644
--- a/src/lexer/strings.rs
+++ b/src/lexer/strings.rs
@@ -1,5 +1,4 @@
-use SyntaxKind;
-use syntax_kinds::*;
+use SyntaxKind::{self, *};
 
 use lexer::ptr::Ptr;
 
diff --git a/src/lib.rs b/src/lib.rs
index 619ad62e5..4260e22e7 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -19,27 +19,36 @@
 extern crate unicode_xid;
 extern crate text_unit;
 
-mod tree;
 mod lexer;
 mod parser;
 mod yellow;
+mod syntax_kinds;
+
+pub use {
+    text_unit::{TextRange, TextUnit},
+    syntax_kinds::SyntaxKind,
+    yellow::{SyntaxNode},
+    lexer::{tokenize, Token},
+};
+
+pub(crate) use {
+    yellow::SyntaxError
+};
+
+pub fn parse(text: String) -> SyntaxNode {
+    let tokens = tokenize(&text);
+    parser::parse::<yellow::GreenBuilder>(text, &tokens)
+}
 
-pub mod syntax_kinds;
-pub use text_unit::{TextRange, TextUnit};
-pub use tree::{SyntaxKind, Token};
-pub(crate) use tree::{Sink, GreenBuilder};
-pub use lexer::{next_token, tokenize};
-pub use yellow::SyntaxNode;
-pub(crate) use yellow::SError;
-pub use parser::{parse_green};
 
 /// Utilities for simple uses of the parser.
 pub mod utils {
-    use std::fmt::Write;
+    use std::{
+        fmt::Write,
+        collections::BTreeSet
+    };
 
-    use {SyntaxNode};
-    use std::collections::BTreeSet;
-    use SError;
+    use {SyntaxNode, SyntaxError};
 
     /// Parse a file and create a string representation of the resulting parse tree.
     pub fn dump_tree_green(syntax: &SyntaxNode) -> String {
@@ -48,7 +57,7 @@ pub mod utils {
         go(syntax, &mut result, 0, &mut errors);
         return result;
 
-        fn go(node: &SyntaxNode, buff: &mut String, level: usize, errors: &mut BTreeSet<SError>) {
+        fn go(node: &SyntaxNode, buff: &mut String, level: usize, errors: &mut BTreeSet<SyntaxError>) {
             buff.push_str(&String::from(" ").repeat(level));
             write!(buff, "{:?}\n", node).unwrap();
             let my_errors: Vec<_> = errors.iter().filter(|e| e.offset == node.range().start())
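
The new top-level `parse` is the crate's single entry point: tokenize, then drive the parser into the `GreenBuilder` sink. A hedged usage sketch from outside the crate; the `extern crate` name and the `main` wrapper are assumptions, only `parse`, `SyntaxNode`, and `utils::dump_tree_green` come from this diff:

    // Assumed crate name; substitute whatever this package is actually published as.
    extern crate libsyntax2;

    use libsyntax2::{parse, utils::dump_tree_green};

    fn main() {
        let file: libsyntax2::SyntaxNode = parse("fn foo() {}".to_string());
        // One node per line, indented by depth, with syntax errors attached
        // to the node at whose start offset they were reported.
        println!("{}", dump_tree_green(&file));
    }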
diff --git a/src/parser/event.rs b/src/parser/event.rs
index 83039c664..a8d503b3d 100644
--- a/src/parser/event.rs
+++ b/src/parser/event.rs
@@ -1,8 +1,29 @@
+//! This module provides a way to construct a `File`.
+//! It is intended to be completely decoupled from the
+//! parser, so as to allow to evolve the tree representation
+//! and the parser algorithm independently.
+//!
+//! The `Sink` trait is the bridge between the parser and the
+//! tree builder: the parser produces a stream of events like
+//! `start node`, `finish node`, and `FileBuilder` converts
+//! this stream to a real tree.
 use {
-    Sink, SyntaxKind, Token,
-    syntax_kinds::TOMBSTONE,
+    TextUnit,
+    SyntaxKind::{self, TOMBSTONE},
+    lexer::Token,
 };
-use super::is_insignificant;
+
+pub(crate) trait Sink {
+    type Tree;
+
+    fn new(text: String) -> Self;
+
+    fn leaf(&mut self, kind: SyntaxKind, len: TextUnit);
+    fn start_internal(&mut self, kind: SyntaxKind);
+    fn finish_internal(&mut self);
+    fn error(&mut self, err: String);
+    fn finish(self) -> Self::Tree;
+}
 
 /// `Parser` produces a flat list of `Event`s.
 /// They are converted to a tree-structure in
@@ -67,7 +88,7 @@ pub(crate) enum Event {
     },
 }
 
-pub(super) fn process(builder: &mut Sink, tokens: &[Token], events: Vec<Event>) {
+pub(super) fn process(builder: &mut impl Sink, tokens: &[Token], events: Vec<Event>) {
     let mut idx = 0;
 
     let mut holes = Vec::new();
@@ -111,7 +132,7 @@ pub(super) fn process(builder: &mut Sink, tokens: &[Token], events: Vec<Event>)
             &Event::Finish => {
                 while idx < tokens.len() {
                     let token = tokens[idx];
-                    if is_insignificant(token.kind) {
+                    if token.kind.is_trivia() {
                         idx += 1;
                         builder.leaf(token.kind, token.len);
                     } else {
@@ -128,7 +149,7 @@ pub(super) fn process(builder: &mut Sink, tokens: &[Token], events: Vec<Event>)
                 // this should be done in a sensible manner instead
                 loop {
                     let token = tokens[idx];
-                    if !is_insignificant(token.kind) {
+                    if !token.kind.is_trivia() {
                         break;
                     }
                     builder.leaf(token.kind, token.len);
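
Because `Sink` now lives beside the event machinery, any tree representation can be built by replaying the same `leaf`/`start_internal`/`finish_internal` stream. A sketch of a hypothetical second sink that merely counts internal nodes, to show the shape of the contract (`NodeCount` is not in this commit):

    use {TextUnit, SyntaxKind, parser::Sink};

    /// Hypothetical sink: ignores the text and the leaves, counts internal nodes.
    struct NodeCount {
        nodes: usize,
    }

    impl Sink for NodeCount {
        type Tree = usize;

        fn new(_text: String) -> Self {
            NodeCount { nodes: 0 }
        }

        fn leaf(&mut self, _kind: SyntaxKind, _len: TextUnit) {}

        fn start_internal(&mut self, _kind: SyntaxKind) {
            self.nodes += 1;
        }

        fn finish_internal(&mut self) {}

        fn error(&mut self, _err: String) {}

        fn finish(self) -> usize {
            self.nodes
        }
    }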
diff --git a/src/parser/grammar/mod.rs b/src/parser/grammar/mod.rs
index 23216452f..085e62d56 100644
--- a/src/parser/grammar/mod.rs
+++ b/src/parser/grammar/mod.rs
@@ -21,11 +21,6 @@
 //! After adding a new inline-test, run `cargo collect-tests` to extract
 //! it as a standalone text-fixture into `tests/data/parser/inline`, and
 //! run `cargo test` once to create the "gold" value.
-use parser::parser::Parser;
-use parser::token_set::TokenSet;
-use SyntaxKind;
-use syntax_kinds::*;
-
 mod items;
 mod attributes;
 mod expressions;
@@ -34,6 +29,14 @@ mod patterns;
 mod paths;
 mod type_params;
 
+use {
+    SyntaxKind::{self, *},
+    parser::{
+        parser::Parser,
+        token_set::TokenSet
+    }
+};
+
 pub(crate) fn file(p: &mut Parser) {
     let file = p.start();
     p.eat(SHEBANG);
diff --git a/src/parser/input.rs b/src/parser/input.rs
index 9b400b959..052981fbc 100644
--- a/src/parser/input.rs
+++ b/src/parser/input.rs
@@ -1,6 +1,8 @@
-use {SyntaxKind, TextRange, TextUnit, Token};
-use syntax_kinds::EOF;
-use super::is_insignificant;
+use {
+    SyntaxKind, TextRange, TextUnit,
+    SyntaxKind::EOF,
+    lexer::Token,
+};
 
 use std::ops::{Add, AddAssign};
 
@@ -16,7 +18,7 @@ impl<'t> ParserInput<'t> {
         let mut start_offsets = Vec::new();
         let mut len = 0.into();
         for &token in raw_tokens.iter() {
-            if !is_insignificant(token.kind) {
+            if !token.kind.is_trivia() {
                 tokens.push(token);
                 start_offsets.push(len);
             }
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index b7d5e5832..e72ab05af 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -5,18 +5,16 @@ mod input;
 mod event;
 mod grammar;
 
-use std::sync::Arc;
 use {
-    Token,
-    yellow::SyntaxNode,
-    syntax_kinds::*
+    lexer::Token,
+    parser::event::{process}
 };
-use GreenBuilder;
-use parser::event::process;
+
+pub(crate) use self::event::Sink;
 
 
 /// Parse a sequence of tokens into the representative node tree
-pub fn parse_green(text: String, tokens: &[Token]) -> SyntaxNode {
+pub(crate) fn parse<S: Sink>(text: String, tokens: &[Token]) -> S::Tree {
     let events = {
         let input = input::ParserInput::new(&text, tokens);
         let parser_impl = parser::imp::ParserImpl::new(&input);
@@ -24,15 +22,7 @@ pub fn parse_green(text: String, tokens: &[Token]) -> SyntaxNode {
         grammar::file(&mut parser);
         parser.0.into_events()
     };
-    let mut builder = GreenBuilder::new(text);
-    process(&mut builder, tokens, events);
-    let (green, errors) = builder.finish();
-    SyntaxNode::new(Arc::new(green), errors)
-}
-
-fn is_insignificant(kind: SyntaxKind) -> bool {
-    match kind {
-        WHITESPACE | COMMENT => true,
-        _ => false,
-    }
+    let mut sink = S::new(text);
+    process(&mut sink, tokens, events);
+    sink.finish()
 }
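
The generic `parse::<S: Sink>` picks the tree representation at compile time; `lib.rs` instantiates it with `yellow::GreenBuilder`, and any other sink slots in the same way. A small sketch, reusing the hypothetical `NodeCount` sink from the `event.rs` note above; `parse_two_ways` is not part of this commit:

    // Hypothetical crate-internal helper showing static dispatch over sinks,
    // written as if at the crate root.
    fn parse_two_ways(text: String) -> (yellow::SyntaxNode, usize) {
        let tokens = lexer::tokenize(&text);
        // The tokens do not borrow `text` (each `Token` is just kind + length),
        // so the text can be handed to each sink in turn.
        let tree = parser::parse::<yellow::GreenBuilder>(text.clone(), &tokens);
        let node_count = parser::parse::<NodeCount>(text, &tokens);
        (tree, node_count)
    }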
diff --git a/src/parser/parser/imp.rs b/src/parser/parser/imp.rs
index f2641c388..38237ac06 100644
--- a/src/parser/parser/imp.rs
+++ b/src/parser/parser/imp.rs
@@ -1,8 +1,7 @@
 use parser::input::{InputPosition, ParserInput};
 use parser::event::Event;
 
-use SyntaxKind;
-use syntax_kinds::{EOF, TOMBSTONE};
+use SyntaxKind::{self, EOF, TOMBSTONE};
 
 /// Implementation details of `Parser`, extracted
 /// to a separate struct in order not to pollute
diff --git a/src/parser/parser/mod.rs b/src/parser/parser/mod.rs
index 58f820738..0930ff9e4 100644
--- a/src/parser/parser/mod.rs
+++ b/src/parser/parser/mod.rs
@@ -1,5 +1,4 @@
-use SyntaxKind;
-use syntax_kinds::ERROR;
+use SyntaxKind::{self, ERROR};
 
 pub(super) mod imp;
 use self::imp::ParserImpl;
diff --git a/src/syntax_kinds.rs b/src/syntax_kinds/generated.rs
index 1cc29bb61..d332fd02e 100644
--- a/src/syntax_kinds.rs
+++ b/src/syntax_kinds/generated.rs
@@ -1,7 +1,7 @@
 #![allow(bad_style, missing_docs, unreachable_pub)]
 #![cfg_attr(rustfmt, rustfmt_skip)]
 //! Generated from grammar.ron
-use tree::SyntaxInfo;
+use super::SyntaxInfo;
 
 /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`.
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -288,39 +288,39 @@ impl SyntaxKind {
             EOF => &SyntaxInfo { name: "EOF" },
         }
     }
-}
-
-pub(crate) fn ident_to_keyword(ident: &str) -> Option<SyntaxKind> {
-    match ident {
-        "use" => Some(USE_KW),
-        "fn" => Some(FN_KW),
-        "struct" => Some(STRUCT_KW),
-        "enum" => Some(ENUM_KW),
-        "trait" => Some(TRAIT_KW),
-        "impl" => Some(IMPL_KW),
-        "true" => Some(TRUE_KW),
-        "false" => Some(FALSE_KW),
-        "as" => Some(AS_KW),
-        "extern" => Some(EXTERN_KW),
-        "crate" => Some(CRATE_KW),
-        "mod" => Some(MOD_KW),
-        "pub" => Some(PUB_KW),
-        "self" => Some(SELF_KW),
-        "super" => Some(SUPER_KW),
-        "in" => Some(IN_KW),
-        "where" => Some(WHERE_KW),
-        "for" => Some(FOR_KW),
-        "loop" => Some(LOOP_KW),
-        "while" => Some(WHILE_KW),
-        "if" => Some(IF_KW),
-        "match" => Some(MATCH_KW),
-        "const" => Some(CONST_KW),
-        "static" => Some(STATIC_KW),
-        "mut" => Some(MUT_KW),
-        "unsafe" => Some(UNSAFE_KW),
-        "type" => Some(TYPE_KW),
-        "ref" => Some(REF_KW),
-        "let" => Some(LET_KW),
-        _ => None,
+    pub(crate) fn from_keyword(ident: &str) -> Option<SyntaxKind> {
+        match ident {
+            "use" => Some(USE_KW),
+            "fn" => Some(FN_KW),
+            "struct" => Some(STRUCT_KW),
+            "enum" => Some(ENUM_KW),
+            "trait" => Some(TRAIT_KW),
+            "impl" => Some(IMPL_KW),
+            "true" => Some(TRUE_KW),
+            "false" => Some(FALSE_KW),
+            "as" => Some(AS_KW),
+            "extern" => Some(EXTERN_KW),
+            "crate" => Some(CRATE_KW),
+            "mod" => Some(MOD_KW),
+            "pub" => Some(PUB_KW),
+            "self" => Some(SELF_KW),
+            "super" => Some(SUPER_KW),
+            "in" => Some(IN_KW),
+            "where" => Some(WHERE_KW),
+            "for" => Some(FOR_KW),
+            "loop" => Some(LOOP_KW),
+            "while" => Some(WHILE_KW),
+            "if" => Some(IF_KW),
+            "match" => Some(MATCH_KW),
+            "const" => Some(CONST_KW),
+            "static" => Some(STATIC_KW),
+            "mut" => Some(MUT_KW),
+            "unsafe" => Some(UNSAFE_KW),
+            "type" => Some(TYPE_KW),
+            "ref" => Some(REF_KW),
+            "let" => Some(LET_KW),
+            _ => None,
+        }
     }
 }
+
diff --git a/src/syntax_kinds/mod.rs b/src/syntax_kinds/mod.rs
new file mode 100644
index 000000000..a8e9bfe29
--- /dev/null
+++ b/src/syntax_kinds/mod.rs
@@ -0,0 +1,27 @@
+mod generated;
+
+use std::fmt;
+use ::{SyntaxKind::*};
+
+pub use self::generated::SyntaxKind;
+
+impl fmt::Debug for SyntaxKind {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let name = self.info().name;
+        f.write_str(name)
+    }
+}
+
+pub(crate) struct SyntaxInfo {
+    pub name: &'static str,
+}
+
+
+impl SyntaxKind {
+    pub(crate) fn is_trivia(self: SyntaxKind) -> bool {
+        match self {
+            WHITESPACE | COMMENT | DOC_COMMENT => true,
+            _ => false,
+        }
+    }
+}
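
The handwritten half of `syntax_kinds` now owns the `Debug` impl, `SyntaxInfo`, and the trivia test, while the generated half gained `from_keyword`. A small sketch of how the two combine, written as if at the crate root; `ident_kind` and `example` are hypothetical helpers, not part of this commit:

    use SyntaxKind::{self, *};

    /// Classify an identifier the way the lexer's `scan_ident` does:
    /// keywords map to their `*_KW` kind, everything else stays `IDENT`.
    fn ident_kind(text: &str) -> SyntaxKind {
        SyntaxKind::from_keyword(text).unwrap_or(IDENT)
    }

    fn example() {
        assert_eq!(ident_kind("loop"), LOOP_KW);
        assert_eq!(ident_kind("foo"), IDENT);
        // Trivia now covers doc comments in addition to whitespace and plain comments.
        assert!(WHITESPACE.is_trivia());
        assert!(!IDENT.is_trivia());
    }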
diff --git a/src/tree/mod.rs b/src/tree/mod.rs
deleted file mode 100644
index efba82825..000000000
--- a/src/tree/mod.rs
+++ /dev/null
@@ -1,27 +0,0 @@
-mod file_builder;
-
-use ::{TextUnit};
-use std::{fmt};
-pub(crate) use self::file_builder::{Sink, GreenBuilder};
-
-pub use syntax_kinds::SyntaxKind;
-
-impl fmt::Debug for SyntaxKind {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        let name = self.info().name;
-        f.write_str(name)
-    }
-}
-
-pub(crate) struct SyntaxInfo {
-    pub name: &'static str,
-}
-
-/// A token of Rust source.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct Token {
-    /// The kind of token.
-    pub kind: SyntaxKind,
-    /// The length of the token.
-    pub len: TextUnit,
-}
diff --git a/src/tree/file_builder.rs b/src/yellow/builder.rs
index f5d1751f9..346d561cd 100644
--- a/src/tree/file_builder.rs
+++ b/src/yellow/builder.rs
@@ -1,36 +1,26 @@
-//! This module provides a way to construct a `File`.
-//! It is intended to be completely decoupled from the
-//! parser, so as to allow to evolve the tree representation
-//! and the parser algorithm independently.
-//!
-//! The `Sink` trait is the bridge between the parser and the
-//! tree builder: the parser produces a stream of events like
-//! `start node`, `finish node`, and `FileBuilder` converts
-//! this stream to a real tree.
 use std::sync::Arc;
 use {
     SyntaxKind, TextRange, TextUnit,
-    yellow::GreenNode
+    yellow::{SyntaxNode, GreenNode, SyntaxError},
+    parser::Sink
 };
-use SError;
-
-pub(crate) trait Sink {
-    fn leaf(&mut self, kind: SyntaxKind, len: TextUnit);
-    fn start_internal(&mut self, kind: SyntaxKind);
-    fn finish_internal(&mut self);
-    fn error(&mut self, err: String);
-}
 
 pub(crate) struct GreenBuilder {
     text: String,
     stack: Vec<GreenNode>,
     pos: TextUnit,
     root: Option<GreenNode>,
-    errors: Vec<SError>,
+    errors: Vec<SyntaxError>,
 }
 
 impl GreenBuilder {
-    pub(crate) fn new(text: String) -> GreenBuilder {
+
+}
+
+impl Sink for GreenBuilder {
+    type Tree = SyntaxNode;
+
+    fn new(text: String) -> Self {
         GreenBuilder {
             text,
             stack: Vec::new(),
@@ -40,12 +30,6 @@ impl GreenBuilder {
         }
     }
 
-    pub(crate) fn finish(self) -> (GreenNode, Vec<SError>) {
-        (self.root.unwrap(), self.errors)
-    }
-}
-
-impl Sink for GreenBuilder {
     fn leaf(&mut self, kind: SyntaxKind, len: TextUnit) {
         let range = TextRange::offset_len(self.pos, len);
         self.pos += len;
@@ -73,15 +57,12 @@ impl Sink for GreenBuilder {
     }
 
     fn error(&mut self, message: String) {
-        self.errors.push(SError { message, offset: self.pos })
+        self.errors.push(SyntaxError { message, offset: self.pos })
     }
-}
-impl SyntaxKind {
-    fn is_trivia(self) -> bool {
-        match self {
-            SyntaxKind::WHITESPACE | SyntaxKind::DOC_COMMENT | SyntaxKind::COMMENT => true,
-            _ => false
-        }
+
+    fn finish(self) -> SyntaxNode {
+        SyntaxNode::new(Arc::new(self.root.unwrap()), self.errors)
     }
 }
 
+
diff --git a/src/yellow/mod.rs b/src/yellow/mod.rs
index 88d88e226..9e64d042f 100644
--- a/src/yellow/mod.rs
+++ b/src/yellow/mod.rs
@@ -1,6 +1,7 @@
 mod green;
 mod red;
 mod syntax;
+mod builder;
 
 use std::{
     sync::{Arc, Weak},
@@ -9,7 +10,8 @@ use std::{
 pub(crate) use self::{
     green::{GreenNode, TextLen},
     red::RedNode,
-    syntax::SError,
+    syntax::SyntaxError,
+    builder::GreenBuilder,
 };
 pub use self::syntax::SyntaxNode;
 
diff --git a/src/yellow/syntax.rs b/src/yellow/syntax.rs
index 7b1a05cd9..78fa5bf95 100644
--- a/src/yellow/syntax.rs
+++ b/src/yellow/syntax.rs
@@ -4,7 +4,8 @@ use std::{
 };
 
 use {
-    TextRange, TextUnit, SyntaxKind,
+    TextRange, TextUnit,
+    SyntaxKind::{self, *},
     yellow::{Ptr, RedNode, GreenNode, TextLen},
 };
 
@@ -18,17 +19,17 @@ pub struct SyntaxNode {
 #[derive(Clone)]
 pub struct SyntaxRoot {
     red: Arc<RedNode>,
-    pub(crate) errors: Arc<Vec<SError>>,
+    pub(crate) errors: Arc<Vec<SyntaxError>>,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub(crate) struct SError {
+pub(crate) struct SyntaxError {
     pub(crate) message: String,
     pub(crate) offset: TextUnit,
 }
 
 impl SyntaxNode {
-    pub(crate) fn new(root: Arc<GreenNode>, errors: Vec<SError>) -> SyntaxNode {
+    pub(crate) fn new(root: Arc<GreenNode>, errors: Vec<SyntaxError>) -> SyntaxNode {
         let root = Arc::new(RedNode::new_root(root));
         let red = Ptr::new(&root);
         let root = SyntaxRoot { red: root, errors: Arc::new(errors) };
@@ -123,7 +124,6 @@ impl fmt::Debug for SyntaxNode {
 }
 
 fn has_short_text(kind: SyntaxKind) -> bool {
-    use syntax_kinds::*;
     match kind {
         IDENT | LIFETIME => true,
         _ => false,