author     Aleksey Kladov <[email protected]>    2018-07-30 12:08:06 +0100
committer  Aleksey Kladov <[email protected]>    2018-07-30 12:08:06 +0100
commit     1edb58a802f183f79dc2c4bc15921394ef8abb31 (patch)
tree       24114171c61a9f433b9a54008057fa0297f91302
parent     6983091d6d255bcfd17c4f8c14015d8abc77928d (diff)
reformat
-rw-r--r--   .travis.yml                        2
-rw-r--r--   src/lexer/classes.rs               9
-rw-r--r--   src/lexer/mod.rs                  16
-rw-r--r--   src/lexer/numbers.rs               2
-rw-r--r--   src/lib.rs                        47
-rw-r--r--   src/parser/event.rs                4
-rw-r--r--   src/parser/grammar/items/mod.rs    9
-rw-r--r--   src/parser/grammar/mod.rs         11
-rw-r--r--   src/parser/input.rs                6
-rw-r--r--   src/parser/mod.rs                 10
-rw-r--r--   src/parser/parser/imp.rs           2
-rw-r--r--   src/syntax_kinds/mod.rs            3
-rw-r--r--   src/yellow/builder.rs             15
-rw-r--r--   src/yellow/green.rs               37
-rw-r--r--   src/yellow/mod.rs                  6
-rw-r--r--   src/yellow/red.rs                 34
-rw-r--r--   src/yellow/syntax.rs              23
-rw-r--r--   tests/parser.rs                    4
-rw-r--r--   tests/testutils/src/lib.rs         2
-rw-r--r--   tools/src/bin/main.rs             21
20 files changed, 126 insertions, 137 deletions
diff --git a/.travis.yml b/.travis.yml
index f4ee048f4..9949312c8 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,7 +6,7 @@ matrix:
 before_script:
   - rustup component add rustfmt-preview
 script:
-  - cargo fmt --all -- --write-mode=diff
+  - cargo fmt --all -- --check
   - cargo test
   - cargo gen-kinds --verify
   - cargo gen-tests --verify
diff --git a/src/lexer/classes.rs b/src/lexer/classes.rs
index 7fed008af..4235d2648 100644
--- a/src/lexer/classes.rs
+++ b/src/lexer/classes.rs
@@ -1,12 +1,17 @@
 use unicode_xid::UnicodeXID;
 
 pub fn is_ident_start(c: char) -> bool {
-    (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_'
+    (c >= 'a' && c <= 'z')
+        || (c >= 'A' && c <= 'Z')
+        || c == '_'
         || (c > '\x7f' && UnicodeXID::is_xid_start(c))
 }
 
 pub fn is_ident_continue(c: char) -> bool {
-    (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_'
+    (c >= 'a' && c <= 'z')
+        || (c >= 'A' && c <= 'Z')
+        || (c >= '0' && c <= '9')
+        || c == '_'
         || (c > '\x7f' && UnicodeXID::is_xid_continue(c))
 }
 
diff --git a/src/lexer/mod.rs b/src/lexer/mod.rs
index 69cab5b57..f647838ea 100644
--- a/src/lexer/mod.rs
+++ b/src/lexer/mod.rs
@@ -1,22 +1,22 @@
-mod ptr;
+mod classes;
 mod comments;
-mod strings;
 mod numbers;
-mod classes;
+mod ptr;
+mod strings;
 
 use {
-    TextUnit,
     SyntaxKind::{self, *},
+    TextUnit,
 };
 
 use self::{
-    ptr::Ptr,
     classes::*,
+    comments::{scan_comment, scan_shebang},
     numbers::scan_number,
+    ptr::Ptr,
     strings::{
-        is_string_literal_start, scan_byte_char_or_string, scan_char,
-        scan_raw_string, scan_string},
-    comments::{scan_comment, scan_shebang},
+        is_string_literal_start, scan_byte_char_or_string, scan_char, scan_raw_string, scan_string,
+    },
 };
 
 /// A token of Rust source.
diff --git a/src/lexer/numbers.rs b/src/lexer/numbers.rs
index 38eac9212..5c4641a2d 100644
--- a/src/lexer/numbers.rs
+++ b/src/lexer/numbers.rs
@@ -1,5 +1,5 @@
-use lexer::ptr::Ptr;
 use lexer::classes::*;
+use lexer::ptr::Ptr;
 
 use SyntaxKind::{self, *};
 
diff --git a/src/lib.rs b/src/lib.rs
index 34c71fd2c..91d060169 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -11,44 +11,42 @@
 //! [rfc#2256]: <https://github.com/rust-lang/rfcs/pull/2256>
 //! [RFC.md]: <https://github.com/matklad/libsyntax2/blob/master/docs/RFC.md>
 
-#![forbid(missing_debug_implementations, unconditional_recursion, future_incompatible)]
+#![forbid(
+    missing_debug_implementations,
+    unconditional_recursion,
+    future_incompatible
+)]
 #![deny(bad_style, missing_docs)]
 #![allow(missing_docs)]
 //#![warn(unreachable_pub)] // rust-lang/rust#47816
 
-extern crate unicode_xid;
 extern crate text_unit;
+extern crate unicode_xid;
 
 mod lexer;
 mod parser;
-mod yellow;
 mod syntax_kinds;
+mod yellow;
 
 pub use {
-    text_unit::{TextRange, TextUnit},
+    lexer::{tokenize, Token},
     syntax_kinds::SyntaxKind,
+    text_unit::{TextRange, TextUnit},
     yellow::{SyntaxNode, SyntaxNodeRef},
-    lexer::{tokenize, Token},
 };
 
-pub(crate) use {
-    yellow::SyntaxError
-};
+pub(crate) use yellow::SyntaxError;
 
 pub fn parse(text: String) -> SyntaxNode {
     let tokens = tokenize(&text);
     parser::parse::<yellow::GreenBuilder>(text, &tokens)
 }
 
-
 /// Utilities for simple uses of the parser.
 pub mod utils {
-    use std::{
-        fmt::Write,
-        collections::BTreeSet
-    };
+    use std::{collections::BTreeSet, fmt::Write};
 
-    use {SyntaxNode, SyntaxNodeRef, SyntaxError};
+    use {SyntaxError, SyntaxNode, SyntaxNodeRef};
 
     /// Parse a file and create a string representation of the resulting parse tree.
     pub fn dump_tree_green(syntax: &SyntaxNode) -> String {
@@ -58,11 +56,19 @@ pub mod utils {
         go(syntax, &mut result, 0, &mut errors);
         return result;
 
-        fn go(node: SyntaxNodeRef, buff: &mut String, level: usize, errors: &mut BTreeSet<SyntaxError>) {
+        fn go(
+            node: SyntaxNodeRef,
+            buff: &mut String,
+            level: usize,
+            errors: &mut BTreeSet<SyntaxError>,
+        ) {
             buff.push_str(&String::from(" ").repeat(level));
             write!(buff, "{:?}\n", node).unwrap();
-            let my_errors: Vec<_> = errors.iter().filter(|e| e.offset == node.range().start())
-                .cloned().collect();
+            let my_errors: Vec<_> = errors
+                .iter()
+                .filter(|e| e.offset == node.range().start())
+                .cloned()
+                .collect();
             for err in my_errors {
                 errors.remove(&err);
                 buff.push_str(&String::from(" ").repeat(level));
@@ -73,8 +79,11 @@ pub mod utils {
                 go(child, buff, level + 1, errors)
             }
 
-            let my_errors: Vec<_> = errors.iter().filter(|e| e.offset == node.range().end())
-                .cloned().collect();
+            let my_errors: Vec<_> = errors
+                .iter()
+                .filter(|e| e.offset == node.range().end())
+                .cloned()
+                .collect();
             for err in my_errors {
                 errors.remove(&err);
                 buff.push_str(&String::from(" ").repeat(level));
diff --git a/src/parser/event.rs b/src/parser/event.rs
index a8d503b3d..0086d32ea 100644
--- a/src/parser/event.rs
+++ b/src/parser/event.rs
@@ -8,9 +8,9 @@
 //! `start node`, `finish node`, and `FileBuilder` converts
 //! this stream to a real tree.
 use {
-    TextUnit,
-    SyntaxKind::{self, TOMBSTONE},
     lexer::Token,
+    SyntaxKind::{self, TOMBSTONE},
+    TextUnit,
 };
 
 pub(crate) trait Sink {
diff --git a/src/parser/grammar/items/mod.rs b/src/parser/grammar/items/mod.rs
index 1fe646652..5d8d57a80 100644
--- a/src/parser/grammar/items/mod.rs
+++ b/src/parser/grammar/items/mod.rs
@@ -1,9 +1,9 @@
 use super::*;
 
-mod structs;
-mod use_item;
 mod consts;
+mod structs;
 mod traits;
+mod use_item;
 
 pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
     attributes::inner_attributes(p);
@@ -12,9 +12,8 @@ pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
     }
 }
 
-pub(super) const ITEM_FIRST: TokenSet = token_set![
-    EXTERN_KW, MOD_KW, USE_KW, STRUCT_KW, ENUM_KW, FN_KW, PUB_KW, POUND
-];
+pub(super) const ITEM_FIRST: TokenSet =
+    token_set![EXTERN_KW, MOD_KW, USE_KW, STRUCT_KW, ENUM_KW, FN_KW, PUB_KW, POUND];
 
 fn item(p: &mut Parser) {
     let item = p.start();
diff --git a/src/parser/grammar/mod.rs b/src/parser/grammar/mod.rs
index 085e62d56..e24f1055e 100644
--- a/src/parser/grammar/mod.rs
+++ b/src/parser/grammar/mod.rs
@@ -21,20 +21,17 @@
 //! After adding a new inline-test, run `cargo collect-tests` to extract
 //! it as a standalone text-fixture into `tests/data/parser/inline`, and
 //! run `cargo test` once to create the "gold" value.
-mod items;
 mod attributes;
 mod expressions;
-mod types;
-mod patterns;
+mod items;
 mod paths;
+mod patterns;
 mod type_params;
+mod types;
 
 use {
+    parser::{parser::Parser, token_set::TokenSet},
     SyntaxKind::{self, *},
-    parser::{
-        parser::Parser,
-        token_set::TokenSet
-    }
 };
 
 pub(crate) fn file(p: &mut Parser) {
diff --git a/src/parser/input.rs b/src/parser/input.rs
index 052981fbc..db76364b2 100644
--- a/src/parser/input.rs
+++ b/src/parser/input.rs
@@ -1,8 +1,4 @@
-use {
-    SyntaxKind, TextRange, TextUnit,
-    SyntaxKind::EOF,
-    lexer::Token,
-};
+use {lexer::Token, SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit};
 
 use std::ops::{Add, AddAssign};
 
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index e72ab05af..8631baa2e 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -1,18 +1,14 @@
 #[macro_use]
 mod token_set;
-mod parser;
-mod input;
 mod event;
 mod grammar;
+mod input;
+mod parser;
 
-use {
-    lexer::Token,
-    parser::event::{process}
-};
+use {lexer::Token, parser::event::process};
 
 pub(crate) use self::event::Sink;
 
-
 /// Parse a sequence of tokens into the representative node tree
 pub(crate) fn parse<S: Sink>(text: String, tokens: &[Token]) -> S::Tree {
     let events = {
diff --git a/src/parser/parser/imp.rs b/src/parser/parser/imp.rs
index 38237ac06..c653e3524 100644
--- a/src/parser/parser/imp.rs
+++ b/src/parser/parser/imp.rs
@@ -1,5 +1,5 @@
-use parser::input::{InputPosition, ParserInput};
 use parser::event::Event;
+use parser::input::{InputPosition, ParserInput};
 
 use SyntaxKind::{self, EOF, TOMBSTONE};
 
diff --git a/src/syntax_kinds/mod.rs b/src/syntax_kinds/mod.rs
index a8e9bfe29..4c21c02ae 100644
--- a/src/syntax_kinds/mod.rs
+++ b/src/syntax_kinds/mod.rs
@@ -1,7 +1,7 @@
 mod generated;
 
 use std::fmt;
-use ::{SyntaxKind::*};
+use SyntaxKind::*;
 
 pub use self::generated::SyntaxKind;
 
@@ -16,7 +16,6 @@ pub(crate) struct SyntaxInfo {
     pub name: &'static str,
 }
 
-
 impl SyntaxKind {
     pub(crate) fn is_trivia(self: SyntaxKind) -> bool {
         match self {
diff --git a/src/yellow/builder.rs b/src/yellow/builder.rs
index 65cc97ff9..0f7ca45d7 100644
--- a/src/yellow/builder.rs
+++ b/src/yellow/builder.rs
@@ -1,7 +1,7 @@
 use {
+    parser::Sink,
+    yellow::{GreenNode, GreenNodeBuilder, SyntaxError, SyntaxNode, SyntaxRoot},
     SyntaxKind, TextRange, TextUnit,
-    yellow::{SyntaxNode, SyntaxRoot, GreenNode, GreenNodeBuilder, SyntaxError},
-    parser::Sink
 };
 
 pub(crate) struct GreenBuilder {
@@ -12,9 +12,7 @@ pub(crate) struct GreenBuilder {
     errors: Vec<SyntaxError>,
 }
 
-impl GreenBuilder {
-
-}
+impl GreenBuilder {}
 
 impl Sink for GreenBuilder {
     type Tree = SyntaxNode;
@@ -53,7 +51,10 @@ impl Sink for GreenBuilder {
     }
 
     fn error(&mut self, message: String) {
-        self.errors.push(SyntaxError { message, offset: self.pos })
+        self.errors.push(SyntaxError {
+            message,
+            offset: self.pos,
+        })
     }
 
     fn finish(self) -> SyntaxNode {
@@ -61,5 +62,3 @@ impl Sink for GreenBuilder {
         SyntaxNode::new_owned(root)
     }
 }
-
-
diff --git a/src/yellow/green.rs b/src/yellow/green.rs
index cb9dff128..507e4d57e 100644
--- a/src/yellow/green.rs
+++ b/src/yellow/green.rs
@@ -1,5 +1,8 @@
 use std::sync::Arc;
-use {SyntaxKind::{self, *}, TextUnit};
+use {
+    SyntaxKind::{self, *},
+    TextUnit,
+};
 
 #[derive(Clone, Debug)]
 pub(crate) enum GreenNode {
@@ -36,9 +39,7 @@ impl GreenNode {
         fn go(node: &GreenNode, buff: &mut String) {
             match node {
                 GreenNode::Leaf(l) => buff.push_str(&l.text()),
-                GreenNode::Branch(b) => {
-                    b.children().iter().for_each(|child| go(child, buff))
-                }
+                GreenNode::Branch(b) => b.children().iter().for_each(|child| go(child, buff)),
             }
         }
     }
@@ -71,7 +72,6 @@ impl GreenNodeBuilder {
     }
 }
 
-
 #[test]
 fn assert_send_sync() {
     fn f<T: Send + Sync>() {}
@@ -80,14 +80,8 @@ fn assert_send_sync() {
 
 #[derive(Clone, Debug)]
 pub(crate) enum GreenLeaf {
-    Whitespace {
-        newlines: u8,
-        spaces: u8,
-    },
-    Token {
-        kind: SyntaxKind,
-        text: Arc<str>,
-    },
+    Whitespace { newlines: u8, spaces: u8 },
+    Token { kind: SyntaxKind, text: Arc<str> },
 }
 
 impl GreenLeaf {
@@ -96,10 +90,16 @@ impl GreenLeaf {
             let newlines = text.bytes().take_while(|&b| b == b'\n').count();
             let spaces = text[newlines..].bytes().take_while(|&b| b == b' ').count();
             if newlines + spaces == text.len() && newlines <= N_NEWLINES && spaces <= N_SPACES {
-                return GreenLeaf::Whitespace { newlines: newlines as u8, spaces: spaces as u8 };
+                return GreenLeaf::Whitespace {
+                    newlines: newlines as u8,
+                    spaces: spaces as u8,
+                };
             }
         }
-        GreenLeaf::Token { kind, text: text.to_owned().into_boxed_str().into() }
+        GreenLeaf::Token {
+            kind,
+            text: text.to_owned().into_boxed_str().into(),
+        }
     }
 
     pub(crate) fn kind(&self) -> SyntaxKind {
@@ -141,7 +141,11 @@ pub(crate) struct GreenBranch {
 impl GreenBranch {
     fn new(kind: SyntaxKind, children: Vec<GreenNode>) -> GreenBranch {
         let text_len = children.iter().map(|x| x.text_len()).sum::<TextUnit>();
-        GreenBranch { text_len, kind, children }
+        GreenBranch {
+            text_len,
+            kind,
+            children,
+        }
     }
 
     pub fn kind(&self) -> SyntaxKind {
@@ -156,4 +160,3 @@ impl GreenBranch {
         self.children.as_slice()
     }
 }
-
diff --git a/src/yellow/mod.rs b/src/yellow/mod.rs
index 89eefc98b..cada65d2f 100644
--- a/src/yellow/mod.rs
+++ b/src/yellow/mod.rs
@@ -1,12 +1,12 @@
+mod builder;
 mod green;
 mod red;
 mod syntax;
-mod builder;
 
+pub use self::syntax::{SyntaxNode, SyntaxNodeRef};
 pub(crate) use self::{
+    builder::GreenBuilder,
     green::{GreenNode, GreenNodeBuilder},
     red::RedNode,
     syntax::{SyntaxError, SyntaxRoot},
-    builder::GreenBuilder,
 };
-pub use self::syntax::{SyntaxNode, SyntaxNodeRef};
diff --git a/src/yellow/red.rs b/src/yellow/red.rs
index 3f0ddd04c..8907100e4 100644
--- a/src/yellow/red.rs
+++ b/src/yellow/red.rs
@@ -1,11 +1,5 @@
-use std::{
-    ptr,
-    sync::RwLock,
-};
-use {
-    TextUnit,
-    yellow::GreenNode,
-};
+use std::{ptr, sync::RwLock};
+use {yellow::GreenNode, TextUnit};
 
 #[derive(Debug)]
 pub(crate) struct RedNode {
@@ -22,9 +16,7 @@ struct ParentData {
 }
 
 impl RedNode {
-    pub fn new_root(
-        green: GreenNode,
-    ) -> RedNode {
+    pub fn new_root(green: GreenNode) -> RedNode {
         RedNode::new(green, None)
     }
 
@@ -42,13 +34,14 @@ impl RedNode {
         RedNode::new(green, Some(parent_data))
     }
 
-    fn new(
-        green: GreenNode,
-        parent: Option<ParentData>,
-    ) -> RedNode {
+    fn new(green: GreenNode, parent: Option<ParentData>) -> RedNode {
         let n_children = green.children().len();
         let children = (0..n_children).map(|_| None).collect();
-        RedNode { green, parent, children: RwLock::new(children) }
+        RedNode {
+            green,
+            parent,
+            children: RwLock::new(children),
+        }
     }
 
     pub(crate) fn green(&self) -> &GreenNode {
@@ -75,12 +68,15 @@ impl RedNode {
         if children[idx].is_none() {
             let green_children = self.green.children();
             let start_offset = self.start_offset()
-                + green_children[..idx].iter().map(|x| x.text_len()).sum::<TextUnit>();
-            let child = RedNode::new_child(green_children[idx].clone(), self.into(), start_offset, idx);
+                + green_children[..idx]
+                    .iter()
+                    .map(|x| x.text_len())
+                    .sum::<TextUnit>();
+            let child =
+                RedNode::new_child(green_children[idx].clone(), self.into(), start_offset, idx);
             children[idx] = Some(child)
         }
         children[idx].as_ref().unwrap().into()
-
     }
 
     pub(crate) fn parent(&self) -> Option<ptr::NonNull<RedNode>> {
diff --git a/src/yellow/syntax.rs b/src/yellow/syntax.rs
index 64af08236..19a9b8ac2 100644
--- a/src/yellow/syntax.rs
+++ b/src/yellow/syntax.rs
@@ -1,17 +1,12 @@
-use std::{
-    fmt,
-    sync::Arc,
-    ptr,
-    ops::Deref,
-};
+use std::{fmt, ops::Deref, ptr, sync::Arc};
 
 use {
-    TextRange, TextUnit,
+    yellow::{GreenNode, RedNode},
     SyntaxKind::{self, *},
-    yellow::{RedNode, GreenNode},
+    TextRange, TextUnit,
 };
 
-pub trait TreeRoot: Deref<Target=SyntaxRoot> + Clone {}
+pub trait TreeRoot: Deref<Target = SyntaxRoot> + Clone {}
 impl TreeRoot for Arc<SyntaxRoot> {}
 impl<'a> TreeRoot for &'a SyntaxRoot {}
 
@@ -50,7 +45,10 @@ impl SyntaxNode<Arc<SyntaxRoot>> {
     pub(crate) fn new_owned(root: SyntaxRoot) -> Self {
         let root = Arc::new(root);
         let red_weak = ptr::NonNull::from(&root.red);
-        SyntaxNode { root, red: red_weak }
+        SyntaxNode {
+            root,
+            red: red_weak,
+        }
     }
 }
 
@@ -68,10 +66,7 @@ impl<ROOT: TreeRoot> SyntaxNode<ROOT> {
 
     pub fn range(&self) -> TextRange {
         let red = self.red();
-        TextRange::offset_len(
-            red.start_offset(),
-            red.green().text_len(),
-        )
+        TextRange::offset_len(red.start_offset(), red.green().text_len())
     }
 
     pub fn text(&self) -> String {
diff --git a/tests/parser.rs b/tests/parser.rs
index 3b6670cb0..eb955278e 100644
--- a/tests/parser.rs
+++ b/tests/parser.rs
@@ -1,8 +1,8 @@
 extern crate libsyntax2;
 extern crate testutils;
 
-use libsyntax2::{parse};
-use libsyntax2::utils::{dump_tree_green};
+use libsyntax2::parse;
+use libsyntax2::utils::dump_tree_green;
 use testutils::dir_tests;
 
 #[test]
diff --git a/tests/testutils/src/lib.rs b/tests/testutils/src/lib.rs
index ae1dea810..43b806541 100644
--- a/tests/testutils/src/lib.rs
+++ b/tests/testutils/src/lib.rs
@@ -1,8 +1,8 @@
 extern crate difference;
 extern crate file;
 
-use std::path::{Path, PathBuf};
 use std::fs::read_dir;
+use std::path::{Path, PathBuf};
 
 use difference::Changeset;
 
diff --git a/tools/src/bin/main.rs b/tools/src/bin/main.rs
index 6a9793fff..125930127 100644
--- a/tools/src/bin/main.rs
+++ b/tools/src/bin/main.rs
@@ -1,18 +1,14 @@
 extern crate clap;
 #[macro_use]
 extern crate failure;
-extern crate tera;
+extern crate itertools;
 extern crate ron;
+extern crate tera;
 extern crate walkdir;
-extern crate itertools;
 
-use std::{
-    fs,
-    path::{Path},
-    collections::HashSet,
-};
 use clap::{App, Arg, SubCommand};
 use itertools::Itertools;
+use std::{collections::HashSet, fs, path::Path};
 
 type Result<T> = ::std::result::Result<T, failure::Error>;
 
@@ -29,7 +25,7 @@ fn main() -> Result<()> {
             Arg::with_name("verify")
                 .long("--verify")
                 .help("Verify that generated code is up-to-date")
-                .global(true)
+                .global(true),
         )
         .subcommand(SubCommand::with_name("gen-kinds"))
         .subcommand(SubCommand::with_name("gen-tests"))
@@ -66,9 +62,8 @@ fn update(path: &Path, contents: &str, verify: bool) -> Result<()> {
 fn get_kinds() -> Result<String> {
     let grammar = grammar()?;
     let template = fs::read_to_string(SYNTAX_KINDS_TEMPLATE)?;
-    let ret = tera::Tera::one_off(&template, &grammar, false).map_err(|e| {
-        format_err!("template error: {}", e)
-    })?;
+    let ret = tera::Tera::one_off(&template, &grammar, false)
+        .map_err(|e| format_err!("template error: {}", e))?;
     Ok(ret)
 }
 
@@ -142,7 +137,8 @@ fn tests_from_dir(dir: &Path) -> Result<HashSet<Test>> {
 fn collect_tests(s: &str) -> Vec<Test> {
     let mut res = vec![];
     let prefix = "// ";
-    let comment_blocks = s.lines()
+    let comment_blocks = s
+        .lines()
         .map(str::trim_left)
         .group_by(|line| line.starts_with(prefix));
 
@@ -181,4 +177,3 @@ fn existing_tests(dir: &Path) -> Result<HashSet<Test>> {
     }
     Ok(res)
 }
-