-rw-r--r--  grammar.ron                       5
-rw-r--r--  src/bin/gen.rs                   14
-rw-r--r--  src/lib.rs                        2
-rw-r--r--  src/syntax_kinds.rs               4
-rw-r--r--  tests/data/parser/0000_empty.rs   0
-rw-r--r--  tests/data/parser/0000_empty.txt  1
-rw-r--r--  tests/parser.rs                  76
7 files changed, 95 insertions, 7 deletions
diff --git a/grammar.ron b/grammar.ron
index 71e354dac..439c4ef9c 100644
--- a/grammar.ron
+++ b/grammar.ron
@@ -1,5 +1,5 @@
 Grammar(
-    syntax_kinds: [
+    tokens: [
         "ERROR",
         "IDENT",
         "UNDERSCORE",
@@ -51,5 +51,8 @@ Grammar(
         "COMMENT",
         "DOC_COMMENT",
         "SHEBANG",
+    ],
+    nodes: [
+        "FILE"
     ]
 )
\ No newline at end of file
diff --git a/src/bin/gen.rs b/src/bin/gen.rs
index 4acf381e3..f5a66d9f2 100644
--- a/src/bin/gen.rs
+++ b/src/bin/gen.rs
@@ -17,7 +17,8 @@ fn main() {
 
 #[derive(Deserialize)]
 struct Grammar {
-    syntax_kinds: Vec<String>,
+    tokens: Vec<String>,
+    nodes: Vec<String>,
 }
 
 impl Grammar {
@@ -31,7 +32,12 @@ impl Grammar {
         acc.push_str("// Generated from grammar.ron\n");
         acc.push_str("use tree::{SyntaxKind, SyntaxInfo};\n");
         acc.push_str("\n");
-        for (idx, kind) in self.syntax_kinds.iter().enumerate() {
+
+        let syntax_kinds: Vec<&String> =
+            self.tokens.iter().chain(self.nodes.iter())
+            .collect();
+
+        for (idx, kind) in syntax_kinds.iter().enumerate() {
             let sname = scream(kind);
             write!(
                 acc,
@@ -40,8 +46,8 @@ impl Grammar {
             ).unwrap();
         }
         acc.push_str("\n");
-        write!(acc, "static INFOS: [SyntaxInfo; {}] = [\n", self.syntax_kinds.len()).unwrap();
-        for kind in self.syntax_kinds.iter() {
+        write!(acc, "static INFOS: [SyntaxInfo; {}] = [\n", syntax_kinds.len()).unwrap();
+        for kind in syntax_kinds.iter() {
             let sname = scream(kind);
             write!(
                 acc,
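As an aside, here is a minimal standalone sketch (illustrative only, not part of this commit) of what the chained iterator above does: token kinds keep their existing indices and node kinds are appended after them, which is why the regenerated src/syntax_kinds.rs below gains FILE as SyntaxKind(51) and grows INFOS to 52 entries.

// Illustrative sketch only: chaining `tokens` and `nodes` yields one
// contiguous index space. With the real grammar's 51 token kinds, the
// first node kind ("FILE") would land at index 51.
fn main() {
    let tokens: Vec<String> = vec!["ERROR".into(), "IDENT".into()]; // truncated for brevity
    let nodes: Vec<String> = vec!["FILE".into()];
    let syntax_kinds: Vec<&String> = tokens.iter().chain(nodes.iter()).collect();
    for (idx, kind) in syntax_kinds.iter().enumerate() {
        println!("pub const {}: SyntaxKind = SyntaxKind({});", kind, idx);
    }
}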
diff --git a/src/lib.rs b/src/lib.rs
index 82213e2b3..0685e3f2c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -6,5 +6,5 @@ mod lexer;
 
 pub mod syntax_kinds;
 pub use text::{TextUnit, TextRange};
-pub use tree::{SyntaxKind, Token};
+pub use tree::{SyntaxKind, Token, FileBuilder, File, Node};
 pub use lexer::{next_token, tokenize};
diff --git a/src/syntax_kinds.rs b/src/syntax_kinds.rs
index ec2a036b9..b83f48dd8 100644
--- a/src/syntax_kinds.rs
+++ b/src/syntax_kinds.rs
@@ -52,8 +52,9 @@ pub const THIN_ARROW: SyntaxKind = SyntaxKind(47);
 pub const COMMENT: SyntaxKind = SyntaxKind(48);
 pub const DOC_COMMENT: SyntaxKind = SyntaxKind(49);
 pub const SHEBANG: SyntaxKind = SyntaxKind(50);
+pub const FILE: SyntaxKind = SyntaxKind(51);
 
-static INFOS: [SyntaxInfo; 51] = [
+static INFOS: [SyntaxInfo; 52] = [
     SyntaxInfo { name: "ERROR" },
     SyntaxInfo { name: "IDENT" },
     SyntaxInfo { name: "UNDERSCORE" },
@@ -105,6 +106,7 @@ static INFOS: [SyntaxInfo; 51] = [
     SyntaxInfo { name: "COMMENT" },
    SyntaxInfo { name: "DOC_COMMENT" },
     SyntaxInfo { name: "SHEBANG" },
+    SyntaxInfo { name: "FILE" },
 ];
 
 pub(crate) fn syntax_info(kind: SyntaxKind) -> &'static SyntaxInfo {
diff --git a/tests/data/parser/0000_empty.rs b/tests/data/parser/0000_empty.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/tests/data/parser/0000_empty.rs
diff --git a/tests/data/parser/0000_empty.txt b/tests/data/parser/0000_empty.txt
new file mode 100644
index 000000000..843ea118d
--- /dev/null
+++ b/tests/data/parser/0000_empty.txt
@@ -0,0 +1 @@
+FILE@[0; 0)
\ No newline at end of file
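The expected-output .txt files hold the tree dump produced by dump_tree in tests/parser.rs below: one line per node, printed via the node's Debug impl as KIND@[start; end), so an empty file parses to the single FILE@[0; 0) root above. A rough sketch of a Debug impl that would print in this shape (the type and field names here are assumptions for illustration, not libsyntax2's actual code):

use std::fmt;

// Hypothetical stand-in for the crate's Node type, only to show the dump format.
struct DemoNode {
    kind_name: &'static str,
    start: u32,
    end: u32,
}

impl fmt::Debug for DemoNode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // "KIND@[start; end)" -- matches the FILE@[0; 0) line above.
        write!(f, "{}@[{}; {})", self.kind_name, self.start, self.end)
    }
}

fn main() {
    println!("{:?}", DemoNode { kind_name: "FILE", start: 0, end: 0 });
}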
diff --git a/tests/parser.rs b/tests/parser.rs
new file mode 100644
index 000000000..f61b6830b
--- /dev/null
+++ b/tests/parser.rs
@@ -0,0 +1,76 @@
+extern crate file;
+#[macro_use(assert_diff)]
+extern crate difference;
+extern crate libsyntax2;
+
+use std::path::{PathBuf, Path};
+use std::fs::read_dir;
+use std::fmt::Write;
+
+use libsyntax2::{tokenize, Token, Node, File, FileBuilder};
+
+#[test]
+fn parser_tests() {
+    for test_case in parser_test_cases() {
+        parser_test_case(&test_case);
+    }
+}
+
+fn parser_test_dir() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir).join("tests/data/parser")
+}
+
+fn parser_test_cases() -> Vec<PathBuf> {
+    let mut acc = Vec::new();
+    let dir = parser_test_dir();
+    for file in read_dir(&dir).unwrap() {
+        let file = file.unwrap();
+        let path = file.path();
+        if path.extension().unwrap_or_default() == "rs" {
+            acc.push(path);
+        }
+    }
+    acc.sort();
+    acc
+}
+
+fn parser_test_case(path: &Path) {
+    let actual = {
+        let text = file::get_text(path).unwrap();
+        let tokens = tokenize(&text);
+        let file = parse(text, &tokens);
+        dump_tree(&file)
+    };
+    let expected = file::get_text(&path.with_extension("txt")).unwrap();
+    let expected = expected.as_str();
+    let actual = actual.as_str();
+    if expected == actual {
+        return
+    }
+    if expected.trim() == actual.trim() {
+        panic!("Whitespace difference!")
+    }
+    assert_diff!(expected, actual, "\n", 0)
+}
+
+fn dump_tree(file: &File) -> String {
+    let mut result = String::new();
+    go(file.root(), &mut result, 0);
+    return result;
+
+    fn go(node: Node, buff: &mut String, level: usize) {
+        buff.push_str(&String::from(" ").repeat(level));
+        write!(buff, "{:?}\n", node);
+        for child in node.children() {
+            go(child, buff, level + 1)
+        }
+    }
+}
+
+fn parse(text: String, tokens: &[Token]) -> File {
+    let mut builder = FileBuilder::new(text);
+    builder.start_internal(libsyntax2::syntax_kinds::FILE);
+    builder.finish_internal();
+    builder.finish()
+}
\ No newline at end of file
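With this harness in place, adding a parser test should only require dropping a new NNNN_name.rs file into tests/data/parser/ together with a matching .txt file holding the expected tree dump, exactly like the 0000_empty.rs / 0000_empty.txt pair above: the test walks the directory, parses every .rs file it finds, dumps the tree, and diffs the result against the .txt, flagging a whitespace-only mismatch as its own distinct failure.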