-rw-r--r--  src/parser/event_parser/grammar.rs               | 13
-rw-r--r--  tests/data/parser/err/0002_duplicate_shebang.rs  |  2
-rw-r--r--  tests/data/parser/err/0002_duplicate_shebang.txt |  7
-rw-r--r--  tests/data/parser/ok/0004_file_shebang.rs        |  1
-rw-r--r--  tests/data/parser/ok/0004_file_shebang.txt       |  2
-rw-r--r--  tests/lexer.rs                                   | 26
-rw-r--r--  tests/parser.rs                                  | 30
-rw-r--r--  tests/testutils/src/lib.rs                       | 30
8 files changed, 64 insertions, 47 deletions
diff --git a/src/parser/event_parser/grammar.rs b/src/parser/event_parser/grammar.rs
index e1f717714..f26cbc4f6 100644
--- a/src/parser/event_parser/grammar.rs
+++ b/src/parser/event_parser/grammar.rs
@@ -6,7 +6,7 @@ use syntax_kinds::*;
 
 pub fn file(p: &mut Parser) {
     node(p, FILE, |p| {
-        shebang(p);
+        p.optional(SHEBANG);
         inner_attributes(p);
         many(p, |p| {
             skip_to_first(
@@ -17,11 +17,6 @@ pub fn file(p: &mut Parser) {
     })
 }
 
-
-fn shebang(_: &mut Parser) {
-    //TODO
-}
-
 fn inner_attributes(_: &mut Parser) {
     //TODO
 }
@@ -136,4 +131,10 @@ impl<'p> Parser<'p> {
             false
         }
     }
+
+    pub(crate) fn optional(&mut self, kind: SyntaxKind) {
+        if self.current_is(kind) {
+            self.bump();
+        }
+    }
 }
\ No newline at end of file
diff --git a/tests/data/parser/err/0002_duplicate_shebang.rs b/tests/data/parser/err/0002_duplicate_shebang.rs
new file mode 100644
index 000000000..48a3a3980
--- /dev/null
+++ b/tests/data/parser/err/0002_duplicate_shebang.rs
@@ -0,0 +1,2 @@
+#!/use/bin/env rusti
+#!/use/bin/env rusti
diff --git a/tests/data/parser/err/0002_duplicate_shebang.txt b/tests/data/parser/err/0002_duplicate_shebang.txt
new file mode 100644
index 000000000..e7cf7187d
--- /dev/null
+++ b/tests/data/parser/err/0002_duplicate_shebang.txt
@@ -0,0 +1,7 @@
+FILE@[0; 42)
+  SHEBANG@[0; 20)
+  ERROR@[20; 42)
+    err: `expected item`
+    WHITESPACE@[20; 21)
+    SHEBANG@[21; 41)
+    WHITESPACE@[41; 42)
diff --git a/tests/data/parser/ok/0004_file_shebang.rs b/tests/data/parser/ok/0004_file_shebang.rs
new file mode 100644
index 000000000..53dc9e617
--- /dev/null
+++ b/tests/data/parser/ok/0004_file_shebang.rs
@@ -0,0 +1 @@
+#!/use/bin/env rusti
\ No newline at end of file
diff --git a/tests/data/parser/ok/0004_file_shebang.txt b/tests/data/parser/ok/0004_file_shebang.txt
new file mode 100644
index 000000000..33055cf3d
--- /dev/null
+++ b/tests/data/parser/ok/0004_file_shebang.txt
@@ -0,0 +1,2 @@
+FILE@[0; 20)
+  SHEBANG@[0; 20)
diff --git a/tests/lexer.rs b/tests/lexer.rs
index 6c7531596..20840f456 100644
--- a/tests/lexer.rs
+++ b/tests/lexer.rs
@@ -2,30 +2,20 @@ extern crate file;
 extern crate libsyntax2;
 extern crate testutils;
 
-use std::path::{Path};
 use std::fmt::Write;
 
 use libsyntax2::{Token, tokenize};
-use testutils::{assert_equal_text, collect_tests};
+use testutils::dir_tests;
 
 #[test]
 fn lexer_tests() {
-    for test_case in collect_tests(&["lexer"]) {
-        lexer_test_case(&test_case);
-    }
-}
-
-fn lexer_test_case(path: &Path) {
-    let actual = {
-        let text = file::get_text(path).unwrap();
-        let tokens = tokenize(&text);
-        dump_tokens(&tokens, &text)
-    };
-    let path = path.with_extension("txt");
-    let expected = file::get_text(&path).unwrap();
-    let expected = expected.as_str();
-    let actual = actual.as_str();
-    assert_equal_text(expected, actual, &path)
+    dir_tests(
+        &["lexer"],
+        |text| {
+            let tokens = tokenize(text);
+            dump_tokens(&tokens, text)
+        }
+    )
 }
 
 fn dump_tokens(tokens: &[Token], text: &str) -> String {
diff --git a/tests/parser.rs b/tests/parser.rs
index 518852bb2..02cef938d 100644
--- a/tests/parser.rs
+++ b/tests/parser.rs
@@ -2,33 +2,21 @@ extern crate file;
 extern crate libsyntax2;
 extern crate testutils;
 
-use std::path::{Path};
 use std::fmt::Write;
 
 use libsyntax2::{tokenize, parse, Node, File};
-use testutils::{collect_tests, assert_equal_text};
+use testutils::dir_tests;
 
 #[test]
 fn parser_tests() {
-    for test_case in collect_tests(&["parser/ok", "parser/err"]) {
-        parser_test_case(&test_case);
-    }
-}
-
-fn parser_test_case(path: &Path) {
-    let actual = {
-        let text = file::get_text(path).unwrap();
-        let tokens = tokenize(&text);
-        let file = parse(text, &tokens);
-        dump_tree(&file)
-    };
-    let expected_path = path.with_extension("txt");
-    let expected = file::get_text(&expected_path).expect(
-        &format!("Can't read {}", expected_path.display())
-    );
-    let expected = expected.as_str();
-    let actual = actual.as_str();
-    assert_equal_text(expected, actual, &expected_path);
+    dir_tests(
+        &["parser/ok", "parser/err"],
+        |text| {
+            let tokens = tokenize(text);
+            let file = parse(text.to_string(), &tokens);
+            dump_tree(&file)
+        }
+    )
 }
 
 fn dump_tree(file: &File) -> String {
diff --git a/tests/testutils/src/lib.rs b/tests/testutils/src/lib.rs
index 9fc85cc24..b34517c5f 100644
--- a/tests/testutils/src/lib.rs
+++ b/tests/testutils/src/lib.rs
@@ -6,7 +6,33 @@ use std::fs::read_dir;
 
 use difference::Changeset;
 
-pub fn assert_equal_text(
+pub fn dir_tests<F>(
+    paths: &[&str],
+    f: F
+)
+where
+    F: Fn(&str) -> String
+{
+    for path in collect_tests(paths) {
+        let actual = {
+            let text = file::get_text(&path).unwrap();
+            f(&text)
+        };
+        let path = path.with_extension("txt");
+        if !path.exists() {
+            println!("\nfile: {}", path.display());
+            println!("No .txt file with expected result, creating...");
+            file::put_text(&path, actual).unwrap();
+            panic!("No expected result")
+        }
+        let expected = file::get_text(&path).unwrap();
+        let expected = expected.as_str();
+        let actual = actual.as_str();
+        assert_equal_text(expected, actual, &path);
+    }
+}
+
+fn assert_equal_text(
     expected: &str,
     actual: &str,
     path: &Path
@@ -16,7 +42,7 @@ pub fn assert_equal_text(
     }
 }
 
-pub fn collect_tests(paths: &[&str]) -> Vec<PathBuf> {
+fn collect_tests(paths: &[&str]) -> Vec<PathBuf> {
     paths.iter().flat_map(|path| {
         let path = test_data_dir().join(path);
         test_from_dir(&path).into_iter()