 Cargo.toml                                           |  2
 src/parser/event_parser/grammar.rs                   | 26
 src/tree/file_builder.rs                             |  9
 tests/data/parser/err/0001_item_recovery_in_file.rs  |  3
 tests/data/parser/err/0001_item_recovery_in_file.txt | 14
 tests/lexer.rs                                       | 39
 tests/parser.rs                                      | 48
 tests/testutils/Cargo.toml                           |  8
 tests/testutils/src/lib.rs                           | 64
 9 files changed, 131 insertions(+), 82 deletions(-)
diff --git a/Cargo.toml b/Cargo.toml
index 063d52211..043f97752 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -12,4 +12,4 @@ file = "1.1.1"
 ron = "0.1.5"
 
 [dev-dependencies]
-difference = "1.0.0"
+testutils = { path = "./tests/testutils" }
\ No newline at end of file
diff --git a/src/parser/event_parser/grammar.rs b/src/parser/event_parser/grammar.rs
index 79ef8b31c..e1f717714 100644
--- a/src/parser/event_parser/grammar.rs
+++ b/src/parser/event_parser/grammar.rs
@@ -8,7 +8,12 @@ pub fn file(p: &mut Parser) {
     node(p, FILE, |p| {
         shebang(p);
         inner_attributes(p);
-        many(p, |p| skip_to_first(p, item_first, item));
+        many(p, |p| {
+            skip_to_first(
+                p, item_first, item,
+                "expected item",
+            )
+        });
     })
 }
 
@@ -84,19 +89,34 @@ fn comma_list<F: Fn(&mut Parser) -> bool>(p: &mut Parser, f: F) {
 }
 
 
-fn skip_to_first<C, F>(p: &mut Parser, cond: C, f: F) -> bool
+fn skip_to_first<C, F>(p: &mut Parser, cond: C, f: F, message: &str) -> bool
 where
     C: Fn(&Parser) -> bool,
     F: FnOnce(&mut Parser),
 {
+    let mut skipped = false;
     loop {
         if cond(p) {
+            if skipped {
+                p.finish();
+            }
             f(p);
             return true;
         }
-        if p.bump().is_none() {
+        if p.is_eof() {
+            if skipped {
+                p.finish();
+            }
             return false;
         }
+        if !skipped {
+            p.start(ERROR);
+            p.error()
+                .message(message)
+                .emit();
+        }
+        p.bump().unwrap();
+        skipped = true;
     }
 }
 
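For readability, here is the recovery helper as it reads after this patch, assembled from the hunks above (a sketch only: `Parser`'s `start`, `finish`, `error`, `is_eof`, and `bump` methods and the `ERROR` kind are assumed from the surrounding grammar code, not defined here):

```rust
// After this change, tokens that precede the first position where `cond`
// holds are consumed into a single ERROR node carrying `message`.
fn skip_to_first<C, F>(p: &mut Parser, cond: C, f: F, message: &str) -> bool
where
    C: Fn(&Parser) -> bool,
    F: FnOnce(&mut Parser),
{
    let mut skipped = false;
    loop {
        if cond(p) {
            if skipped {
                p.finish(); // close the ERROR node opened below
            }
            f(p);
            return true;
        }
        if p.is_eof() {
            if skipped {
                p.finish();
            }
            return false;
        }
        if !skipped {
            // First unexpected token: open an ERROR node and report once.
            p.start(ERROR);
            p.error()
                .message(message)
                .emit();
        }
        p.bump().unwrap();
        skipped = true;
    }
}
```

Skipped tokens are grouped into one ERROR node rather than dropped, so the resulting tree still covers the whole input; the new 0001_item_recovery_in_file test below exercises exactly this path.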
diff --git a/src/tree/file_builder.rs b/src/tree/file_builder.rs
index b07f4027b..37bd5b2c8 100644
--- a/src/tree/file_builder.rs
+++ b/src/tree/file_builder.rs
@@ -73,7 +73,9 @@ impl FileBuilder {
     pub fn finish(self) -> File {
         assert!(
             self.in_progress.is_empty(),
-            "some nodes in FileBuilder are unfinished"
+            "some nodes in FileBuilder are unfinished: {:?}",
+            self.in_progress.iter().map(|&(idx, _)| self.nodes[idx].kind)
+                .collect::<Vec<_>>()
         );
         assert!(
             self.pos == (self.text.len() as u32).into(),
@@ -122,11 +124,6 @@ impl FileBuilder {
         let idx = self.current_id();
         &mut self.nodes[idx]
     }
-
-    fn current_sibling(&mut self) -> Option<&mut NodeData> {
-        let idx = self.in_progress.last().unwrap().1?;
-        Some(&mut self.nodes[idx])
-    }
 }
 
 fn fill<T>(slot: &mut Option<T>, value: T) {
diff --git a/tests/data/parser/err/0001_item_recovery_in_file.rs b/tests/data/parser/err/0001_item_recovery_in_file.rs
new file mode 100644
index 000000000..98f23de1f
--- /dev/null
+++ b/tests/data/parser/err/0001_item_recovery_in_file.rs
@@ -0,0 +1,3 @@
+if match
+
+struct S {}
\ No newline at end of file
diff --git a/tests/data/parser/err/0001_item_recovery_in_file.txt b/tests/data/parser/err/0001_item_recovery_in_file.txt
new file mode 100644
index 000000000..730367694
--- /dev/null
+++ b/tests/data/parser/err/0001_item_recovery_in_file.txt
@@ -0,0 +1,14 @@
+FILE@[0; 21)
+  ERROR@[0; 10)
+    err: `expected item`
+    IDENT@[0; 2)
+    WHITESPACE@[2; 3)
+    IDENT@[3; 8)
+    WHITESPACE@[8; 10)
+  STRUCT_ITEM@[10; 21)
+    STRUCT_KW@[10; 16)
+    WHITESPACE@[16; 17)
+    IDENT@[17; 18)
+    WHITESPACE@[18; 19)
+    L_CURLY@[19; 20)
+    R_CURLY@[20; 21)
diff --git a/tests/lexer.rs b/tests/lexer.rs
index beca19c24..6c7531596 100644
--- a/tests/lexer.rs
+++ b/tests/lexer.rs
@@ -1,56 +1,31 @@
 extern crate file;
-#[macro_use(assert_diff)]
-extern crate difference;
 extern crate libsyntax2;
+extern crate testutils;
 
-use std::path::{PathBuf, Path};
-use std::fs::read_dir;
+use std::path::{Path};
 use std::fmt::Write;
 
 use libsyntax2::{Token, tokenize};
+use testutils::{assert_equal_text, collect_tests};
 
 #[test]
 fn lexer_tests() {
-    for test_case in lexer_test_cases() {
+    for test_case in collect_tests(&["lexer"]) {
         lexer_test_case(&test_case);
     }
 }
 
-fn lexer_test_dir() -> PathBuf {
-    let dir = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(dir).join("tests/data/lexer")
-}
-
-fn lexer_test_cases() -> Vec<PathBuf> {
-    let mut acc = Vec::new();
-    let dir = lexer_test_dir();
-    for file in read_dir(&dir).unwrap() {
-        let file = file.unwrap();
-        let path = file.path();
-        if path.extension().unwrap_or_default() == "rs" {
-            acc.push(path);
-        }
-    }
-    acc.sort();
-    acc
-}
-
 fn lexer_test_case(path: &Path) {
     let actual = {
         let text = file::get_text(path).unwrap();
         let tokens = tokenize(&text);
         dump_tokens(&tokens, &text)
     };
-    let expected = file::get_text(&path.with_extension("txt")).unwrap();
+    let path = path.with_extension("txt");
+    let expected = file::get_text(&path).unwrap();
     let expected = expected.as_str();
     let actual = actual.as_str();
-    if expected == actual {
-        return
-    }
-    if expected.trim() == actual.trim() {
-        panic!("Whitespace difference!")
-    }
-    assert_diff!(expected, actual, "\n", 0)
+    assert_equal_text(expected, actual, &path)
 }
 
 fn dump_tokens(tokens: &[Token], text: &str) -> String {
diff --git a/tests/parser.rs b/tests/parser.rs
index 43d04e491..518852bb2 100644
--- a/tests/parser.rs
+++ b/tests/parser.rs
@@ -1,46 +1,20 @@
 extern crate file;
-#[macro_use(assert_diff)]
-extern crate difference;
 extern crate libsyntax2;
+extern crate testutils;
 
-use std::path::{PathBuf, Path};
-use std::fs::read_dir;
+use std::path::{Path};
 use std::fmt::Write;
 
 use libsyntax2::{tokenize, parse, Node, File};
+use testutils::{collect_tests, assert_equal_text};
 
 #[test]
 fn parser_tests() {
-    for test_case in parser_test_cases() {
+    for test_case in collect_tests(&["parser/ok", "parser/err"]) {
         parser_test_case(&test_case);
     }
 }
 
-fn parser_test_dir() -> PathBuf {
-    let dir = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(dir).join("tests/data/parser")
-}
-
-fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
-    let mut acc = Vec::new();
-    for file in read_dir(&dir).unwrap() {
-        let file = file.unwrap();
-        let path = file.path();
-        if path.extension().unwrap_or_default() == "rs" {
-            acc.push(path);
-        }
-    }
-    acc.sort();
-    acc
-}
-
-fn parser_test_cases() -> Vec<PathBuf> {
-    let mut acc = Vec::new();
-    acc.extend(test_from_dir(&parser_test_dir().join("ok")));
-    acc.extend(test_from_dir(&parser_test_dir().join("err")));
-    acc
-}
-
 fn parser_test_case(path: &Path) {
     let actual = {
         let text = file::get_text(path).unwrap();
@@ -48,19 +22,13 @@ fn parser_test_case(path: &Path) {
         let file = parse(text, &tokens);
         dump_tree(&file)
     };
-    let expected = path.with_extension("txt");
-    let expected = file::get_text(&expected).expect(
-        &format!("Can't read {}", expected.display())
+    let expected_path = path.with_extension("txt");
+    let expected = file::get_text(&expected_path).expect(
+        &format!("Can't read {}", expected_path.display())
     );
     let expected = expected.as_str();
     let actual = actual.as_str();
-    if expected == actual {
-        return
-    }
-    if expected.trim() == actual.trim() {
-        panic!("Whitespace difference! {}", path.display())
-    }
-    assert_diff!(expected, actual, "\n", 0)
+    assert_equal_text(expected, actual, &expected_path);
 }
 
 fn dump_tree(file: &File) -> String {
diff --git a/tests/testutils/Cargo.toml b/tests/testutils/Cargo.toml
new file mode 100644
index 000000000..9003822ee
--- /dev/null
+++ b/tests/testutils/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "testutils"
+version = "0.1.0"
+authors = ["Aleksey Kladov <[email protected]>"]
+
+[dependencies]
+file = "1.0"
+difference = "1.0.0"
diff --git a/tests/testutils/src/lib.rs b/tests/testutils/src/lib.rs
new file mode 100644
index 000000000..9fc85cc24
--- /dev/null
+++ b/tests/testutils/src/lib.rs
@@ -0,0 +1,64 @@
+extern crate difference;
+extern crate file;
+
+use std::path::{PathBuf, Path};
+use std::fs::read_dir;
+
+use difference::Changeset;
+
+pub fn assert_equal_text(
+    expected: &str,
+    actual: &str,
+    path: &Path
+) {
+    if expected != actual {
+        print_difference(expected, actual, path)
+    }
+}
+
+pub fn collect_tests(paths: &[&str]) -> Vec<PathBuf> {
+    paths.iter().flat_map(|path| {
+        let path = test_data_dir().join(path);
+        test_from_dir(&path).into_iter()
+    }).collect()
+}
+
+fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
+    let mut acc = Vec::new();
+    for file in read_dir(&dir).unwrap() {
+        let file = file.unwrap();
+        let path = file.path();
+        if path.extension().unwrap_or_default() == "rs" {
+            acc.push(path);
+        }
+    }
+    acc.sort();
+    acc
+}
+
+fn print_difference(expected: &str, actual: &str, path: &Path) {
+    let dir = project_dir();
+    let path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
+    println!("\nfile: {}", path.display());
+    if expected.trim() == actual.trim() {
+        println!("whitespace difference");
+        println!("rewriting the file");
+        file::put_text(path, actual).unwrap();
+    } else {
+        let changeset = Changeset::new(actual, expected, "\n");
+        println!("{}", changeset);
+    }
+    panic!("Comparison failed")
+}
+
+fn project_dir() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir)
+        .parent().unwrap()
+        .parent().unwrap()
+        .to_owned()
+}
+
+fn test_data_dir() -> PathBuf {
+    project_dir().join("tests/data")
+}
\ No newline at end of file
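A minimal sketch of how a test crate consumes the new testutils API, mirroring the updated tests/lexer.rs above; `render` here is a hypothetical stand-in for the real tokenize/parse and dump step:

```rust
// Sketch only: assumes the `file` and `testutils` crates from this change.
extern crate file;
extern crate testutils;

use testutils::{assert_equal_text, collect_tests};

// Hypothetical placeholder for the real dump_tokens/dump_tree logic.
fn render(text: &str) -> String {
    text.to_string()
}

#[test]
fn golden_tests() {
    // collect_tests gathers every *.rs file under tests/data/<subdir>.
    for case in collect_tests(&["lexer"]) {
        let actual = render(&file::get_text(&case).unwrap());
        let expected_path = case.with_extension("txt");
        let expected = file::get_text(&expected_path).unwrap();
        // On mismatch this prints a line diff and panics; if only whitespace
        // differs, it rewrites the expected file in place first.
        assert_equal_text(&expected, &actual, &expected_path);
    }
}
```

Centralizing the comparison in `assert_equal_text` removes the duplicated diffing code from the lexer and parser tests and keeps the golden files easy to refresh.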