diff options
author | Aleksey Kladov <[email protected]> | 2018-01-07 11:56:08 +0000 |
---|---|---|
committer | Aleksey Kladov <[email protected]> | 2018-01-07 11:56:08 +0000 |
commit | 9e4052cc2ee12751ba94909ff479bd03df141ac4 (patch) | |
tree | 2e7c3a063369c5151fd851910c997e5d1020a164 /tests/lexer.rs | |
parent | 18f9e50b2d1aaf91992be9fd2f2a7e1866a943d3 (diff) |
Test utils
Diffstat (limited to 'tests/lexer.rs')
-rw-r--r-- | tests/lexer.rs | 39 |
1 file changed, 7 insertions, 32 deletions
diff --git a/tests/lexer.rs b/tests/lexer.rs index beca19c24..6c7531596 100644 --- a/tests/lexer.rs +++ b/tests/lexer.rs | |||
@@ -1,56 +1,31 @@ | |||
1 | extern crate file; | 1 | extern crate file; |
2 | #[macro_use(assert_diff)] | ||
3 | extern crate difference; | ||
4 | extern crate libsyntax2; | 2 | extern crate libsyntax2; |
3 | extern crate testutils; | ||
5 | 4 | ||
6 | use std::path::{PathBuf, Path}; | 5 | use std::path::{Path}; |
7 | use std::fs::read_dir; | ||
8 | use std::fmt::Write; | 6 | use std::fmt::Write; |
9 | 7 | ||
10 | use libsyntax2::{Token, tokenize}; | 8 | use libsyntax2::{Token, tokenize}; |
9 | use testutils::{assert_equal_text, collect_tests}; | ||
11 | 10 | ||
12 | #[test] | 11 | #[test] |
13 | fn lexer_tests() { | 12 | fn lexer_tests() { |
14 | for test_case in lexer_test_cases() { | 13 | for test_case in collect_tests(&["lexer"]) { |
15 | lexer_test_case(&test_case); | 14 | lexer_test_case(&test_case); |
16 | } | 15 | } |
17 | } | 16 | } |
18 | 17 | ||
19 | fn lexer_test_dir() -> PathBuf { | ||
20 | let dir = env!("CARGO_MANIFEST_DIR"); | ||
21 | PathBuf::from(dir).join("tests/data/lexer") | ||
22 | } | ||
23 | |||
24 | fn lexer_test_cases() -> Vec<PathBuf> { | ||
25 | let mut acc = Vec::new(); | ||
26 | let dir = lexer_test_dir(); | ||
27 | for file in read_dir(&dir).unwrap() { | ||
28 | let file = file.unwrap(); | ||
29 | let path = file.path(); | ||
30 | if path.extension().unwrap_or_default() == "rs" { | ||
31 | acc.push(path); | ||
32 | } | ||
33 | } | ||
34 | acc.sort(); | ||
35 | acc | ||
36 | } | ||
37 | |||
38 | fn lexer_test_case(path: &Path) { | 18 | fn lexer_test_case(path: &Path) { |
39 | let actual = { | 19 | let actual = { |
40 | let text = file::get_text(path).unwrap(); | 20 | let text = file::get_text(path).unwrap(); |
41 | let tokens = tokenize(&text); | 21 | let tokens = tokenize(&text); |
42 | dump_tokens(&tokens, &text) | 22 | dump_tokens(&tokens, &text) |
43 | }; | 23 | }; |
44 | let expected = file::get_text(&path.with_extension("txt")).unwrap(); | 24 | let path = path.with_extension("txt"); |
25 | let expected = file::get_text(&path).unwrap(); | ||
45 | let expected = expected.as_str(); | 26 | let expected = expected.as_str(); |
46 | let actual = actual.as_str(); | 27 | let actual = actual.as_str(); |
47 | if expected == actual { | 28 | assert_equal_text(expected, actual, &path) |
48 | return | ||
49 | } | ||
50 | if expected.trim() == actual.trim() { | ||
51 | panic!("Whitespace difference!") | ||
52 | } | ||
53 | assert_diff!(expected, actual, "\n", 0) | ||
54 | } | 29 | } |
55 | 30 | ||
56 | fn dump_tokens(tokens: &[Token], text: &str) -> String { | 31 | fn dump_tokens(tokens: &[Token], text: &str) -> String { |