diff options
author | Aleksey Kladov <[email protected]> | 2018-09-16 10:54:24 +0100 |
---|---|---|
committer | Aleksey Kladov <[email protected]> | 2018-09-16 11:07:39 +0100 |
commit | b5021411a84822cb3f1e3aeffad9550dd15bdeb6 (patch) | |
tree | 9dca564f8e51b298dced01c4ce669c756dce3142 /crates/libsyntax2/tests/test | |
parent | ba0bfeee12e19da40b5eabc8d0408639af10e96f (diff) |
rename all things
Diffstat (limited to 'crates/libsyntax2/tests/test')
-rw-r--r-- | crates/libsyntax2/tests/test/main.rs | 151 |
1 file changed, 0 insertions(+), 151 deletions(-)
diff --git a/crates/libsyntax2/tests/test/main.rs b/crates/libsyntax2/tests/test/main.rs deleted file mode 100644 index 5a8879fce..000000000 --- a/crates/libsyntax2/tests/test/main.rs +++ /dev/null | |||
@@ -1,151 +0,0 @@ | |||
1 | extern crate libsyntax2; | ||
2 | #[macro_use] | ||
3 | extern crate test_utils; | ||
4 | extern crate walkdir; | ||
5 | |||
6 | use std::{ | ||
7 | fs, | ||
8 | path::{Path, PathBuf}, | ||
9 | fmt::Write, | ||
10 | }; | ||
11 | |||
12 | use libsyntax2::{ | ||
13 | File, | ||
14 | utils::{dump_tree, check_fuzz_invariants}, | ||
15 | }; | ||
16 | |||
/// Runs every lexer test case: tokenize the input and dump the tokens.
#[test]
fn lexer_tests() {
    let render = |text: &str| {
        let tokens = libsyntax2::tokenize(text);
        dump_tokens(&tokens, text)
    };
    dir_tests(&["lexer"], render);
}
24 | |||
/// Runs the inline/ok/err parser test cases: parse each file and dump
/// the resulting syntax tree for comparison against the expectation file.
#[test]
fn parser_tests() {
    let render = |text: &str| {
        let file = File::parse(text);
        dump_tree(file.syntax())
    };
    dir_tests(&["parser/inline", "parser/ok", "parser/err"], render);
}
32 | |||
/// Re-runs previously collected fuzz failures against the parser invariants.
#[test]
fn parser_fuzz_tests() {
    let cases = collect_tests(&["parser/fuzz-failures"]);
    for (_path, text) in cases {
        check_fuzz_invariants(&text);
    }
}
39 | |||
40 | |||
/// Reads a file to a `String`, normalizing `\r\n` line endings to `\n`.
///
/// `rustc` itself normalizes `\r\n` to `\n` in source text, so comparing
/// against normalized contents keeps these tests stable across checkouts
/// with different line-ending settings.
///
/// Panics if the file cannot be read.
fn read_text(path: &Path) -> String {
    let raw = fs::read_to_string(path).unwrap();
    raw.replace("\r\n", "\n")
}
55 | |||
56 | pub fn dir_tests<F>(paths: &[&str], f: F) | ||
57 | where | ||
58 | F: Fn(&str) -> String, | ||
59 | { | ||
60 | for (path, input_code) in collect_tests(paths) { | ||
61 | let parse_tree = f(&input_code); | ||
62 | let path = path.with_extension("txt"); | ||
63 | if !path.exists() { | ||
64 | println!("\nfile: {}", path.display()); | ||
65 | println!("No .txt file with expected result, creating...\n"); | ||
66 | println!("{}\n{}", input_code, parse_tree); | ||
67 | fs::write(&path, parse_tree).unwrap(); | ||
68 | panic!("No expected result") | ||
69 | } | ||
70 | let expected = read_text(&path); | ||
71 | let expected = expected.as_str(); | ||
72 | let parse_tree = parse_tree.as_str(); | ||
73 | assert_equal_text(expected, parse_tree, &path); | ||
74 | } | ||
75 | } | ||
76 | |||
77 | const REWRITE: bool = false; | ||
78 | |||
79 | fn assert_equal_text(expected: &str, actual: &str, path: &Path) { | ||
80 | if expected == actual { | ||
81 | return; | ||
82 | } | ||
83 | let dir = project_dir(); | ||
84 | let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path); | ||
85 | if expected.trim() == actual.trim() { | ||
86 | println!("whitespace difference, rewriting"); | ||
87 | println!("file: {}\n", pretty_path.display()); | ||
88 | fs::write(path, actual).unwrap(); | ||
89 | return; | ||
90 | } | ||
91 | if REWRITE { | ||
92 | println!("rewriting {}", pretty_path.display()); | ||
93 | fs::write(path, actual).unwrap(); | ||
94 | return; | ||
95 | } | ||
96 | assert_eq_text!(expected, actual, "file: {}", pretty_path.display()); | ||
97 | } | ||
98 | |||
99 | fn collect_tests(paths: &[&str]) -> Vec<(PathBuf, String)> { | ||
100 | paths | ||
101 | .iter() | ||
102 | .flat_map(|path| { | ||
103 | let path = test_data_dir().join(path); | ||
104 | test_from_dir(&path).into_iter() | ||
105 | }) | ||
106 | .map(|path| { | ||
107 | let text = read_text(&path); | ||
108 | (path, text) | ||
109 | }) | ||
110 | .collect() | ||
111 | } | ||
112 | |||
/// Lists the `.rs` files directly inside `dir`, sorted by path.
///
/// Panics if the directory cannot be read.
fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
    let mut files: Vec<PathBuf> = fs::read_dir(dir)
        .unwrap()
        .map(|entry| entry.unwrap().path())
        .filter(|path| path.extension().unwrap_or_default() == "rs")
        .collect();
    files.sort();
    files
}
125 | |||
126 | fn project_dir() -> PathBuf { | ||
127 | let dir = env!("CARGO_MANIFEST_DIR"); | ||
128 | PathBuf::from(dir) | ||
129 | .parent() | ||
130 | .unwrap() | ||
131 | .parent() | ||
132 | .unwrap() | ||
133 | .to_owned() | ||
134 | } | ||
135 | |||
136 | fn test_data_dir() -> PathBuf { | ||
137 | project_dir().join("crates/libsyntax2/tests/data") | ||
138 | } | ||
139 | |||
140 | fn dump_tokens(tokens: &[libsyntax2::Token], text: &str) -> String { | ||
141 | let mut acc = String::new(); | ||
142 | let mut offset = 0; | ||
143 | for token in tokens { | ||
144 | let len: u32 = token.len.into(); | ||
145 | let len = len as usize; | ||
146 | let token_text = &text[offset..offset + len]; | ||
147 | offset += len; | ||
148 | write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap() | ||
149 | } | ||
150 | acc | ||
151 | } | ||