diff options
author | Dmitry <[email protected]> | 2020-08-14 19:32:05 +0100 |
---|---|---|
committer | Dmitry <[email protected]> | 2020-08-14 19:32:05 +0100 |
commit | 178c3e135a2a249692f7784712492e7884ae0c00 (patch) | |
tree | ac6b769dbf7162150caa0c1624786a4dd79ff3be /crates/syntax/src/tests.rs | |
parent | 06ff8e6c760ff05f10e868b5d1f9d79e42fbb49c (diff) | |
parent | c2594daf2974dbd4ce3d9b7ec72481764abaceb5 (diff) |
Merge remote-tracking branch 'origin/master'
Diffstat (limited to 'crates/syntax/src/tests.rs')
-rw-r--r-- | crates/syntax/src/tests.rs | 280 |
1 file changed, 280 insertions, 0 deletions
diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs new file mode 100644 index 000000000..ddc718369 --- /dev/null +++ b/crates/syntax/src/tests.rs | |||
@@ -0,0 +1,280 @@ | |||
1 | use std::{ | ||
2 | fmt::Write, | ||
3 | fs, | ||
4 | path::{Path, PathBuf}, | ||
5 | }; | ||
6 | |||
7 | use expect::expect_file; | ||
8 | use rayon::prelude::*; | ||
9 | use test_utils::project_dir; | ||
10 | |||
11 | use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextSize, Token}; | ||
12 | |||
#[test]
fn lexer_tests() {
    // FIXME:
    // * Add tests for unicode escapes in byte-character and [raw]-byte-string literals
    // * Add tests for unescape errors

    let data_dir = test_data_dir();
    // Inputs under `lexer/ok` must tokenize without a single error.
    dir_tests(&data_dir, &["lexer/ok"], "txt", |source, path| {
        let (tokens, errors) = tokenize(source);
        assert_errors_are_absent(&errors, path);
        dump_tokens_and_errors(&tokens, &errors, source)
    });
    // Inputs under `lexer/err` must surface at least one lexer error.
    dir_tests(&data_dir, &["lexer/err"], "txt", |source, path| {
        let (tokens, errors) = tokenize(source);
        assert_errors_are_present(&errors, path);
        dump_tokens_and_errors(&tokens, &errors, source)
    });
}
30 | |||
#[test]
fn parse_smoke_test() {
    // Minimal end-to-end check that a hello-world program parses cleanly.
    let source = r##"
fn main() {
    println!("Hello, world!")
}
"##;

    let parse = SourceFile::parse(source);
    assert!(parse.ok().is_ok());
}
43 | |||
#[test]
fn parser_tests() {
    let data_dir = test_data_dir();
    // Sources that must parse without errors; expected output is the tree dump.
    dir_tests(&data_dir, &["parser/inline/ok", "parser/ok"], "rast", |source, path| {
        let parse = SourceFile::parse(source);
        let errs = parse.errors();
        assert_errors_are_absent(&errs, path);
        parse.debug_dump()
    });
    // Sources that must produce at least one parse error.
    dir_tests(&data_dir, &["parser/err", "parser/inline/err"], "rast", |source, path| {
        let parse = SourceFile::parse(source);
        let errs = parse.errors();
        assert_errors_are_present(&errs, path);
        parse.debug_dump()
    });
}
59 | |||
#[test]
fn expr_parser_tests() {
    // Expression fragments: `ok` fixtures must parse, `err` fixtures must fail.
    let ok = ["parser/fragments/expr/ok"];
    let err = ["parser/fragments/expr/err"];
    fragment_parser_dir_test(&ok, &err, crate::ast::Expr::parse);
}
68 | |||
#[test]
fn path_parser_tests() {
    // Path fragments: `ok` fixtures must parse, `err` fixtures must fail.
    let ok = ["parser/fragments/path/ok"];
    let err = ["parser/fragments/path/err"];
    fragment_parser_dir_test(&ok, &err, crate::ast::Path::parse);
}
77 | |||
#[test]
fn pattern_parser_tests() {
    // Pattern fragments: `ok` fixtures must parse, `err` fixtures must fail.
    let ok = ["parser/fragments/pattern/ok"];
    let err = ["parser/fragments/pattern/err"];
    fragment_parser_dir_test(&ok, &err, crate::ast::Pat::parse);
}
86 | |||
#[test]
fn item_parser_tests() {
    // Item fragments: `ok` fixtures must parse, `err` fixtures must fail.
    let ok = ["parser/fragments/item/ok"];
    let err = ["parser/fragments/item/err"];
    fragment_parser_dir_test(&ok, &err, crate::ast::Item::parse);
}
95 | |||
#[test]
fn type_parser_tests() {
    // Type fragments: `ok` fixtures must parse, `err` fixtures must fail.
    let ok = ["parser/fragments/type/ok"];
    let err = ["parser/fragments/type/err"];
    fragment_parser_dir_test(&ok, &err, crate::ast::Type::parse);
}
104 | |||
#[test]
fn parser_fuzz_tests() {
    // Regression corpus: every input that once crashed the parser must now be
    // handled without panicking.
    let corpus = collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]);
    for (_path, text) in corpus {
        fuzz::check_parser(&text)
    }
}
111 | |||
#[test]
fn reparse_fuzz_tests() {
    // Regression corpus for incremental reparsing; each fixture encodes an
    // edit plus the text it applies to.
    let corpus = collect_rust_files(&test_data_dir(), &["reparse/fuzz-failures"]);
    for (_path, text) in corpus {
        let check = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap();
        println!("{:?}", check);
        check.run();
    }
}
120 | |||
/// Test that rust-analyzer can parse and validate its own codebase.
/// FIXME: Use this as a benchmark
#[test]
fn self_hosting_parsing() {
    let dir = project_dir().join("crates");
    let files = walkdir::WalkDir::new(dir)
        .into_iter()
        .filter_entry(|entry| {
            // Prune everything under any `test_data` directory: those files
            // are fixtures (often intentionally invalid Rust), not sources.
            !entry.path().components().any(|component| component.as_os_str() == "test_data")
        })
        .map(|e| e.unwrap())
        .filter(|entry| {
            // Keep only regular `.rs` files.
            !entry.path().is_dir() && (entry.path().extension().unwrap_or_default() == "rs")
        })
        .map(|entry| entry.into_path())
        .collect::<Vec<_>>();
    // Sanity check that the walk actually found the codebase; guards against
    // silently passing when run from the wrong working directory.
    assert!(
        files.len() > 100,
        "self_hosting_parsing found too few files - is it running in the right directory?"
    );

    // Parse all files in parallel, keeping only those that produce errors.
    let errors = files
        .into_par_iter()
        .filter_map(|file| {
            let text = read_text(&file);
            match SourceFile::parse(&text).ok() {
                Ok(_) => None,
                Err(err) => Some((file, err)),
            }
        })
        .collect::<Vec<_>>();

    if !errors.is_empty() {
        let errors = errors
            .into_iter()
            .map(|(path, err)| format!("{}: {:?}\n", path.display(), err))
            .collect::<String>();
        panic!("Parsing errors:\n{}\n", errors);
    }
}
163 | |||
164 | fn test_data_dir() -> PathBuf { | ||
165 | project_dir().join("crates/syntax/test_data") | ||
166 | } | ||
167 | |||
168 | fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) { | ||
169 | assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display()); | ||
170 | } | ||
/// Panics if lexing/parsing the file at `path` produced any errors.
///
/// Uses `assert_eq!` against an empty slice so a failure prints the full
/// list of offending errors, not just a boolean.
fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) {
    assert_eq!(
        errors,
        &[] as &[SyntaxError],
        "There should be no errors in the file {:?}",
        path.display(),
    );
}
179 | |||
180 | fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String { | ||
181 | let mut acc = String::new(); | ||
182 | let mut offset: TextSize = 0.into(); | ||
183 | for token in tokens { | ||
184 | let token_len = token.len; | ||
185 | let token_text = &text[TextRange::at(offset, token.len)]; | ||
186 | offset += token.len; | ||
187 | writeln!(acc, "{:?} {:?} {:?}", token.kind, token_len, token_text).unwrap(); | ||
188 | } | ||
189 | for err in errors { | ||
190 | writeln!(acc, "> error{:?} token({:?}) msg({})", err.range(), &text[err.range()], err) | ||
191 | .unwrap(); | ||
192 | } | ||
193 | acc | ||
194 | } | ||
195 | |||
196 | fn fragment_parser_dir_test<T, F>(ok_paths: &[&str], err_paths: &[&str], f: F) | ||
197 | where | ||
198 | T: crate::AstNode, | ||
199 | F: Fn(&str) -> Result<T, ()>, | ||
200 | { | ||
201 | dir_tests(&test_data_dir(), ok_paths, "rast", |text, path| { | ||
202 | if let Ok(node) = f(text) { | ||
203 | format!("{:#?}", crate::ast::AstNode::syntax(&node)) | ||
204 | } else { | ||
205 | panic!("Failed to parse '{:?}'", path); | ||
206 | } | ||
207 | }); | ||
208 | dir_tests(&test_data_dir(), err_paths, "rast", |text, path| { | ||
209 | if let Ok(_) = f(text) { | ||
210 | panic!("'{:?}' successfully parsed when it should have errored", path); | ||
211 | } else { | ||
212 | "ERROR\n".to_owned() | ||
213 | } | ||
214 | }); | ||
215 | } | ||
216 | |||
217 | /// Calls callback `f` with input code and file paths for each `.rs` file in `test_data_dir` | ||
218 | /// subdirectories defined by `paths`. | ||
219 | /// | ||
220 | /// If the content of the matching output file differs from the output of `f()` | ||
221 | /// the test will fail. | ||
222 | /// | ||
223 | /// If there is no matching output file it will be created and filled with the | ||
224 | /// output of `f()`, but the test will fail. | ||
225 | fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], outfile_extension: &str, f: F) | ||
226 | where | ||
227 | F: Fn(&str, &Path) -> String, | ||
228 | { | ||
229 | for (path, input_code) in collect_rust_files(test_data_dir, paths) { | ||
230 | let actual = f(&input_code, &path); | ||
231 | let path = path.with_extension(outfile_extension); | ||
232 | expect_file![path].assert_eq(&actual) | ||
233 | } | ||
234 | } | ||
235 | |||
236 | /// Collects all `.rs` files from `dir` subdirectories defined by `paths`. | ||
237 | fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> { | ||
238 | paths | ||
239 | .iter() | ||
240 | .flat_map(|path| { | ||
241 | let path = root_dir.to_owned().join(path); | ||
242 | rust_files_in_dir(&path).into_iter() | ||
243 | }) | ||
244 | .map(|path| { | ||
245 | let text = read_text(&path); | ||
246 | (path, text) | ||
247 | }) | ||
248 | .collect() | ||
249 | } | ||
250 | |||
/// Returns the paths of all `.rs` files directly inside `dir`, sorted.
///
/// Panics if `dir` cannot be read — fine for a test helper operating on
/// checked-in fixture directories.
fn rust_files_in_dir(dir: &Path) -> Vec<PathBuf> {
    let mut paths: Vec<PathBuf> = fs::read_dir(dir)
        .unwrap()
        .map(|entry| entry.unwrap().path())
        .filter(|path| path.extension().unwrap_or_default() == "rs")
        .collect();
    paths.sort();
    paths
}
264 | |||
/// Reads the file at `path`, normalizing `\r\n` newlines to `\n`.
///
/// `rustc` itself normalizes `\r\n` to `\n` when reading source text, so
/// expectation files are stored and compared with `\n` only; normalizing
/// here keeps the tests byte-identical across platforms and git settings.
fn read_text(path: &Path) -> String {
    let raw = match fs::read_to_string(path) {
        Ok(text) => text,
        Err(_) => panic!("File at {:?} should be valid", path),
    };
    raw.replace("\r\n", "\n")
}