diff options
Diffstat (limited to 'crates/ra_syntax/src')
-rw-r--r--  crates/ra_syntax/src/lib.rs   |   2
-rw-r--r--  crates/ra_syntax/src/tests.rs | 101
2 files changed, 103 insertions(+), 0 deletions(-)
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 7f69b86e1..4c4e0580a 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs | |||
@@ -24,6 +24,8 @@ mod syntax_error; | |||
24 | mod parsing; | 24 | mod parsing; |
25 | mod validation; | 25 | mod validation; |
26 | mod ptr; | 26 | mod ptr; |
27 | #[cfg(test)] | ||
28 | mod tests; | ||
27 | 29 | ||
28 | pub mod algo; | 30 | pub mod algo; |
29 | pub mod ast; | 31 | pub mod ast; |
diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs new file mode 100644 index 000000000..fa5d2d5d8 --- /dev/null +++ b/crates/ra_syntax/src/tests.rs | |||
@@ -0,0 +1,101 @@ | |||
1 | use std::{ | ||
2 | fmt::Write, | ||
3 | path::{Component, PathBuf}, | ||
4 | }; | ||
5 | |||
6 | use test_utils::{collect_tests, dir_tests, project_dir, read_text}; | ||
7 | |||
8 | use crate::{fuzz, SourceFile}; | ||
9 | |||
/// Data-driven lexer tests: tokenize every file under `test_data/lexer` and
/// compare the token dump against the checked-in expected output.
#[test]
fn lexer_tests() {
    dir_tests(&test_data_dir(), &["lexer"], |text, _path| {
        dump_tokens(&crate::tokenize(text), text)
    })
}
17 | |||
/// Data-driven parser tests.
///
/// Inputs under `parser/ok` and `parser/inline/ok` must parse without any
/// errors; inputs under `parser/err` and `parser/inline/err` must produce at
/// least one. Either way the syntax-tree dump is diffed against the expected
/// output stored next to the input.
#[test]
fn parser_tests() {
    dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
        let parse = SourceFile::parse(text);
        assert_eq!(
            parse.errors(),
            &[] as &[crate::SyntaxError],
            "There should be no errors in the file {:?}",
            path.display(),
        );
        parse.debug_dump()
    });
    dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
        let parse = SourceFile::parse(text);
        assert!(
            !parse.errors().is_empty(),
            "There should be errors in the file {:?}",
            path.display()
        );
        parse.debug_dump()
    });
}
38 | |||
/// Replays previously discovered parser fuzz failures so regressions are
/// caught without re-running the fuzzer.
#[test]
fn parser_fuzz_tests() {
    let cases = collect_tests(&test_data_dir(), &["parser/fuzz-failures"]);
    for (_path, text) in cases {
        fuzz::check_parser(&text);
    }
}
45 | |||
/// Replays previously discovered incremental-reparse fuzz failures.
#[test]
fn reparse_fuzz_tests() {
    let cases = collect_tests(&test_data_dir(), &["reparse/fuzz-failures"]);
    for (_path, text) in cases {
        let case = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap();
        // Print the decoded case so a failure's input is visible in the log.
        println!("{:?}", case);
        case.run();
    }
}
54 | |||
/// Test that rust-analyzer can parse and validate its own codebase: every
/// `.rs` file under `crates/` (excluding test fixtures) must parse cleanly.
/// FIXME: Use this as a benchmark
#[test]
fn self_hosting_parsing() {
    use std::ffi::OsStr;
    let dir = project_dir().join("crates");
    let mut count = 0;
    for entry in walkdir::WalkDir::new(dir)
        .into_iter()
        .filter_entry(|entry| {
            // Skip anything inside a `test_data` directory: those files are
            // test fixtures, not source, and are not expected to parse.
            !entry.path().components().any(|component| {
                component == Component::Normal(OsStr::new("test_data"))
            })
        })
        .map(|e| e.unwrap())
        .filter(|entry| {
            // Keep only regular `.rs` files.
            !entry.path().is_dir() && (entry.path().extension() == Some(OsStr::new("rs")))
        })
    {
        count += 1;
        let text = read_text(entry.path());
        // Include the offending path in the panic message so a failure is
        // actionable straight from the test log.
        SourceFile::parse(&text)
            .ok()
            .unwrap_or_else(|_| panic!("There should be no errors in the file {:?}", entry.path()));
    }
    // Sanity check that the walk actually visited source files; guards
    // against silently passing when run from the wrong working directory.
    assert!(
        count > 30,
        "self_hosting_parsing found too few files - is it running in the right directory?"
    )
}
85 | |||
86 | fn test_data_dir() -> PathBuf { | ||
87 | project_dir().join("crates/ra_syntax/test_data") | ||
88 | } | ||
89 | |||
90 | fn dump_tokens(tokens: &[crate::Token], text: &str) -> String { | ||
91 | let mut acc = String::new(); | ||
92 | let mut offset = 0; | ||
93 | for token in tokens { | ||
94 | let len: u32 = token.len.into(); | ||
95 | let len = len as usize; | ||
96 | let token_text = &text[offset..offset + len]; | ||
97 | offset += len; | ||
98 | write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap() | ||
99 | } | ||
100 | acc | ||
101 | } | ||