Diffstat (limited to 'crates/ra_syntax/tests/test.rs')
-rw-r--r--  crates/ra_syntax/tests/test.rs | 157
1 file changed, 34 insertions(+), 123 deletions(-)
diff --git a/crates/ra_syntax/tests/test.rs b/crates/ra_syntax/tests/test.rs
index 4266864bd..2235dc401 100644
--- a/crates/ra_syntax/tests/test.rs
+++ b/crates/ra_syntax/tests/test.rs
@@ -1,14 +1,13 @@
 extern crate ra_syntax;
-#[macro_use]
 extern crate test_utils;
 extern crate walkdir;
 
 use std::{
     fmt::Write,
-    fs,
-    path::{Path, PathBuf, Component},
+    path::{PathBuf, Component},
 };
 
+use test_utils::{project_dir, dir_tests, read_text, collect_tests};
 use ra_syntax::{
     utils::{check_fuzz_invariants, dump_tree},
     SourceFileNode,
@@ -16,7 +15,7 @@ use ra_syntax::{
 
 #[test]
 fn lexer_tests() {
-    dir_tests(&["lexer"], |text, _| {
+    dir_tests(&test_data_dir(), &["lexer"], |text, _| {
         let tokens = ra_syntax::tokenize(text);
         dump_tokens(&tokens, text)
     })
@@ -24,33 +23,41 @@ fn lexer_tests() {
 
 #[test]
 fn parser_tests() {
-    dir_tests(&["parser/inline/ok", "parser/ok"], |text, path| {
-        let file = SourceFileNode::parse(text);
-        let errors = file.errors();
-        assert_eq!(
-            &*errors,
-            &[] as &[ra_syntax::SyntaxError],
-            "There should be no errors in the file {:?}",
-            path.display()
-        );
-        dump_tree(file.syntax())
-    });
-    dir_tests(&["parser/err", "parser/inline/err"], |text, path| {
-        let file = SourceFileNode::parse(text);
-        let errors = file.errors();
-        assert_ne!(
-            &*errors,
-            &[] as &[ra_syntax::SyntaxError],
-            "There should be errors in the file {:?}",
-            path.display()
-        );
-        dump_tree(file.syntax())
-    });
+    dir_tests(
+        &test_data_dir(),
+        &["parser/inline/ok", "parser/ok"],
+        |text, path| {
+            let file = SourceFileNode::parse(text);
+            let errors = file.errors();
+            assert_eq!(
+                &*errors,
+                &[] as &[ra_syntax::SyntaxError],
+                "There should be no errors in the file {:?}",
+                path.display()
+            );
+            dump_tree(file.syntax())
+        },
+    );
+    dir_tests(
+        &test_data_dir(),
+        &["parser/err", "parser/inline/err"],
+        |text, path| {
+            let file = SourceFileNode::parse(text);
+            let errors = file.errors();
+            assert_ne!(
+                &*errors,
+                &[] as &[ra_syntax::SyntaxError],
+                "There should be errors in the file {:?}",
+                path.display()
+            );
+            dump_tree(file.syntax())
+        },
+    );
 }
 
 #[test]
 fn parser_fuzz_tests() {
-    for (_, text) in collect_tests(&["parser/fuzz-failures"]) {
+    for (_, text) in collect_tests(&test_data_dir(), &["parser/fuzz-failures"]) {
         check_fuzz_invariants(&text)
     }
 }
@@ -92,102 +99,6 @@ fn self_hosting_parsing() {
         "self_hosting_parsing found too few files - is it running in the right directory?"
     )
 }
-/// Read file and normalize newlines.
-///
-/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
-///
-/// ```
-/// let s = "
-/// ";
-/// assert_eq!(s.as_bytes(), &[10]);
-/// ```
-///
-/// so this should always be correct.
-fn read_text(path: &Path) -> String {
-    fs::read_to_string(path)
-        .expect(&format!("File at {:?} should be valid", path))
-        .replace("\r\n", "\n")
-}
-
-fn dir_tests<F>(paths: &[&str], f: F)
-where
-    F: Fn(&str, &Path) -> String,
-{
-    for (path, input_code) in collect_tests(paths) {
-        let parse_tree = f(&input_code, &path);
-        let path = path.with_extension("txt");
-        if !path.exists() {
-            println!("\nfile: {}", path.display());
-            println!("No .txt file with expected result, creating...\n");
-            println!("{}\n{}", input_code, parse_tree);
-            fs::write(&path, &parse_tree).unwrap();
-            panic!("No expected result")
-        }
-        let expected = read_text(&path);
-        let expected = expected.as_str();
-        let parse_tree = parse_tree.as_str();
-        assert_equal_text(expected, parse_tree, &path);
-    }
-}
-
-const REWRITE: bool = false;
-
-fn assert_equal_text(expected: &str, actual: &str, path: &Path) {
-    if expected == actual {
-        return;
-    }
-    let dir = project_dir();
-    let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
-    if expected.trim() == actual.trim() {
-        println!("whitespace difference, rewriting");
-        println!("file: {}\n", pretty_path.display());
-        fs::write(path, actual).unwrap();
-        return;
-    }
-    if REWRITE {
-        println!("rewriting {}", pretty_path.display());
-        fs::write(path, actual).unwrap();
-        return;
-    }
-    assert_eq_text!(expected, actual, "file: {}", pretty_path.display());
-}
-
-fn collect_tests(paths: &[&str]) -> Vec<(PathBuf, String)> {
-    paths
-        .iter()
-        .flat_map(|path| {
-            let path = test_data_dir().join(path);
-            test_from_dir(&path).into_iter()
-        })
-        .map(|path| {
-            let text = read_text(&path);
-            (path, text)
-        })
-        .collect()
-}
-
-fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
-    let mut acc = Vec::new();
-    for file in fs::read_dir(&dir).unwrap() {
-        let file = file.unwrap();
-        let path = file.path();
-        if path.extension().unwrap_or_default() == "rs" {
-            acc.push(path);
-        }
-    }
-    acc.sort();
-    acc
-}
-
-fn project_dir() -> PathBuf {
-    let dir = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(dir)
-        .parent()
-        .unwrap()
-        .parent()
-        .unwrap()
-        .to_owned()
-}
-
 
 fn test_data_dir() -> PathBuf {
     project_dir().join("crates/ra_syntax/tests/data")
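
For orientation, the new call sites above imply that the helpers now imported from `test_utils` (`dir_tests`, `collect_tests`, `read_text`, `project_dir`) take the test-data root as an explicit first argument instead of computing it locally. Below is a minimal sketch of what those shared helpers could look like, assuming their bodies stayed close to the local versions deleted in this diff; it is not the actual `test_utils` source, the parameter name `root` is illustrative, and the expected-output handling is simplified to a plain comparison (the deleted local version also wrote missing `.txt` files).

```rust
use std::{
    fs,
    path::{Path, PathBuf},
};

/// Read a file and normalize `\r\n` newlines to `\n`.
pub fn read_text(path: &Path) -> String {
    fs::read_to_string(path)
        .unwrap_or_else(|_| panic!("File at {:?} should be valid", path))
        .replace("\r\n", "\n")
}

/// Collect every `.rs` file under `root.join(path)` together with its contents.
pub fn collect_tests(root: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {
    let mut res = Vec::new();
    for path in paths {
        let dir = root.join(path);
        for entry in fs::read_dir(&dir).unwrap() {
            let path = entry.unwrap().path();
            if path.extension().unwrap_or_default() == "rs" {
                let text = read_text(&path);
                res.push((path, text));
            }
        }
    }
    res.sort();
    res
}

/// Run `f` on every collected file and compare its output with the sibling `.txt` file.
pub fn dir_tests<F>(root: &Path, paths: &[&str], f: F)
where
    F: Fn(&str, &Path) -> String,
{
    for (path, input_code) in collect_tests(root, paths) {
        let actual = f(&input_code, &path);
        let expected = read_text(&path.with_extension("txt"));
        assert_eq!(expected.as_str(), actual.as_str(), "file: {}", path.display());
    }
}
```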