author    Florian Diebold <[email protected]>  2018-12-23 11:05:54 +0000
committer Florian Diebold <[email protected]>  2018-12-23 12:48:04 +0000
commit    7348f7883fa2bd571fff036c82e98c102d05c362 (patch)
tree      e7882097498b6d85e631d570dac0d8a89cd24875 /crates/ra_syntax
parent    3899898d75176ce3cd87f9e2acecd7e3a987dda5 (diff)
Add testing infrastructure for type inference
- move dir_tests to test_utils for that.
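As a rough illustration (not part of this commit), the relocated `dir_tests` helper is now called with the test-data directory passed in explicitly, so crates other than `ra_syntax` can reuse the same data-driven harness. In the sketch below, only `dir_tests`/`project_dir` from `test_utils` and the explicit data-directory argument come from this diff; the consuming crate, its `test_data_dir()` path, and `infer_and_dump` are hypothetical stand-ins:

    // Sketch of the new call shape, assuming a consuming crate's test file.
    use std::path::PathBuf;

    use test_utils::{dir_tests, project_dir};

    fn test_data_dir() -> PathBuf {
        // Hypothetical data directory for a crate reusing the harness.
        project_dir().join("crates/ra_hir/tests/data")
    }

    #[test]
    fn type_inference_tests() {
        dir_tests(&test_data_dir(), &["inference"], |text, _path| {
            // The harness compares this String against a checked-in `.txt`
            // file next to each `.rs` fixture under the given directory.
            infer_and_dump(text)
        });
    }

    fn infer_and_dump(_text: &str) -> String {
        // Assumed entry point; real type inference lives outside this diff.
        String::new()
    }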
Diffstat (limited to 'crates/ra_syntax')
-rw-r--r--  crates/ra_syntax/tests/test.rs  105
1 file changed, 5 insertions(+), 100 deletions(-)
diff --git a/crates/ra_syntax/tests/test.rs b/crates/ra_syntax/tests/test.rs
index 4266864bd..9d94a1a23 100644
--- a/crates/ra_syntax/tests/test.rs
+++ b/crates/ra_syntax/tests/test.rs
@@ -9,6 +9,7 @@ use std::{
     path::{Path, PathBuf, Component},
 };
 
+use test_utils::{project_dir, dir_tests, read_text, collect_tests};
 use ra_syntax::{
     utils::{check_fuzz_invariants, dump_tree},
     SourceFileNode,
@@ -16,7 +17,7 @@ use ra_syntax::{
 
 #[test]
 fn lexer_tests() {
-    dir_tests(&["lexer"], |text, _| {
+    dir_tests(&test_data_dir(), &["lexer"], |text, _| {
         let tokens = ra_syntax::tokenize(text);
         dump_tokens(&tokens, text)
     })
@@ -24,7 +25,7 @@ fn lexer_tests() {
 
 #[test]
 fn parser_tests() {
-    dir_tests(&["parser/inline/ok", "parser/ok"], |text, path| {
+    dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
         let file = SourceFileNode::parse(text);
         let errors = file.errors();
         assert_eq!(
@@ -35,7 +36,7 @@ fn parser_tests() {
         );
         dump_tree(file.syntax())
     });
-    dir_tests(&["parser/err", "parser/inline/err"], |text, path| {
+    dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
         let file = SourceFileNode::parse(text);
         let errors = file.errors();
         assert_ne!(
@@ -50,7 +51,7 @@ fn parser_tests() {
 
 #[test]
 fn parser_fuzz_tests() {
-    for (_, text) in collect_tests(&["parser/fuzz-failures"]) {
+    for (_, text) in collect_tests(&test_data_dir(), &["parser/fuzz-failures"]) {
         check_fuzz_invariants(&text)
     }
 }
@@ -92,102 +93,6 @@ fn self_hosting_parsing() {
92 "self_hosting_parsing found too few files - is it running in the right directory?" 93 "self_hosting_parsing found too few files - is it running in the right directory?"
93 ) 94 )
94} 95}
95/// Read file and normalize newlines.
96///
97/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
98///
99/// ```
100/// let s = "
101/// ";
102/// assert_eq!(s.as_bytes(), &[10]);
103/// ```
104///
105/// so this should always be correct.
106fn read_text(path: &Path) -> String {
107 fs::read_to_string(path)
108 .expect(&format!("File at {:?} should be valid", path))
109 .replace("\r\n", "\n")
110}
111
112fn dir_tests<F>(paths: &[&str], f: F)
113where
114 F: Fn(&str, &Path) -> String,
115{
116 for (path, input_code) in collect_tests(paths) {
117 let parse_tree = f(&input_code, &path);
118 let path = path.with_extension("txt");
119 if !path.exists() {
120 println!("\nfile: {}", path.display());
121 println!("No .txt file with expected result, creating...\n");
122 println!("{}\n{}", input_code, parse_tree);
123 fs::write(&path, &parse_tree).unwrap();
124 panic!("No expected result")
125 }
126 let expected = read_text(&path);
127 let expected = expected.as_str();
128 let parse_tree = parse_tree.as_str();
129 assert_equal_text(expected, parse_tree, &path);
130 }
131}
132
133const REWRITE: bool = false;
134
135fn assert_equal_text(expected: &str, actual: &str, path: &Path) {
136 if expected == actual {
137 return;
138 }
139 let dir = project_dir();
140 let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
141 if expected.trim() == actual.trim() {
142 println!("whitespace difference, rewriting");
143 println!("file: {}\n", pretty_path.display());
144 fs::write(path, actual).unwrap();
145 return;
146 }
147 if REWRITE {
148 println!("rewriting {}", pretty_path.display());
149 fs::write(path, actual).unwrap();
150 return;
151 }
152 assert_eq_text!(expected, actual, "file: {}", pretty_path.display());
153}
154
155fn collect_tests(paths: &[&str]) -> Vec<(PathBuf, String)> {
156 paths
157 .iter()
158 .flat_map(|path| {
159 let path = test_data_dir().join(path);
160 test_from_dir(&path).into_iter()
161 })
162 .map(|path| {
163 let text = read_text(&path);
164 (path, text)
165 })
166 .collect()
167}
168
169fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
170 let mut acc = Vec::new();
171 for file in fs::read_dir(&dir).unwrap() {
172 let file = file.unwrap();
173 let path = file.path();
174 if path.extension().unwrap_or_default() == "rs" {
175 acc.push(path);
176 }
177 }
178 acc.sort();
179 acc
180}
181
182fn project_dir() -> PathBuf {
183 let dir = env!("CARGO_MANIFEST_DIR");
184 PathBuf::from(dir)
185 .parent()
186 .unwrap()
187 .parent()
188 .unwrap()
189 .to_owned()
190}
191 96
192fn test_data_dir() -> PathBuf { 97fn test_data_dir() -> PathBuf {
193 project_dir().join("crates/ra_syntax/tests/data") 98 project_dir().join("crates/ra_syntax/tests/data")