-rw-r--r--  crates/ra_analysis/src/imp.rs                     14
-rw-r--r--  crates/ra_analysis/src/lib.rs                      3
-rw-r--r--  crates/ra_hir/src/mock.rs                          9
-rw-r--r--  crates/ra_hir/src/ty.rs                           30
-rw-r--r--  crates/ra_hir/src/ty/tests.rs                     62
-rw-r--r--  crates/ra_hir/src/ty/tests/data/0001_basics.rs   11
-rw-r--r--  crates/ra_hir/src/ty/tests/data/0001_basics.txt  13
-rw-r--r--  crates/ra_syntax/tests/test.rs                   105
-rw-r--r--  crates/test_utils/src/lib.rs                      99
9 files changed, 222 insertions(+), 124 deletions(-)
diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs
index b01382808..4e0631679 100644
--- a/crates/ra_analysis/src/imp.rs
+++ b/crates/ra_analysis/src/imp.rs
@@ -5,7 +5,8 @@ use std::{
 
 use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit};
 use ra_syntax::{
-    ast::{self, ArgListOwner, Expr, NameOwner},
+    ast::{self, ArgListOwner, Expr, NameOwner, FnDef},
+    algo::find_covering_node,
     AstNode, SourceFileNode,
     SyntaxKind::*,
     SyntaxNodeRef, TextRange, TextUnit,
@@ -510,6 +511,17 @@ impl AnalysisImpl {
         Ok(None)
     }
 
+    pub fn type_of(&self, file_id: FileId, range: TextRange) -> Cancelable<Option<String>> {
+        let file = self.db.source_file(file_id);
+        let syntax = file.syntax();
+        let node = find_covering_node(syntax, range);
+        let parent_fn = node.ancestors().filter_map(FnDef::cast).next();
+        let parent_fn = if let Some(p) = parent_fn { p } else { return Ok(None) };
+        let function = ctry!(source_binder::function_from_source(&*self.db, file_id, parent_fn)?);
+        let infer = function.infer(&*self.db);
+        Ok(infer.type_of_node(node).map(|t| t.to_string()))
+    }
+
     fn index_resolve(&self, name_ref: ast::NameRef) -> Cancelable<Vec<(FileId, FileSymbol)>> {
         let name = name_ref.text();
         let mut query = Query::new(name.to_string());
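The lookup above runs in three steps: find the smallest syntax node that covers the requested range, walk its ancestors to the enclosing fn, then ask that function's inference result for the node's type. A minimal sketch of the first two steps, restated using only names imported in this diff:

    // Smallest node covering `range`, then the nearest enclosing fn, if any.
    fn enclosing_fn<'a>(syntax: SyntaxNodeRef<'a>, range: TextRange) -> Option<FnDef<'a>> {
        let node = find_covering_node(syntax, range);
        node.ancestors().filter_map(FnDef::cast).next()
    }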
diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs
index 85df9c089..830898140 100644
--- a/crates/ra_analysis/src/lib.rs
+++ b/crates/ra_analysis/src/lib.rs
@@ -366,6 +366,9 @@ impl Analysis {
     ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> {
         self.imp.resolve_callable(position)
     }
+    pub fn type_of(&self, file_id: FileId, range: TextRange) -> Cancelable<Option<String>> {
+        self.imp.type_of(file_id, range)
+    }
 }
 
 pub struct LibraryData {
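This exposes the query on the public `Analysis` API. A hypothetical call site, inside some function that already returns `Cancelable` (the `analysis`, `file_id`, and `range` values are assumed to come from the surrounding editor plumbing, not from this commit):

    // Some("u32"), for instance, or None when nothing was inferred there.
    let ty: Option<String> = analysis.type_of(file_id, range)?;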
diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs
index a9fa540d5..3020ee793 100644
--- a/crates/ra_hir/src/mock.rs
+++ b/crates/ra_hir/src/mock.rs
@@ -24,6 +24,15 @@ impl MockDatabase {
         (db, source_root)
     }
 
+    pub(crate) fn with_single_file(text: &str) -> (MockDatabase, SourceRoot, FileId) {
+        let mut db = MockDatabase::default();
+        let mut source_root = SourceRoot::default();
+        let file_id = db.add_file(&mut source_root, "/main.rs", text);
+        db.query_mut(ra_db::SourceRootQuery)
+            .set(WORKSPACE, Arc::new(source_root.clone()));
+        (db, source_root, file_id)
+    }
+
     pub(crate) fn with_position(fixture: &str) -> (MockDatabase, FilePosition) {
         let (db, _, position) = MockDatabase::from_fixture(fixture);
         let position = position.expect("expected a marker ( <|> )");
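The new constructor is what keeps the inference tests to one line of setup; `infer_file` later in this same commit uses it exactly like this:

    let (db, _source_root, file_id) = MockDatabase::with_single_file("fn f() { 1; }");
    let source_file = db.source_file(file_id);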
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs
index 087385b98..66b204dcd 100644
--- a/crates/ra_hir/src/ty.rs
+++ b/crates/ra_hir/src/ty.rs
@@ -6,6 +6,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
 
 use std::sync::Arc;
 use std::collections::HashMap;
+use std::fmt;
 
 use ra_db::LocalSyntaxPtr;
 use ra_syntax::{
@@ -184,11 +185,40 @@ impl Ty {
     }
 }
 
+impl fmt::Display for Ty {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Ty::Bool => write!(f, "bool"),
+            Ty::Char => write!(f, "char"),
+            Ty::Int(t) => write!(f, "{}", t.ty_to_string()),
+            Ty::Uint(t) => write!(f, "{}", t.ty_to_string()),
+            Ty::Float(t) => write!(f, "{}", t.ty_to_string()),
+            Ty::Str => write!(f, "str"),
+            Ty::Slice(t) => write!(f, "[{}]", t),
+            Ty::Never => write!(f, "!"),
+            Ty::Tuple(ts) => {
+                write!(f, "(")?;
+                for t in ts {
+                    write!(f, "{},", t)?;
+                }
+                write!(f, ")")
+            }
+            Ty::Unknown => write!(f, "[unknown]")
+        }
+    }
+}
+
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct InferenceResult {
     type_for: FxHashMap<LocalSyntaxPtr, Ty>,
 }
 
+impl InferenceResult {
+    pub fn type_of_node(&self, node: SyntaxNodeRef) -> Option<Ty> {
+        self.type_for.get(&LocalSyntaxPtr::new(node)).cloned()
+    }
+}
+
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct InferenceContext {
     scopes: Arc<FnScopes>,
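`Display` is what `type_of` in `imp.rs` relies on via `to_string()`. A small sketch of the strings it produces, using only variants shown above (note that the tuple arm writes a comma after every element, so a pair renders as `(bool,char,)`):

    assert_eq!(Ty::Str.to_string(), "str");
    assert_eq!(Ty::Never.to_string(), "!");
    assert_eq!(Ty::Unknown.to_string(), "[unknown]");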
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index f2466dd51..98eedaa3f 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -1,8 +1,11 @@
+use std::fmt::Write;
 use std::sync::Arc;
+use std::path::{Path, PathBuf};
 
 use salsa::Database;
 use ra_db::{FilesDatabase, CrateGraph, SyntaxDatabase};
 use ra_syntax::{SmolStr, algo::visit::{visitor, Visitor}, ast::{self, AstNode}};
+use test_utils::{project_dir, dir_tests};
 use relative_path::RelativePath;
 
 use crate::{source_binder, mock::WORKSPACE, module::ModuleSourceNode};
@@ -13,33 +16,46 @@ use crate::{
     mock::MockDatabase,
 };
 
-fn infer_all_fns(fixture: &str) -> () {
-    let (db, source_root) = MockDatabase::with_files(fixture);
-    for &file_id in source_root.files.values() {
-        let source_file = db.source_file(file_id);
-        for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) {
-            let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap().unwrap();
-            let inference_result = func.infer(&db);
-            for (syntax_ptr, ty) in &inference_result.type_for {
-                let node = syntax_ptr.resolve(&source_file);
-                eprintln!("{} '{}': {:?}", syntax_ptr.range(), node.text(), ty);
-            }
+fn infer_file(content: &str) -> String {
+    let (db, source_root, file_id) = MockDatabase::with_single_file(content);
+    let source_file = db.source_file(file_id);
+    let mut acc = String::new();
+    for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) {
+        let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap().unwrap();
+        let inference_result = func.infer(&db);
+        for (syntax_ptr, ty) in &inference_result.type_for {
+            let node = syntax_ptr.resolve(&source_file);
+            write!(acc, "{} '{}': {}\n", syntax_ptr.range(), ellipsize(node.text().to_string().replace("\n", " "), 15), ty);
         }
     }
+    acc
+}
+
+fn ellipsize(mut text: String, max_len: usize) -> String {
+    if text.len() <= max_len {
+        return text;
+    }
+    let ellipsis = "...";
+    let e_len = ellipsis.len();
+    let mut prefix_len = (max_len - e_len) / 2;
+    while !text.is_char_boundary(prefix_len) {
+        prefix_len += 1;
+    }
+    let mut suffix_len = max_len - e_len - prefix_len;
+    while !text.is_char_boundary(text.len() - suffix_len) {
+        suffix_len += 1;
+    }
+    text.replace_range(prefix_len..text.len() - suffix_len, ellipsis);
+    text
 }
 
 #[test]
-fn infer_smoke_test() {
-    let text = "
-        //- /lib.rs
-        fn foo(x: u32, y: !) -> i128 {
-            x;
-            y;
-            return 1;
-            \"hello\";
-            0
-        }
-        ";
+pub fn infer_tests() {
+    dir_tests(&test_data_dir(), &["."], |text, _path| {
+        infer_file(text)
+    });
+}
 
-    infer_all_fns(text);
+fn test_data_dir() -> PathBuf {
+    project_dir().join("crates/ra_hir/src/ty/tests/data")
 }
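`ellipsize` keeps the golden files readable by cutting the middle out of long node text: the byte budget is split around `...`, and both cut points are nudged forward to the nearest `char` boundary so multi-byte characters are never split. With a 20-byte ASCII string and `max_len = 15`, prefix and suffix get six bytes each; the `'{ ...f32; }'` entry in the golden file below shows the same mechanism applied to a whole fn body:

    assert_eq!(
        ellipsize("0123456789abcdefghij".to_string(), 15),
        "012345...efghij"
    );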
diff --git a/crates/ra_hir/src/ty/tests/data/0001_basics.rs b/crates/ra_hir/src/ty/tests/data/0001_basics.rs
new file mode 100644
index 000000000..59a60d031
--- /dev/null
+++ b/crates/ra_hir/src/ty/tests/data/0001_basics.rs
@@ -0,0 +1,11 @@
+
+fn test(a: u32, b: isize, c: !, d: &str) {
+    a;
+    b;
+    c;
+    d;
+    1usize;
+    1isize;
+    "test";
+    1.0f32;
+}
diff --git a/crates/ra_hir/src/ty/tests/data/0001_basics.txt b/crates/ra_hir/src/ty/tests/data/0001_basics.txt
new file mode 100644
index 000000000..0c46f243a
--- /dev/null
+++ b/crates/ra_hir/src/ty/tests/data/0001_basics.txt
@@ -0,0 +1,13 @@
+[33; 34) 'd': [unknown]
+[88; 94) '1isize': [unknown]
+[48; 49) 'a': u32
+[55; 56) 'b': isize
+[112; 118) '1.0f32': [unknown]
+[76; 82) '1usize': [unknown]
+[9; 10) 'a': u32
+[27; 28) 'c': !
+[62; 63) 'c': !
+[17; 18) 'b': isize
+[100; 106) '"test"': [unknown]
+[42; 121) '{ ...f32; }': ()
+[69; 70) 'd': [unknown]
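Three details of this golden file follow from the code above rather than from hand-editing: `dir_tests` writes it on the first run (after printing "No .txt file with expected result, creating..."), the entries come out unordered because `infer_file` iterates the `FxHashMap` directly, and each `[a; b)` range is a byte span in `0001_basics.rs`, whose leading blank line is why the parameter pattern `a` sits at `[9; 10)` while its use in the body is `[48; 49)`. The `[unknown]` entries (the literals and the `&str` parameter) are expressions this first inference pass does not type yet.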
diff --git a/crates/ra_syntax/tests/test.rs b/crates/ra_syntax/tests/test.rs
index 4266864bd..9d94a1a23 100644
--- a/crates/ra_syntax/tests/test.rs
+++ b/crates/ra_syntax/tests/test.rs
@@ -9,6 +9,7 @@ use std::{
     path::{Path, PathBuf, Component},
 };
 
+use test_utils::{project_dir, dir_tests, read_text, collect_tests};
 use ra_syntax::{
     utils::{check_fuzz_invariants, dump_tree},
     SourceFileNode,
@@ -16,7 +17,7 @@ use ra_syntax::{
 
 #[test]
 fn lexer_tests() {
-    dir_tests(&["lexer"], |text, _| {
+    dir_tests(&test_data_dir(), &["lexer"], |text, _| {
         let tokens = ra_syntax::tokenize(text);
         dump_tokens(&tokens, text)
     })
@@ -24,7 +25,7 @@ fn lexer_tests() {
 
 #[test]
 fn parser_tests() {
-    dir_tests(&["parser/inline/ok", "parser/ok"], |text, path| {
+    dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
         let file = SourceFileNode::parse(text);
         let errors = file.errors();
         assert_eq!(
@@ -35,7 +36,7 @@ fn parser_tests() {
         );
         dump_tree(file.syntax())
     });
-    dir_tests(&["parser/err", "parser/inline/err"], |text, path| {
+    dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
         let file = SourceFileNode::parse(text);
         let errors = file.errors();
         assert_ne!(
@@ -50,7 +51,7 @@ fn parser_tests() {
 
 #[test]
 fn parser_fuzz_tests() {
-    for (_, text) in collect_tests(&["parser/fuzz-failures"]) {
+    for (_, text) in collect_tests(&test_data_dir(), &["parser/fuzz-failures"]) {
         check_fuzz_invariants(&text)
     }
 }
@@ -92,102 +93,6 @@ fn self_hosting_parsing() {
         "self_hosting_parsing found too few files - is it running in the right directory?"
     )
 }
-/// Read file and normalize newlines.
-///
-/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
-///
-/// ```
-/// let s = "
-/// ";
-/// assert_eq!(s.as_bytes(), &[10]);
-/// ```
-///
-/// so this should always be correct.
-fn read_text(path: &Path) -> String {
-    fs::read_to_string(path)
-        .expect(&format!("File at {:?} should be valid", path))
-        .replace("\r\n", "\n")
-}
-
-fn dir_tests<F>(paths: &[&str], f: F)
-where
-    F: Fn(&str, &Path) -> String,
-{
-    for (path, input_code) in collect_tests(paths) {
-        let parse_tree = f(&input_code, &path);
-        let path = path.with_extension("txt");
-        if !path.exists() {
-            println!("\nfile: {}", path.display());
-            println!("No .txt file with expected result, creating...\n");
-            println!("{}\n{}", input_code, parse_tree);
-            fs::write(&path, &parse_tree).unwrap();
-            panic!("No expected result")
-        }
-        let expected = read_text(&path);
-        let expected = expected.as_str();
-        let parse_tree = parse_tree.as_str();
-        assert_equal_text(expected, parse_tree, &path);
-    }
-}
-
-const REWRITE: bool = false;
-
-fn assert_equal_text(expected: &str, actual: &str, path: &Path) {
-    if expected == actual {
-        return;
-    }
-    let dir = project_dir();
-    let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
-    if expected.trim() == actual.trim() {
-        println!("whitespace difference, rewriting");
-        println!("file: {}\n", pretty_path.display());
-        fs::write(path, actual).unwrap();
-        return;
-    }
-    if REWRITE {
-        println!("rewriting {}", pretty_path.display());
-        fs::write(path, actual).unwrap();
-        return;
-    }
-    assert_eq_text!(expected, actual, "file: {}", pretty_path.display());
-}
-
-fn collect_tests(paths: &[&str]) -> Vec<(PathBuf, String)> {
-    paths
-        .iter()
-        .flat_map(|path| {
-            let path = test_data_dir().join(path);
-            test_from_dir(&path).into_iter()
-        })
-        .map(|path| {
-            let text = read_text(&path);
-            (path, text)
-        })
-        .collect()
-}
-
-fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
-    let mut acc = Vec::new();
-    for file in fs::read_dir(&dir).unwrap() {
-        let file = file.unwrap();
-        let path = file.path();
-        if path.extension().unwrap_or_default() == "rs" {
-            acc.push(path);
-        }
-    }
-    acc.sort();
-    acc
-}
-
-fn project_dir() -> PathBuf {
-    let dir = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(dir)
-        .parent()
-        .unwrap()
-        .parent()
-        .unwrap()
-        .to_owned()
-}
-
 
 fn test_data_dir() -> PathBuf {
     project_dir().join("crates/ra_syntax/tests/data")
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs
index beb936c61..012b1d0b4 100644
--- a/crates/test_utils/src/lib.rs
+++ b/crates/test_utils/src/lib.rs
@@ -1,4 +1,6 @@
 use std::fmt;
+use std::fs;
+use std::path::{Path, PathBuf};
 
 use itertools::Itertools;
 use text_unit::{TextRange, TextUnit};
@@ -262,3 +264,100 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a
         _ => Some((expected, actual)),
     }
 }
+
+pub fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], f: F)
+where
+    F: Fn(&str, &Path) -> String,
+{
+    for (path, input_code) in collect_tests(test_data_dir, paths) {
+        let parse_tree = f(&input_code, &path);
+        let path = path.with_extension("txt");
+        if !path.exists() {
+            println!("\nfile: {}", path.display());
+            println!("No .txt file with expected result, creating...\n");
+            println!("{}\n{}", input_code, parse_tree);
+            fs::write(&path, &parse_tree).unwrap();
+            panic!("No expected result")
+        }
+        let expected = read_text(&path);
+        let expected = expected.as_str();
+        let parse_tree = parse_tree.as_str();
+        assert_equal_text(expected, parse_tree, &path);
+    }
+}
+
+pub fn collect_tests(test_data_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {
+    paths
+        .iter()
+        .flat_map(|path| {
+            let path = test_data_dir.to_owned().join(path);
+            test_from_dir(&path).into_iter()
+        })
+        .map(|path| {
+            let text = read_text(&path);
+            (path, text)
+        })
+        .collect()
+}
+
+fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
+    let mut acc = Vec::new();
+    for file in fs::read_dir(&dir).unwrap() {
+        let file = file.unwrap();
+        let path = file.path();
+        if path.extension().unwrap_or_default() == "rs" {
+            acc.push(path);
+        }
+    }
+    acc.sort();
+    acc
+}
+
+pub fn project_dir() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir)
+        .parent()
+        .unwrap()
+        .parent()
+        .unwrap()
+        .to_owned()
+}
+
+/// Read file and normalize newlines.
+///
+/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
+///
+/// ```
+/// let s = "
+/// ";
+/// assert_eq!(s.as_bytes(), &[10]);
+/// ```
+///
+/// so this should always be correct.
+pub fn read_text(path: &Path) -> String {
+    fs::read_to_string(path)
+        .expect(&format!("File at {:?} should be valid", path))
+        .replace("\r\n", "\n")
+}
+
+const REWRITE: bool = false;
+
+fn assert_equal_text(expected: &str, actual: &str, path: &Path) {
+    if expected == actual {
+        return;
+    }
+    let dir = project_dir();
+    let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
+    if expected.trim() == actual.trim() {
+        println!("whitespace difference, rewriting");
+        println!("file: {}\n", pretty_path.display());
+        fs::write(path, actual).unwrap();
+        return;
+    }
+    if REWRITE {
+        println!("rewriting {}", pretty_path.display());
+        fs::write(path, actual).unwrap();
+        return;
+    }
+    assert_eq_text!(expected, actual, "file: {}", pretty_path.display());
+}
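The helpers land here verbatim from `ra_syntax/tests/test.rs`, with `dir_tests`, `collect_tests`, `project_dir`, and `read_text` made `pub` and the hard-coded `test_data_dir()` replaced by an explicit parameter, which is what lets `ra_hir` reuse them. A sketch of a golden-file suite in some other crate (the crate and directory names here are hypothetical):

    use std::path::Path;
    use test_utils::{dir_tests, project_dir};

    #[test]
    fn golden_tests() {
        dir_tests(
            &project_dir().join("crates/my_crate/tests/data"),
            &["."],
            // Maps input text to the String compared against the
            // neighboring .txt file (bootstrapped on first run).
            |text: &str, _path: &Path| text.to_uppercase(),
        );
    }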