aboutsummaryrefslogtreecommitdiff
path: root/crates/libsyntax2/tests/test
diff options
context:
space:
mode:
authorAleksey Kladov <[email protected]>2018-08-11 08:03:03 +0100
committerAleksey Kladov <[email protected]>2018-08-11 08:03:03 +0100
commitce898183b83eb2cf86c8c6ceeeac16ef68fdc802 (patch)
tree2fa5c6fb6ac70f6b24d2463c2f985005ca17260f /crates/libsyntax2/tests/test
parentb18d2882f4f05078abfcf0595d08c226860de6c3 (diff)
Simplify tests
Diffstat (limited to 'crates/libsyntax2/tests/test')
-rw-r--r--crates/libsyntax2/tests/test/main.rs143
1 file changed, 143 insertions, 0 deletions
diff --git a/crates/libsyntax2/tests/test/main.rs b/crates/libsyntax2/tests/test/main.rs
new file mode 100644
index 000000000..18e5bc4d4
--- /dev/null
+++ b/crates/libsyntax2/tests/test/main.rs
@@ -0,0 +1,143 @@
1extern crate libsyntax2;
2extern crate difference;
3
4use std::{
5 fs,
6 path::{Path, PathBuf},
7 fmt::Write,
8};
9
10use difference::Changeset;
11
/// Runs the lexer over every fixture under `data/lexer` and compares
/// the token dump against the checked-in `.txt` expectations.
#[test]
fn lexer_tests() {
    dir_tests(&["lexer"], |text| dump_tokens(&libsyntax2::tokenize(text), text))
}
19
/// Runs the parser over the inline, ok, and err fixture directories and
/// compares the syntax-tree dump against the checked-in `.txt` expectations.
#[test]
fn parser_tests() {
    dir_tests(&["parser/inline", "parser/ok", "parser/err"], |text| {
        libsyntax2::utils::dump_tree(&libsyntax2::parse(text))
    })
}
27
28
/// Reads a file into a `String`, normalizing `\r\n` line endings to `\n`.
///
/// `rustc` normalizes CRLF to `\n` when lexing source (a bare `"\n"`
/// literal is the single byte 10), so both the fixture inputs and the
/// expected outputs are compared with the same normalization applied.
fn read_text(path: &Path) -> String {
    let raw = fs::read_to_string(path).unwrap();
    raw.replace("\r\n", "\n")
}
43
44pub fn dir_tests<F>(paths: &[&str], f: F)
45 where
46 F: Fn(&str) -> String,
47{
48 for path in collect_tests(paths) {
49 let input_code = read_text(&path);
50 let parse_tree = f(&input_code);
51 let path = path.with_extension("txt");
52 if !path.exists() {
53 println!("\nfile: {}", path.display());
54 println!("No .txt file with expected result, creating...\n");
55 println!("{}\n{}", input_code, parse_tree);
56 fs::write(&path, parse_tree).unwrap();
57 panic!("No expected result")
58 }
59 let expected = read_text(&path);
60 let expected = expected.as_str();
61 let parse_tree = parse_tree.as_str();
62 assert_equal_text(expected, parse_tree, &path);
63 }
64}
65
66fn assert_equal_text(expected: &str, actual: &str, path: &Path) {
67 if expected != actual {
68 print_difference(expected, actual, path)
69 }
70}
71
72fn collect_tests(paths: &[&str]) -> Vec<PathBuf> {
73 paths
74 .iter()
75 .flat_map(|path| {
76 let path = test_data_dir().join(path);
77 test_from_dir(&path).into_iter()
78 })
79 .collect()
80}
81
/// Lists the `*.rs` files directly inside `dir`, sorted by path.
///
/// Panics if the directory cannot be read — a broken fixture layout
/// should fail the test run loudly rather than silently skip tests.
fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
    let mut tests: Vec<PathBuf> = fs::read_dir(&dir)
        .unwrap()
        .map(|entry| entry.unwrap().path())
        .filter(|path| path.extension().unwrap_or_default() == "rs")
        .collect();
    tests.sort();
    tests
}
94
95const REWRITE: bool = false;
96
97fn print_difference(expected: &str, actual: &str, path: &Path) {
98 let dir = project_dir();
99 let path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
100 if expected.trim() == actual.trim() {
101 println!("whitespace difference, rewriting");
102 println!("file: {}\n", path.display());
103 fs::write(path, actual).unwrap();
104 return;
105 }
106 if REWRITE {
107 println!("rewriting {}", path.display());
108 fs::write(path, actual).unwrap();
109 return;
110 }
111 let changeset = Changeset::new(actual, expected, "\n");
112 println!("Expected:\n{}\n\nActual:\n{}\n", expected, actual);
113 print!("{}", changeset);
114 println!("file: {}\n", path.display());
115 panic!("Comparison failed")
116}
117
118fn project_dir() -> PathBuf {
119 let dir = env!("CARGO_MANIFEST_DIR");
120 PathBuf::from(dir)
121 .parent()
122 .unwrap()
123 .parent()
124 .unwrap()
125 .to_owned()
126}
127
128fn test_data_dir() -> PathBuf {
129 project_dir().join("crates/libsyntax2/tests/data")
130}
131
132fn dump_tokens(tokens: &[libsyntax2::Token], text: &str) -> String {
133 let mut acc = String::new();
134 let mut offset = 0;
135 for token in tokens {
136 let len: u32 = token.len.into();
137 let len = len as usize;
138 let token_text = &text[offset..offset + len];
139 offset += len;
140 write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap()
141 }
142 acc
143}