Diffstat (limited to 'crates/test_utils')
-rw-r--r-- | crates/test_utils/Cargo.toml     |   1
-rw-r--r-- | crates/test_utils/src/fixture.rs |  10
-rw-r--r-- | crates/test_utils/src/lib.rs     | 221
-rw-r--r-- | crates/test_utils/src/mark.rs    |   4
4 files changed, 112 insertions, 124 deletions
diff --git a/crates/test_utils/Cargo.toml b/crates/test_utils/Cargo.toml
index 6821db1e8..e719f4f7c 100644
--- a/crates/test_utils/Cargo.toml
+++ b/crates/test_utils/Cargo.toml
@@ -3,6 +3,7 @@ edition = "2018"
 name = "test_utils"
 version = "0.1.0"
 authors = ["rust-analyzer developers"]
+license = "MIT OR Apache-2.0"
 
 [lib]
 doctest = false
diff --git a/crates/test_utils/src/fixture.rs b/crates/test_utils/src/fixture.rs
index fad8f7e2c..e40b61a94 100644
--- a/crates/test_utils/src/fixture.rs
+++ b/crates/test_utils/src/fixture.rs
@@ -2,7 +2,7 @@
 //! rust-analyzer database from a single string.
 
 use rustc_hash::FxHashMap;
-use stdx::{lines_with_ends, split_delim, trim_indent};
+use stdx::{lines_with_ends, split_once, trim_indent};
 
 #[derive(Debug, Eq, PartialEq)]
 pub struct Fixture {
@@ -62,7 +62,7 @@ impl Fixture {
         let components = meta.split_ascii_whitespace().collect::<Vec<_>>();
 
         let path = components[0].to_string();
-        assert!(path.starts_with("/"));
+        assert!(path.starts_with('/'));
 
         let mut krate = None;
         let mut deps = Vec::new();
@@ -71,14 +71,14 @@ impl Fixture {
         let mut cfg_key_values = Vec::new();
         let mut env = FxHashMap::default();
         for component in components[1..].iter() {
-            let (key, value) = split_delim(component, ':').unwrap();
+            let (key, value) = split_once(component, ':').unwrap();
             match key {
                 "crate" => krate = Some(value.to_string()),
                 "deps" => deps = value.split(',').map(|it| it.to_string()).collect(),
                 "edition" => edition = Some(value.to_string()),
                 "cfg" => {
                     for entry in value.split(',') {
-                        match split_delim(entry, '=') {
+                        match split_once(entry, '=') {
                             Some((k, v)) => cfg_key_values.push((k.to_string(), v.to_string())),
                             None => cfg_atoms.push(entry.to_string()),
                         }
@@ -86,7 +86,7 @@ impl Fixture {
                 }
                 "env" => {
                     for key in value.split(',') {
-                        if let Some((k, v)) = split_delim(key, '=') {
+                        if let Some((k, v)) = split_once(key, '=') {
                             env.insert(k.into(), v.into());
                         }
                     }
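
Note on the rename above: the fixture meta parser now goes through `stdx::split_once`, whose definition is outside this diff. A minimal sketch of the assumed behaviour (split at the first occurrence of the delimiter, mirroring what later stabilised as `str::split_once`); the helper body here is illustrative, not the crate's actual source:

    /// Split `haystack` at the first occurrence of `delim`, dropping the delimiter.
    fn split_once(haystack: &str, delim: char) -> Option<(&str, &str)> {
        let idx = haystack.find(delim)?;
        Some((&haystack[..idx], &haystack[idx + delim.len_utf8()..]))
    }

    #[test]
    fn split_once_matches_fixture_parser_expectations() {
        assert_eq!(split_once("crate:foo", ':'), Some(("crate", "foo")));
        assert_eq!(split_once("KEY=VALUE", '='), Some(("KEY", "VALUE")));
        assert_eq!(split_once("atom", '='), None); // cfg atoms have no '='
    }
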
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs
index caf847273..ad586c882 100644
--- a/crates/test_utils/src/lib.rs
+++ b/crates/test_utils/src/lib.rs
@@ -11,8 +11,9 @@ pub mod mark;
 mod fixture;
 
 use std::{
+    convert::{TryFrom, TryInto},
     env, fs,
-    path::{Path, PathBuf},
+    path::PathBuf,
 };
 
 use serde_json::Value;
@@ -117,8 +118,8 @@ pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) {
 }
 
 /// Extracts ranges, marked with `<tag> </tag>` pairs from the `text`
-pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) {
-    let open = format!("<{}>", tag);
+pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option<String>)>, String) {
+    let open = format!("<{}", tag);
     let close = format!("</{}>", tag);
     let mut ranges = Vec::new();
     let mut res = String::new();
@@ -133,22 +134,35 @@ pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) {
                 res.push_str(&text[..i]);
                 text = &text[i..];
                 if text.starts_with(&open) {
-                    text = &text[open.len()..];
+                    let close_open = text.find('>').unwrap();
+                    let attr = text[open.len()..close_open].trim();
+                    let attr = if attr.is_empty() { None } else { Some(attr.to_string()) };
+                    text = &text[close_open + '>'.len_utf8()..];
                     let from = TextSize::of(&res);
-                    stack.push(from);
+                    stack.push((from, attr));
                 } else if text.starts_with(&close) {
                     text = &text[close.len()..];
-                    let from = stack.pop().unwrap_or_else(|| panic!("unmatched </{}>", tag));
+                    let (from, attr) =
+                        stack.pop().unwrap_or_else(|| panic!("unmatched </{}>", tag));
                     let to = TextSize::of(&res);
-                    ranges.push(TextRange::new(from, to));
+                    ranges.push((TextRange::new(from, to), attr));
+                } else {
+                    res.push('<');
+                    text = &text['<'.len_utf8()..];
                 }
             }
         }
     }
     assert!(stack.is_empty(), "unmatched <{}>", tag);
-    ranges.sort_by_key(|r| (r.start(), r.end()));
+    ranges.sort_by_key(|r| (r.0.start(), r.0.end()));
     (ranges, res)
 }
+#[test]
+fn test_extract_tags() {
+    let (tags, text) = extract_tags(r#"<tag fn>fn <tag>main</tag>() {}</tag>"#, "tag");
+    let actual = tags.into_iter().map(|(range, attr)| (&text[range], attr)).collect::<Vec<_>>();
+    assert_eq!(actual, vec![("fn main() {}", Some("fn".into())), ("main", None),]);
+}
 
 /// Inserts `<|>` marker into the `text` at `offset`.
 pub fn add_cursor(text: &str, offset: TextSize) -> String {
@@ -165,15 +179,80 @@ pub fn extract_annotations(text: &str) -> Vec<(TextRange, String)> {
     let mut res = Vec::new();
     let mut prev_line_start: Option<TextSize> = None;
     let mut line_start: TextSize = 0.into();
+    let mut prev_line_annotations: Vec<(TextSize, usize)> = Vec::new();
     for line in lines_with_ends(text) {
-        if let Some(idx) = line.find("//^") {
-            let offset = prev_line_start.unwrap() + TextSize::of(&line[..idx + "//".len()]);
-            let data = line[idx + "//^".len()..].trim().to_string();
-            res.push((TextRange::at(offset, 1.into()), data))
+        let mut this_line_annotations = Vec::new();
+        if let Some(idx) = line.find("//") {
+            let annotation_offset = TextSize::of(&line[..idx + "//".len()]);
+            for annotation in extract_line_annotations(&line[idx + "//".len()..]) {
+                match annotation {
+                    LineAnnotation::Annotation { mut range, content } => {
+                        range += annotation_offset;
+                        this_line_annotations.push((range.end(), res.len()));
+                        res.push((range + prev_line_start.unwrap(), content))
+                    }
+                    LineAnnotation::Continuation { mut offset, content } => {
+                        offset += annotation_offset;
+                        let &(_, idx) = prev_line_annotations
+                            .iter()
+                            .find(|&&(off, _idx)| off == offset)
+                            .unwrap();
+                        res[idx].1.push('\n');
+                        res[idx].1.push_str(&content);
+                        res[idx].1.push('\n');
+                    }
+                }
+            }
         }
+
         prev_line_start = Some(line_start);
         line_start += TextSize::of(line);
+
+        prev_line_annotations = this_line_annotations;
+    }
+    res
+}
+
+enum LineAnnotation {
+    Annotation { range: TextRange, content: String },
+    Continuation { offset: TextSize, content: String },
+}
+
+fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> {
+    let mut res = Vec::new();
+    let mut offset: TextSize = 0.into();
+    let marker: fn(char) -> bool = if line.contains('^') { |c| c == '^' } else { |c| c == '|' };
+    loop {
+        match line.find(marker) {
+            Some(idx) => {
+                offset += TextSize::try_from(idx).unwrap();
+                line = &line[idx..];
+            }
+            None => break,
+        };
+
+        let mut len = line.chars().take_while(|&it| it == '^').count();
+        let mut continuation = false;
+        if len == 0 {
+            assert!(line.starts_with('|'));
+            continuation = true;
+            len = 1;
+        }
+        let range = TextRange::at(offset, len.try_into().unwrap());
+        let next = line[len..].find(marker).map_or(line.len(), |it| it + len);
+        let content = line[len..][..next - len].trim().to_string();
+
+        let annotation = if continuation {
+            LineAnnotation::Continuation { offset: range.end(), content }
+        } else {
+            LineAnnotation::Annotation { range, content }
+        };
+        res.push(annotation);
+
+        line = &line[next..];
+        offset += TextSize::try_from(next).unwrap();
     }
+
     res
 }
 
@@ -182,17 +261,21 @@ fn test_extract_annotations() {
     let text = stdx::trim_indent(
         r#"
 fn main() {
-    let x = 92;
-      //^ def
-    z + 1
-} //^ i32
+    let (x,     y) = (9, 2);
+       //^ def  ^ def
+    zoo + 1
+} //^^^ type:
+  //  | i32
 "#,
     );
     let res = extract_annotations(&text)
         .into_iter()
         .map(|(range, ann)| (&text[range], ann))
         .collect::<Vec<_>>();
-    assert_eq!(res, vec![("x", "def".into()), ("z", "i32".into()),]);
+    assert_eq!(
+        res,
+        vec![("x", "def".into()), ("y", "def".into()), ("zoo", "type:\ni32\n".into()),]
+    );
 }
 
 // Comparison functionality borrowed from cargo:
@@ -282,85 +365,6 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a
     }
 }
 
-/// Calls callback `f` with input code and file paths for each `.rs` file in `test_data_dir`
-/// subdirectories defined by `paths`.
-///
-/// If the content of the matching output file differs from the output of `f()`
-/// the test will fail.
-///
-/// If there is no matching output file it will be created and filled with the
-/// output of `f()`, but the test will fail.
-pub fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], outfile_extension: &str, f: F)
-where
-    F: Fn(&str, &Path) -> String,
-{
-    for (path, input_code) in collect_rust_files(test_data_dir, paths) {
-        let actual = f(&input_code, &path);
-        let path = path.with_extension(outfile_extension);
-        if !path.exists() {
-            println!("\nfile: {}", path.display());
-            println!("No .txt file with expected result, creating...\n");
-            println!("{}\n{}", input_code, actual);
-            fs::write(&path, &actual).unwrap();
-            panic!("No expected result");
-        }
-        let expected = read_text(&path);
-        assert_equal_text(&expected, &actual, &path);
-    }
-}
-
-/// Collects all `.rs` files from `dir` subdirectories defined by `paths`.
-pub fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {
-    paths
-        .iter()
-        .flat_map(|path| {
-            let path = root_dir.to_owned().join(path);
-            rust_files_in_dir(&path).into_iter()
-        })
-        .map(|path| {
-            let text = read_text(&path);
-            (path, text)
-        })
-        .collect()
-}
-
-/// Collects paths to all `.rs` files from `dir` in a sorted `Vec<PathBuf>`.
-fn rust_files_in_dir(dir: &Path) -> Vec<PathBuf> {
-    let mut acc = Vec::new();
-    for file in fs::read_dir(&dir).unwrap() {
-        let file = file.unwrap();
-        let path = file.path();
-        if path.extension().unwrap_or_default() == "rs" {
-            acc.push(path);
-        }
-    }
-    acc.sort();
-    acc
-}
-
-/// Returns the path to the root directory of `rust-analyzer` project.
-pub fn project_dir() -> PathBuf {
-    let dir = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
-}
-
-/// Read file and normalize newlines.
-///
-/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
-///
-/// ```
-/// let s = "
-/// ";
-/// assert_eq!(s.as_bytes(), &[10]);
-/// ```
-///
-/// so this should always be correct.
-pub fn read_text(path: &Path) -> String {
-    fs::read_to_string(path)
-        .unwrap_or_else(|_| panic!("File at {:?} should be valid", path))
-        .replace("\r\n", "\n")
-}
-
 /// Returns `false` if slow tests should not run, otherwise returns `true` and
 /// also creates a file at `./target/.slow_tests_cookie` which serves as a flag
 /// that slow tests did run.
@@ -375,25 +379,8 @@ pub fn skip_slow_tests() -> bool {
     should_skip
 }
 
-/// Asserts that `expected` and `actual` strings are equal. If they differ only
-/// in trailing or leading whitespace the test won't fail and
-/// the contents of `actual` will be written to the file located at `path`.
-fn assert_equal_text(expected: &str, actual: &str, path: &Path) {
-    if expected == actual {
-        return;
-    }
-    let dir = project_dir();
-    let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
-    if expected.trim() == actual.trim() {
-        println!("whitespace difference, rewriting");
-        println!("file: {}\n", pretty_path.display());
-        fs::write(path, actual).unwrap();
-        return;
-    }
-    if env::var("UPDATE_EXPECTATIONS").is_ok() {
-        println!("rewriting {}", pretty_path.display());
-        fs::write(path, actual).unwrap();
-        return;
-    }
-    assert_eq_text!(expected, actual, "file: {}", pretty_path.display());
+/// Returns the path to the root directory of `rust-analyzer` project.
+pub fn project_dir() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
 }
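
Note on the annotation rework above: `extract_annotations` now accepts `^` runs of any length and `| ...` continuation lines, whose text is appended to the previous line's annotation separated by newlines. A hypothetical consumer sketch; `compute_at` is an invented stand-in for whatever analysis is under test, and the imports assume this crate plus the `text-size` crate used throughout rust-analyzer:

    use test_utils::extract_annotations;
    use text_size::TextRange;

    /// Compare whatever `compute_at` produces against the `//^^^` / `// |` annotations.
    fn check_against_annotations(fixture: &str, compute_at: impl Fn(TextRange) -> String) {
        for (range, expected) in extract_annotations(fixture) {
            // Multi-line annotations arrive joined with '\n', so compare trimmed.
            assert_eq!(compute_at(range).trim(), expected.trim(), "mismatch at {:?}", range);
        }
    }
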
diff --git a/crates/test_utils/src/mark.rs b/crates/test_utils/src/mark.rs
index 7c309a894..97f5a93ad 100644
--- a/crates/test_utils/src/mark.rs
+++ b/crates/test_utils/src/mark.rs
@@ -62,7 +62,7 @@ pub struct MarkChecker {
 
 impl MarkChecker {
     pub fn new(mark: &'static AtomicUsize) -> MarkChecker {
-        let value_on_entry = mark.load(Ordering::SeqCst);
+        let value_on_entry = mark.load(Ordering::Relaxed);
         MarkChecker { mark, value_on_entry }
     }
 }
@@ -72,7 +72,7 @@ impl Drop for MarkChecker {
         if std::thread::panicking() {
             return;
         }
-        let value_on_exit = self.mark.load(Ordering::SeqCst);
+        let value_on_exit = self.mark.load(Ordering::Relaxed);
         assert!(value_on_exit > self.value_on_entry, "mark was not hit")
     }
 }
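
Note on the `Ordering` change above: a mark is incremented and checked on the same thread within a single test, so `Relaxed` loads are sufficient and no cross-thread ordering is relied on. A self-contained sketch of the hit/check pattern (names are illustrative, not the crate's actual macros):

    use std::sync::atomic::{AtomicUsize, Ordering};

    static HIT_SPECIAL_CASE: AtomicUsize = AtomicUsize::new(0);

    fn code_under_test(flag: bool) {
        if flag {
            // The "hit": bump the counter when this branch is taken.
            HIT_SPECIAL_CASE.fetch_add(1, Ordering::Relaxed);
        }
    }

    #[test]
    fn special_case_is_exercised() {
        // The "check": remember the value on entry, assert it advanced on exit.
        let on_entry = HIT_SPECIAL_CASE.load(Ordering::Relaxed);
        code_under_test(true);
        assert!(HIT_SPECIAL_CASE.load(Ordering::Relaxed) > on_entry, "mark was not hit");
    }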