From da091b130347c4d6d8c75acb8e65c30a17dc1f5e Mon Sep 17 00:00:00 2001 From: veetaha Date: Mon, 6 Apr 2020 14:04:26 +0300 Subject: Migrate tests .txt -> .rast The syntax tree output files now use .rast extension (rust-analyzer syntax tree or rust abstract syntax tree (whatever)). This format has an editors/code/ra_syntax_tree.tmGrammar.json declaration that supplies nice syntax highlighting for .rast files. --- crates/ra_syntax/src/tests.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) (limited to 'crates/ra_syntax/src/tests.rs') diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs index 6a8cb6bb5..355843b94 100644 --- a/crates/ra_syntax/src/tests.rs +++ b/crates/ra_syntax/src/tests.rs @@ -3,7 +3,7 @@ use std::{ path::{Component, Path, PathBuf}, }; -use test_utils::{collect_tests, dir_tests, project_dir, read_text}; +use test_utils::{collect_rust_files, dir_tests, project_dir, read_text}; use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextUnit, Token}; @@ -13,12 +13,12 @@ fn lexer_tests() { // * Add tests for unicode escapes in byte-character and [raw]-byte-string literals // * Add tests for unescape errors - dir_tests(&test_data_dir(), &["lexer/ok"], |text, path| { + dir_tests(&test_data_dir(), &["lexer/ok"], "txt", |text, path| { let (tokens, errors) = tokenize(text); assert_errors_are_absent(&errors, path); dump_tokens_and_errors(&tokens, &errors, text) }); - dir_tests(&test_data_dir(), &["lexer/err"], |text, path| { + dir_tests(&test_data_dir(), &["lexer/err"], "txt", |text, path| { let (tokens, errors) = tokenize(text); assert_errors_are_present(&errors, path); dump_tokens_and_errors(&tokens, &errors, text) @@ -40,13 +40,13 @@ fn main() { #[test] fn parser_tests() { - dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| { + dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], "rast", |text, path| { let parse = SourceFile::parse(text); let errors = parse.errors(); 
assert_errors_are_absent(&errors, path); parse.debug_dump() }); - dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| { + dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], "rast", |text, path| { let parse = SourceFile::parse(text); let errors = parse.errors(); assert_errors_are_present(&errors, path); @@ -56,14 +56,14 @@ fn parser_tests() { #[test] fn parser_fuzz_tests() { - for (_, text) in collect_tests(&test_data_dir(), &["parser/fuzz-failures"]) { + for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) { fuzz::check_parser(&text) } } #[test] fn reparse_fuzz_tests() { - for (_, text) in collect_tests(&test_data_dir(), &["reparse/fuzz-failures"]) { + for (_, text) in collect_rust_files(&test_data_dir(), &["reparse/fuzz-failures"]) { let check = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap(); println!("{:?}", check); check.run(); -- cgit v1.2.3