diff options
author | Benjamin Coenen <[email protected]> | 2020-04-07 16:59:09 +0100 |
---|---|---|
committer | Benjamin Coenen <[email protected]> | 2020-04-07 16:59:09 +0100 |
commit | 18a5e164838e1dc2abcc6b79d4fc2f96ffd2507c (patch) | |
tree | bc80b5c49c3b7ba31c7fe967bb34fe14bac9d5ed /crates/ra_syntax/src | |
parent | ab864ed259c10ff51f7c9c3421d098eeea7b0245 (diff) | |
parent | 33c364b545350134b945fbca834194fd1a28fe08 (diff) |
Merge branch 'master' of github.com:rust-analyzer/rust-analyzer
Diffstat (limited to 'crates/ra_syntax/src')
-rw-r--r-- | crates/ra_syntax/src/tests.rs | 14
-rw-r--r-- | crates/ra_syntax/src/validation.rs | 12
2 files changed, 13 insertions(+), 13 deletions(-)
diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs
index 6a8cb6bb5..355843b94 100644
--- a/crates/ra_syntax/src/tests.rs
+++ b/crates/ra_syntax/src/tests.rs
@@ -3,7 +3,7 @@ use std::{
3 | path::{Component, Path, PathBuf}, | 3 | path::{Component, Path, PathBuf}, |
4 | }; | 4 | }; |
5 | 5 | ||
6 | use test_utils::{collect_tests, dir_tests, project_dir, read_text}; | 6 | use test_utils::{collect_rust_files, dir_tests, project_dir, read_text}; |
7 | 7 | ||
8 | use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextUnit, Token}; | 8 | use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextUnit, Token}; |
9 | 9 | ||
@@ -13,12 +13,12 @@ fn lexer_tests() {
13 | // * Add tests for unicode escapes in byte-character and [raw]-byte-string literals | 13 | // * Add tests for unicode escapes in byte-character and [raw]-byte-string literals |
14 | // * Add tests for unescape errors | 14 | // * Add tests for unescape errors |
15 | 15 | ||
16 | dir_tests(&test_data_dir(), &["lexer/ok"], |text, path| { | 16 | dir_tests(&test_data_dir(), &["lexer/ok"], "txt", |text, path| { |
17 | let (tokens, errors) = tokenize(text); | 17 | let (tokens, errors) = tokenize(text); |
18 | assert_errors_are_absent(&errors, path); | 18 | assert_errors_are_absent(&errors, path); |
19 | dump_tokens_and_errors(&tokens, &errors, text) | 19 | dump_tokens_and_errors(&tokens, &errors, text) |
20 | }); | 20 | }); |
21 | dir_tests(&test_data_dir(), &["lexer/err"], |text, path| { | 21 | dir_tests(&test_data_dir(), &["lexer/err"], "txt", |text, path| { |
22 | let (tokens, errors) = tokenize(text); | 22 | let (tokens, errors) = tokenize(text); |
23 | assert_errors_are_present(&errors, path); | 23 | assert_errors_are_present(&errors, path); |
24 | dump_tokens_and_errors(&tokens, &errors, text) | 24 | dump_tokens_and_errors(&tokens, &errors, text) |
@@ -40,13 +40,13 @@ fn main() {
40 | 40 | ||
41 | #[test] | 41 | #[test] |
42 | fn parser_tests() { | 42 | fn parser_tests() { |
43 | dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| { | 43 | dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], "rast", |text, path| { |
44 | let parse = SourceFile::parse(text); | 44 | let parse = SourceFile::parse(text); |
45 | let errors = parse.errors(); | 45 | let errors = parse.errors(); |
46 | assert_errors_are_absent(&errors, path); | 46 | assert_errors_are_absent(&errors, path); |
47 | parse.debug_dump() | 47 | parse.debug_dump() |
48 | }); | 48 | }); |
49 | dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| { | 49 | dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], "rast", |text, path| { |
50 | let parse = SourceFile::parse(text); | 50 | let parse = SourceFile::parse(text); |
51 | let errors = parse.errors(); | 51 | let errors = parse.errors(); |
52 | assert_errors_are_present(&errors, path); | 52 | assert_errors_are_present(&errors, path); |
@@ -56,14 +56,14 @@ fn parser_tests() {
56 | 56 | ||
57 | #[test] | 57 | #[test] |
58 | fn parser_fuzz_tests() { | 58 | fn parser_fuzz_tests() { |
59 | for (_, text) in collect_tests(&test_data_dir(), &["parser/fuzz-failures"]) { | 59 | for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) { |
60 | fuzz::check_parser(&text) | 60 | fuzz::check_parser(&text) |
61 | } | 61 | } |
62 | } | 62 | } |
63 | 63 | ||
64 | #[test] | 64 | #[test] |
65 | fn reparse_fuzz_tests() { | 65 | fn reparse_fuzz_tests() { |
66 | for (_, text) in collect_tests(&test_data_dir(), &["reparse/fuzz-failures"]) { | 66 | for (_, text) in collect_rust_files(&test_data_dir(), &["reparse/fuzz-failures"]) { |
67 | let check = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap(); | 67 | let check = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap(); |
68 | println!("{:?}", check); | 68 | println!("{:?}", check); |
69 | check.run(); | 69 | check.run(); |
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs
index 7915cf8cb..f85b3e61b 100644
--- a/crates/ra_syntax/src/validation.rs
+++ b/crates/ra_syntax/src/validation.rs
@@ -88,12 +88,12 @@ pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
88 | for node in root.descendants() { | 88 | for node in root.descendants() { |
89 | match_ast! { | 89 | match_ast! { |
90 | match node { | 90 | match node { |
91 | ast::Literal(it) => { validate_literal(it, &mut errors) }, | 91 | ast::Literal(it) => validate_literal(it, &mut errors), |
92 | ast::BlockExpr(it) => { block::validate_block_expr(it, &mut errors) }, | 92 | ast::BlockExpr(it) => block::validate_block_expr(it, &mut errors), |
93 | ast::FieldExpr(it) => { validate_numeric_name(it.name_ref(), &mut errors) }, | 93 | ast::FieldExpr(it) => validate_numeric_name(it.name_ref(), &mut errors), |
94 | ast::RecordField(it) => { validate_numeric_name(it.name_ref(), &mut errors) }, | 94 | ast::RecordField(it) => validate_numeric_name(it.name_ref(), &mut errors), |
95 | ast::Visibility(it) => { validate_visibility(it, &mut errors) }, | 95 | ast::Visibility(it) => validate_visibility(it, &mut errors), |
96 | ast::RangeExpr(it) => { validate_range_expr(it, &mut errors) }, | 96 | ast::RangeExpr(it) => validate_range_expr(it, &mut errors), |
97 | _ => (), | 97 | _ => (), |
98 | } | 98 | } |
99 | } | 99 | } |