author     bors[bot] <bors[bot]@users.noreply.github.com>   2018-02-03 19:44:37 +0000
committer  bors[bot] <bors[bot]@users.noreply.github.com>   2018-02-03 19:44:37 +0000
commit     75b59bf519b8dcf1a89327d871dab2c98af61ed9 (patch)
tree       6322bf271a4fd5e838098a0f523739fc9a0c9416
parent     3c70ae2e26f654ce536f00fde8c159e4ebe6901a (diff)
parent     b072e68ad5bf1687aebd2ff1c7bf327d38a6a2f2 (diff)
Merge #39
39: Tools r=matklad a=matklad
closes #34
bors r+
-rw-r--r-- | .cargo/config | 1
-rw-r--r-- | appveyor.yml | 1
-rw-r--r-- | docs/TESTS.md | 16
-rw-r--r-- | docs/TOOLS.md | 10
-rw-r--r-- | src/parser/event_parser/grammar/items/mod.rs | 4
-rw-r--r-- | tests/data/parser/inline/0001_const_unsafe_fn.rs | 1
-rw-r--r-- | tests/data/parser/inline/0001_const_unsafe_fn.txt | 15
-rw-r--r-- | tests/data/parser/inline/0002_const_fn.rs | 1
-rw-r--r-- | tests/data/parser/inline/0002_const_fn.txt | 13
-rw-r--r-- | tests/data/parser/ok/0024_const_fn.rs | 5
-rw-r--r-- | tests/data/parser/ok/0024_const_fn.txt | 29
-rw-r--r-- | tests/data/parser/ok/0024_const_item.rs (renamed from tests/data/parser/ok/0025_const_item.rs) | 0
-rw-r--r-- | tests/data/parser/ok/0024_const_item.txt (renamed from tests/data/parser/ok/0025_const_item.txt) | 0
-rw-r--r-- | tests/parser.rs | 2
-rw-r--r-- | tools/Cargo.toml | 2
-rw-r--r-- | tools/src/bin/collect-tests.rs | 130
16 files changed, 192 insertions, 38 deletions
diff --git a/.cargo/config b/.cargo/config
index 1ebc0f748..7d89cf490 100644
--- a/.cargo/config
+++ b/.cargo/config
@@ -1,3 +1,4 @@
 [alias]
 parse = "run --package tools --bin parse"
 gen = "run --package tools --bin gen"
+collect-tests = "run --package tools --bin collect-tests --"
diff --git a/appveyor.yml b/appveyor.yml
index a6ba3b0e1..8c7d118c8 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -10,6 +10,7 @@ install:
 build: false
 
 test_script:
+  - cargo collect-tests --verify
   - cargo test
 
 branches:
diff --git a/docs/TESTS.md b/docs/TESTS.md
index 8005ec9da..db06dbebc 100644
--- a/docs/TESTS.md
+++ b/docs/TESTS.md
@@ -19,12 +19,26 @@ files to have the same name except for the leading number. In general,
 test suite should be append-only: old tests should not be modified,
 new tests should be created instead.
 
-
 Note that only `ok` tests are normative: `err` tests test error
 recovery and it is totally ok for a parser to not implement any error
 recovery at all. However, for libsyntax2.0 we do care about error
 recovery, and we do care about precise and useful error messages.
 
+There are also so-called "inline tests". They appear as comments
+with a `test` header in the source code, like this:
+
+```rust
+// test fn_basic
+// fn foo() {}
+fn fn_item(p: &mut Parser) {
+    // ...
+}
+```
+
+You can run the `cargo collect-tests` command to collect all inline tests
+into the `tests/data/parser/inline` directory. The main advantage of inline
+tests is that they help illustrate what the relevant code is doing.
+
 
 Contribution opportunity: design and implement testing infrastructure
 for validators.
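Collected inline tests are numbered and append-only, like the hand-written ones. Below is a small sketch of the naming scheme; the format string is the one used by `tools/src/bin/collect-tests.rs` later in this diff, while the surrounding function and the concrete counts are illustrative only.

```rust
// Sketch of the collector's file-naming scheme. The format string matches
// collect-tests.rs; the wrapper function and numbers are illustrative.
fn collected_file_name(existing: usize, index: usize, name: &str) -> String {
    format!("{:04}_{}.rs", existing + index + 1, name)
}

fn main() {
    // With two inline tests already recorded, a new test named `fn_basic`
    // would be written as 0003_fn_basic.rs (hypothetical numbering).
    assert_eq!(collected_file_name(2, 0, "fn_basic"), "0003_fn_basic.rs");
}
```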
diff --git a/docs/TOOLS.md b/docs/TOOLS.md
index 1fcfa2dec..f8754c06f 100644
--- a/docs/TOOLS.md
+++ b/docs/TOOLS.md
@@ -17,14 +17,20 @@ cargo tool
 ```
 
 
-# Tool: `gen`
+## Tool: `gen`
 
 This tool reads a "grammar" from [grammar.ron](../grammar.ron) and
 generates the `syntax_kinds.rs` file. You should run this tool if you
 add new keywords or syntax elements.
 
 
-# Tool: 'parse'
+## Tool: `parse`
 
 This tool reads rust source code from the standard input, parses it,
 and prints the result to stdout.
+
+
+## Tool: `collect-tests`
+
+This tool collects inline tests from comments in the libsyntax2 source
+code and places them into the `tests/data/parser/inline` directory.
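For context, the `parse` tool described above is essentially a thin wrapper over the library entry points that `tests/parser.rs` exercises further down in this diff. The sketch below is an assumption, not the actual `tools/src/bin/parse.rs` (which is not part of this commit); in particular the exact import paths of `tokenize`, `parse`, and `dump_tree` are guessed.

```rust
// Rough, assumed sketch of the `parse` tool: read Rust source from stdin,
// parse it, and print the tree. Import paths are an assumption; the same
// tokenize/parse/dump_tree trio appears in tests/parser.rs.
extern crate libsyntax2;

use std::io::Read;

fn main() {
    let mut text = String::new();
    std::io::stdin().read_to_string(&mut text).unwrap();
    let tokens = libsyntax2::tokenize(&text);
    let file = libsyntax2::parse(text, &tokens);
    println!("{}", libsyntax2::utils::dump_tree(&file));
}
```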
diff --git a/src/parser/event_parser/grammar/items/mod.rs b/src/parser/event_parser/grammar/items/mod.rs
index 8ccf8f90f..5cf2fc39a 100644
--- a/src/parser/event_parser/grammar/items/mod.rs
+++ b/src/parser/event_parser/grammar/items/mod.rs
@@ -52,11 +52,15 @@ fn item(p: &mut Parser) {
             STATIC_ITEM
         }
         CONST_KW => match p.nth(1) {
+            // test const_fn
+            // const fn foo() {}
             FN_KW => {
                 p.bump();
                 fn_item(p);
                 FN_ITEM
             }
+            // test const_unsafe_fn
+            // const unsafe fn foo() {}
             UNSAFE_KW if p.nth(2) == FN_KW => {
                 p.bump();
                 p.bump();
diff --git a/tests/data/parser/inline/0001_const_unsafe_fn.rs b/tests/data/parser/inline/0001_const_unsafe_fn.rs
new file mode 100644
index 000000000..31a1e435f
--- /dev/null
+++ b/tests/data/parser/inline/0001_const_unsafe_fn.rs
@@ -0,0 +1 @@
+const unsafe fn foo() {}
diff --git a/tests/data/parser/inline/0001_const_unsafe_fn.txt b/tests/data/parser/inline/0001_const_unsafe_fn.txt
new file mode 100644
index 000000000..1f0865cb0
--- /dev/null
+++ b/tests/data/parser/inline/0001_const_unsafe_fn.txt
@@ -0,0 +1,15 @@
+FILE@[0; 25)
+  FN_ITEM@[0; 25)
+    CONST_KW@[0; 5)
+    WHITESPACE@[5; 6)
+    UNSAFE_KW@[6; 12)
+    WHITESPACE@[12; 13)
+    FN_KW@[13; 15)
+    WHITESPACE@[15; 16)
+    IDENT@[16; 19) "foo"
+    L_PAREN@[19; 20)
+    R_PAREN@[20; 21)
+    WHITESPACE@[21; 22)
+    L_CURLY@[22; 23)
+    R_CURLY@[23; 24)
+    WHITESPACE@[24; 25)
diff --git a/tests/data/parser/inline/0002_const_fn.rs b/tests/data/parser/inline/0002_const_fn.rs
new file mode 100644
index 000000000..8c84d9cd7
--- /dev/null
+++ b/tests/data/parser/inline/0002_const_fn.rs
@@ -0,0 +1 @@
+const fn foo() {}
diff --git a/tests/data/parser/inline/0002_const_fn.txt b/tests/data/parser/inline/0002_const_fn.txt
new file mode 100644
index 000000000..2d360d78b
--- /dev/null
+++ b/tests/data/parser/inline/0002_const_fn.txt
@@ -0,0 +1,13 @@
+FILE@[0; 18)
+  FN_ITEM@[0; 18)
+    CONST_KW@[0; 5)
+    WHITESPACE@[5; 6)
+    FN_KW@[6; 8)
+    WHITESPACE@[8; 9)
+    IDENT@[9; 12) "foo"
+    L_PAREN@[12; 13)
+    R_PAREN@[13; 14)
+    WHITESPACE@[14; 15)
+    L_CURLY@[15; 16)
+    R_CURLY@[16; 17)
+    WHITESPACE@[17; 18)
diff --git a/tests/data/parser/ok/0024_const_fn.rs b/tests/data/parser/ok/0024_const_fn.rs
deleted file mode 100644
index eba9322a1..000000000
--- a/tests/data/parser/ok/0024_const_fn.rs
+++ /dev/null
@@ -1,5 +0,0 @@
-const fn foo() {
-}
-
-const unsafe fn foo() {
-}
diff --git a/tests/data/parser/ok/0024_const_fn.txt b/tests/data/parser/ok/0024_const_fn.txt
deleted file mode 100644
index 0fd485997..000000000
--- a/tests/data/parser/ok/0024_const_fn.txt
+++ /dev/null
@@ -1,29 +0,0 @@
-FILE@[0; 46)
-  FN_ITEM@[0; 20)
-    CONST_KW@[0; 5)
-    WHITESPACE@[5; 6)
-    FN_KW@[6; 8)
-    WHITESPACE@[8; 9)
-    IDENT@[9; 12) "foo"
-    L_PAREN@[12; 13)
-    R_PAREN@[13; 14)
-    WHITESPACE@[14; 15)
-    L_CURLY@[15; 16)
-    WHITESPACE@[16; 17)
-    R_CURLY@[17; 18)
-    WHITESPACE@[18; 20)
-  FN_ITEM@[20; 46)
-    CONST_KW@[20; 25)
-    WHITESPACE@[25; 26)
-    UNSAFE_KW@[26; 32)
-    WHITESPACE@[32; 33)
-    FN_KW@[33; 35)
-    WHITESPACE@[35; 36)
-    IDENT@[36; 39) "foo"
-    L_PAREN@[39; 40)
-    R_PAREN@[40; 41)
-    WHITESPACE@[41; 42)
-    L_CURLY@[42; 43)
-    WHITESPACE@[43; 44)
-    R_CURLY@[44; 45)
-    WHITESPACE@[45; 46)
diff --git a/tests/data/parser/ok/0025_const_item.rs b/tests/data/parser/ok/0024_const_item.rs
index 7446859b5..7446859b5 100644
--- a/tests/data/parser/ok/0025_const_item.rs
+++ b/tests/data/parser/ok/0024_const_item.rs
diff --git a/tests/data/parser/ok/0025_const_item.txt b/tests/data/parser/ok/0024_const_item.txt
index 588e001f5..588e001f5 100644
--- a/tests/data/parser/ok/0025_const_item.txt
+++ b/tests/data/parser/ok/0024_const_item.txt
diff --git a/tests/parser.rs b/tests/parser.rs
index f681c066f..68a6434be 100644
--- a/tests/parser.rs
+++ b/tests/parser.rs
@@ -7,7 +7,7 @@ use testutils::dir_tests;
 
 #[test]
 fn parser_tests() {
-    dir_tests(&["parser/ok", "parser/err"], |text| {
+    dir_tests(&["parser/inline", "parser/ok", "parser/err"], |text| {
         let tokens = tokenize(text);
         let file = parse(text.to_string(), &tokens);
         dump_tree(&file)
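`dir_tests` comes from the `testutils` crate and is not shown in this diff; the change above simply adds the new `parser/inline` data directory to the harness. The sketch below is an assumed illustration of that data-directory pattern: every `.rs` file under a directory is run through a function and compared against the sibling `.txt` file, which is why this commit adds the `.rs`/`.txt` pairs above.

```rust
use std::fs;
use std::path::Path;

// Assumed sketch of the pattern behind `dir_tests`: run `f` over every
// .rs file in `dir` and compare the result with the sibling .txt file.
fn check_dir(dir: &Path, f: &dyn Fn(&str) -> String) {
    for entry in fs::read_dir(dir).unwrap() {
        let path = entry.unwrap().path();
        if path.extension().map_or(false, |ext| ext == "rs") {
            let input = fs::read_to_string(&path).unwrap();
            let expected = fs::read_to_string(path.with_extension("txt")).unwrap();
            let actual = f(&input);
            assert_eq!(expected.trim(), actual.trim(), "mismatch for {:?}", path);
        }
    }
}

fn main() {
    let dir = Path::new("tests/data/parser/ok");
    if dir.is_dir() {
        // Uppercasing stands in for the real tokenize/parse/dump_tree pipeline.
        check_dir(dir, &|text: &str| text.to_uppercase());
    }
}
```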
diff --git a/tools/Cargo.toml b/tools/Cargo.toml
index e46874929..8cbc2fc93 100644
--- a/tools/Cargo.toml
+++ b/tools/Cargo.toml
@@ -9,4 +9,6 @@ serde = "1.0.26"
 serde_derive = "1.0.26"
 file = "1.1.1"
 ron = "0.1.5"
+walkdir = "2"
+itertools = "0.7"
 libsyntax2 = { path = "../" }
diff --git a/tools/src/bin/collect-tests.rs b/tools/src/bin/collect-tests.rs
new file mode 100644
index 000000000..df9d2db81
--- /dev/null
+++ b/tools/src/bin/collect-tests.rs
@@ -0,0 +1,130 @@
+extern crate file;
+extern crate itertools;
+extern crate walkdir;
+
+use walkdir::WalkDir;
+use itertools::Itertools;
+
+use std::path::{Path, PathBuf};
+use std::collections::HashSet;
+use std::fs;
+
+fn main() {
+    let verify = ::std::env::args().any(|arg| arg == "--verify");
+
+    let d = grammar_dir();
+    let tests = tests_from_dir(&d);
+    let existing = existing_tests();
+
+    for t in existing.difference(&tests) {
+        panic!("Test is deleted: {}\n{}", t.name, t.text);
+    }
+
+    let new_tests = tests.difference(&existing);
+    for (i, t) in new_tests.enumerate() {
+        if verify {
+            panic!("Inline test is not recorded: {}", t.name);
+        }
+
+        let name = format!("{:04}_{}.rs", existing.len() + i + 1, t.name);
+        println!("Creating {}", name);
+        let path = inline_tests_dir().join(name);
+        file::put_text(&path, &t.text).unwrap();
+    }
+}
+
+#[derive(Debug, Eq)]
+struct Test {
+    name: String,
+    text: String,
+}
+
+impl PartialEq for Test {
+    fn eq(&self, other: &Test) -> bool {
+        self.name.eq(&other.name)
+    }
+}
+
+impl ::std::hash::Hash for Test {
+    fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
+        self.name.hash(state)
+    }
+}
+
+fn tests_from_dir(dir: &Path) -> HashSet<Test> {
+    let mut res = HashSet::new();
+    for entry in WalkDir::new(dir) {
+        let entry = entry.unwrap();
+        if !entry.file_type().is_file() {
+            continue;
+        }
+        if entry.path().extension().unwrap_or_default() != "rs" {
+            continue;
+        }
+        let text = file::get_text(entry.path()).unwrap();
+
+        for test in collect_tests(&text) {
+            if let Some(old_test) = res.replace(test) {
+                panic!("Duplicate test: {}", old_test.name)
+            }
+        }
+    }
+    res
+}
+
+fn collect_tests(s: &str) -> Vec<Test> {
+    let mut res = vec![];
+    let prefix = "// ";
+    let comment_blocks = s.lines()
+        .map(str::trim_left)
+        .group_by(|line| line.starts_with(prefix));
+
+    for (is_comment, block) in comment_blocks.into_iter() {
+        if !is_comment {
+            continue;
+        }
+        let mut block = block.map(|line| &line[prefix.len()..]);
+        let first = block.next().unwrap();
+        if !first.starts_with("test ") {
+            continue;
+        }
+        let name = first["test ".len()..].to_string();
+        let text: String = itertools::join(block.chain(::std::iter::once("")), "\n");
+        assert!(!text.trim().is_empty() && text.ends_with("\n"));
+        res.push(Test { name, text })
+    }
+    res
+}
+
+fn existing_tests() -> HashSet<Test> {
+    let mut res = HashSet::new();
+    for file in fs::read_dir(&inline_tests_dir()).unwrap() {
+        let file = file.unwrap();
+        let path = file.path();
+        if path.extension().unwrap_or_default() != "rs" {
+            continue;
+        }
+        let name = path.file_name().unwrap().to_str().unwrap();
+        let name = name["0000_".len()..name.len() - 3].to_string();
+        let text = file::get_text(&path).unwrap();
+        res.insert(Test { name, text });
+    }
+    res
+}
+
+fn inline_tests_dir() -> PathBuf {
+    let res = base_dir().join("tests/data/parser/inline");
+    if !res.is_dir() {
+        fs::create_dir_all(&res).unwrap();
+    }
+    res
+}
+
+fn grammar_dir() -> PathBuf {
+    base_dir().join("src/parser/event_parser/grammar")
+}
+
+fn base_dir() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir).parent().unwrap().to_owned()
+}
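The heart of the tool is `collect_tests`: it groups consecutive `// `-prefixed lines with `itertools::group_by`, and a block whose first line starts with `test ` becomes a named test whose body is the rest of the comment. Below is a dependency-free sketch of the same idea, simplified (it accepts a `test` header anywhere in a comment block, and the function name is illustrative), followed by its expected behaviour on the `fn_basic` example from docs/TESTS.md.

```rust
// Dependency-free sketch of the extraction idea used by collect-tests.rs
// above; names here are illustrative, not the tool's actual API.
fn extract_inline_tests(source: &str) -> Vec<(String, String)> {
    let prefix = "// ";
    let mut tests = Vec::new();
    let mut lines = source.lines().map(str::trim_start).peekable();
    while let Some(line) = lines.next() {
        // A comment line of the form `// test <name>` starts a test.
        if let Some(rest) = line.strip_prefix(prefix) {
            if let Some(name) = rest.strip_prefix("test ") {
                let mut body = String::new();
                // The remaining `// ` lines of the block form the test body.
                while let Some(&next) = lines.peek() {
                    match next.strip_prefix(prefix) {
                        Some(text) => {
                            body.push_str(text);
                            body.push('\n');
                            lines.next();
                        }
                        None => break,
                    }
                }
                tests.push((name.to_string(), body));
            }
        }
    }
    tests
}

fn main() {
    let src = "// test fn_basic\n// fn foo() {}\nfn fn_item() {}\n";
    let tests = extract_inline_tests(src);
    assert_eq!(
        tests,
        vec![("fn_basic".to_string(), "fn foo() {}\n".to_string())]
    );
}
```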