-rw-r--r--  crates/libsyntax2/Cargo.toml                    2
-rw-r--r--  crates/libsyntax2/tests/lexer.rs               28
-rw-r--r--  crates/libsyntax2/tests/parser.rs              14
-rw-r--r--  crates/libsyntax2/tests/test/main.rs (renamed from crates/libsyntax2/tests/testutils/src/lib.rs)  38
-rw-r--r--  crates/libsyntax2/tests/testutils/Cargo.toml    7
5 files changed, 36 insertions, 53 deletions
diff --git a/crates/libsyntax2/Cargo.toml b/crates/libsyntax2/Cargo.toml
index f67735540..5a76ea82b 100644
--- a/crates/libsyntax2/Cargo.toml
+++ b/crates/libsyntax2/Cargo.toml
@@ -12,4 +12,4 @@ drop_bomb = "0.1.4"
 parking_lot = "0.6.0"
 
 [dev-dependencies]
-testutils = { path = "./tests/testutils" }
+difference = "2.0.0"
diff --git a/crates/libsyntax2/tests/lexer.rs b/crates/libsyntax2/tests/lexer.rs
deleted file mode 100644
index 46ac9fedd..000000000
--- a/crates/libsyntax2/tests/lexer.rs
+++ /dev/null
@@ -1,28 +0,0 @@
-extern crate libsyntax2;
-extern crate testutils;
-
-use std::fmt::Write;
-
-use libsyntax2::{tokenize, Token};
-use testutils::dir_tests;
-
-#[test]
-fn lexer_tests() {
-    dir_tests(&["lexer"], |text| {
-        let tokens = tokenize(text);
-        dump_tokens(&tokens, text)
-    })
-}
-
-fn dump_tokens(tokens: &[Token], text: &str) -> String {
-    let mut acc = String::new();
-    let mut offset = 0;
-    for token in tokens {
-        let len: u32 = token.len.into();
-        let len = len as usize;
-        let token_text = &text[offset..offset + len];
-        offset += len;
-        write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap()
-    }
-    acc
-}
diff --git a/crates/libsyntax2/tests/parser.rs b/crates/libsyntax2/tests/parser.rs
deleted file mode 100644
index af2ae11bb..000000000
--- a/crates/libsyntax2/tests/parser.rs
+++ /dev/null
@@ -1,14 +0,0 @@
-extern crate libsyntax2;
-extern crate testutils;
-
-use libsyntax2::parse;
-use libsyntax2::utils::dump_tree;
-use testutils::dir_tests;
-
-#[test]
-fn parser_tests() {
-    dir_tests(&["parser/inline", "parser/ok", "parser/err"], |text| {
-        let file = parse(text);
-        dump_tree(&file)
-    })
-}
diff --git a/crates/libsyntax2/tests/testutils/src/lib.rs b/crates/libsyntax2/tests/test/main.rs
index 39c821661..18e5bc4d4 100644
--- a/crates/libsyntax2/tests/testutils/src/lib.rs
+++ b/crates/libsyntax2/tests/test/main.rs
@@ -1,12 +1,31 @@
+extern crate libsyntax2;
 extern crate difference;
 
 use std::{
     fs,
     path::{Path, PathBuf},
+    fmt::Write,
 };
 
 use difference::Changeset;
 
+#[test]
+fn lexer_tests() {
+    dir_tests(&["lexer"], |text| {
+        let tokens = libsyntax2::tokenize(text);
+        dump_tokens(&tokens, text)
+    })
+}
+
+#[test]
+fn parser_tests() {
+    dir_tests(&["parser/inline", "parser/ok", "parser/err"], |text| {
+        let file = libsyntax2::parse(text);
+        libsyntax2::utils::dump_tree(&file)
+    })
+}
+
+
 /// Read file and normalize newlines.
 ///
 /// `rustc` seems to always normalize `\r\n` newlines to `\n`:
@@ -23,8 +42,8 @@ fn read_text(path: &Path) -> String {
 }
 
 pub fn dir_tests<F>(paths: &[&str], f: F)
-where
+    where
     F: Fn(&str) -> String,
 {
     for path in collect_tests(paths) {
         let input_code = read_text(&path);
@@ -107,5 +126,18 @@ fn project_dir() -> PathBuf {
 }
 
 fn test_data_dir() -> PathBuf {
-    project_dir().join("tests/data")
+    project_dir().join("crates/libsyntax2/tests/data")
+}
+
+fn dump_tokens(tokens: &[libsyntax2::Token], text: &str) -> String {
+    let mut acc = String::new();
+    let mut offset = 0;
+    for token in tokens {
+        let len: u32 = token.len.into();
+        let len = len as usize;
+        let token_text = &text[offset..offset + len];
+        offset += len;
+        write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap()
+    }
+    acc
 }
diff --git a/crates/libsyntax2/tests/testutils/Cargo.toml b/crates/libsyntax2/tests/testutils/Cargo.toml
deleted file mode 100644
index 53b20f17b..000000000
--- a/crates/libsyntax2/tests/testutils/Cargo.toml
+++ /dev/null
@@ -1,7 +0,0 @@
-[package]
-name = "testutils"
-version = "0.1.0"
-authors = ["Aleksey Kladov <[email protected]>"]
-
-[dependencies]
-difference = "2.0.0"
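
Note on the merged test harness: `tests/test/main.rs` now drives both the lexer and parser tests through the same data-driven `dir_tests` helper, reading fixtures from `crates/libsyntax2/tests/data`. The full body of `dir_tests` is not shown in this diff; the following standalone sketch illustrates the comparison loop such a harness typically performs. The fixture convention assumed here (each `*.rs` input paired with a sibling `*.txt` expected dump) and the name `dir_tests_sketch` are assumptions for illustration, not taken from the diff.

// A minimal, self-contained sketch of a data-driven test loop in the
// spirit of `dir_tests` above. Assumption: fixtures are `*.rs` files
// with matching `*.txt` files holding the expected output.
use std::{
    fs,
    path::{Path, PathBuf},
};

// Read a file and normalize `\r\n` to `\n`, mirroring the intent of the
// `read_text` helper referenced in the diff.
fn read_text(path: &Path) -> String {
    fs::read_to_string(path)
        .expect("failed to read fixture")
        .replace("\r\n", "\n")
}

// Walk one fixture directory, apply `f` to every `*.rs` input, and
// compare the result against the sibling `*.txt` expectation.
fn dir_tests_sketch<F>(dir: &Path, f: F)
where
    F: Fn(&str) -> String,
{
    for entry in fs::read_dir(dir).expect("failed to read fixture dir") {
        let input: PathBuf = entry.expect("bad dir entry").path();
        if input.extension().and_then(|e| e.to_str()) != Some("rs") {
            continue;
        }
        let expected_path = input.with_extension("txt");
        let actual = f(&read_text(&input));
        let expected = read_text(&expected_path);
        assert_eq!(
            expected, actual,
            "mismatch for fixture {}",
            input.display()
        );
    }
}

fn main() {
    // Usage example: run a trivial "dump" function (here, the identity)
    // over a hypothetical fixture directory, skipping if it is absent.
    let dir = Path::new("crates/libsyntax2/tests/data/lexer");
    if dir.is_dir() {
        dir_tests_sketch(dir, |text| text.to_string());
    }
}

In the actual crate, the closure passed in would be `libsyntax2::tokenize` plus `dump_tokens`, or `libsyntax2::parse` plus `dump_tree`, as shown in the `lexer_tests` and `parser_tests` functions in the diff; the `difference` dev-dependency suggests mismatches are reported as a textual changeset rather than a bare `assert_eq!`.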