about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--  Cargo.toml                       |  4 +++-
-rw-r--r--  tests/data/lexer/0001_hello.rs   |  1 +
-rw-r--r--  tests/data/lexer/0001_hello.txt  |  3 +++
-rw-r--r--  tests/lexer.rs                   | 55 +++++++++++++++++++++++++++++
 4 files changed, 62 insertions(+), 1 deletion(-)
diff --git a/Cargo.toml b/Cargo.toml
index c94b99fad..b16ca4c7e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -3,4 +3,6 @@ name = "libsyntax2"
 version = "0.1.0"
 authors = ["Aleksey Kladov <[email protected]>"]
 
-[dependencies]
+[dev-dependencies]
+file = "1.1.1"
+difference = "1.0.0"
diff --git a/tests/data/lexer/0001_hello.rs b/tests/data/lexer/0001_hello.rs
new file mode 100644
index 000000000..95d09f2b1
--- /dev/null
+++ b/tests/data/lexer/0001_hello.rs
@@ -0,0 +1 @@
+hello world
\ No newline at end of file
diff --git a/tests/data/lexer/0001_hello.txt b/tests/data/lexer/0001_hello.txt
new file mode 100644
index 000000000..5bec9be80
--- /dev/null
+++ b/tests/data/lexer/0001_hello.txt
@@ -0,0 +1,3 @@
+IDENT 5
+WHITESPACE 1
+IDENT 5
\ No newline at end of file
diff --git a/tests/lexer.rs b/tests/lexer.rs
new file mode 100644
index 000000000..de76f0a15
--- /dev/null
+++ b/tests/lexer.rs
@@ -0,0 +1,55 @@
+extern crate file;
+#[macro_use(assert_diff)]
+extern crate difference;
+
+use std::path::{PathBuf, Path};
+use std::fs::read_dir;
+
+#[test]
+fn lexer_tests() {
+    for test_case in lexer_test_cases() {
+        lexer_test_case(&test_case);
+    }
+}
+
+fn lexer_test_dir() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir).join("tests/data/lexer")
+}
+
+fn lexer_test_cases() -> Vec<PathBuf> {
+    let mut acc = Vec::new();
+    let dir = lexer_test_dir();
+    for file in read_dir(&dir).unwrap() {
+        let file = file.unwrap();
+        let path = file.path();
+        if path.extension().unwrap_or_default() == "rs" {
+            acc.push(path);
+        }
+    }
+    acc
+}
+
+fn lexer_test_case(path: &Path) {
+    let actual = {
+        let text = file::get_text(path).unwrap();
+        let tokens = tokenize(&text);
+        dump_tokens(&tokens)
+    };
+    let expected = file::get_text(&path.with_extension("txt")).unwrap();
+
+    assert_diff!(
+        expected.as_str(),
+        actual.as_str(),
+        "\n",
+        0
+    )
+}
+
+fn tokenize(text: &str) -> Vec<()> {
+    Vec::new()
+}
+
+fn dump_tokens(tokens: &[()]) -> String {
+    "IDENT 5\nKEYWORD 1\nIDENT 5\n".to_string()
+}
\ No newline at end of file