author     Aleksey Kladov <[email protected]>  2017-12-28 21:56:36 +0000
committer  Aleksey Kladov <[email protected]>  2017-12-28 21:56:36 +0000
commit     5e1e8ed34a46738dda507a4a0f4e73065be74e57 (patch)
tree       5191bbb793ce1a563026a9360380cbb207b43b9b /tests
parent     45fce4b3ef53b0ffc78aac7064701f1f31792a5a (diff)
Lexer scaffold
Diffstat (limited to 'tests')
-rw-r--r--  tests/lexer.rs  24
1 file changed, 20 insertions(+), 4 deletions(-)
diff --git a/tests/lexer.rs b/tests/lexer.rs
index de76f0a15..a27e7c395 100644
--- a/tests/lexer.rs
+++ b/tests/lexer.rs
@@ -1,9 +1,13 @@
 extern crate file;
 #[macro_use(assert_diff)]
 extern crate difference;
+extern crate libsyntax2;
 
 use std::path::{PathBuf, Path};
 use std::fs::read_dir;
+use std::fmt::Write;
+
+use libsyntax2::{Token, next_token};
 
 #[test]
 fn lexer_tests() {
@@ -46,10 +50,22 @@ fn lexer_test_case(path: &Path) {
     )
 }
 
-fn tokenize(text: &str) -> Vec<()> {
-    Vec::new()
+fn tokenize(text: &str) -> Vec<Token> {
+    let mut text = text;
+    let mut acc = Vec::new();
+    while !text.is_empty() {
+        let token = next_token(text);
+        acc.push(token);
+        let len: u32 = token.len.into();
+        text = &text[len as usize..];
+    }
+    acc
 }
 
-fn dump_tokens(tokens: &[()]) -> String {
-    "IDENT 5\nKEYWORD 1\nIDENT 5\n".to_string()
+fn dump_tokens(tokens: &[Token]) -> String {
+    let mut acc = String::new();
+    for token in tokens {
+        write!(acc, "{:?} {}\n", token.kind, token.len).unwrap()
+    }
+    acc
 }
\ No newline at end of file
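
For context, a minimal sketch (not part of this commit) of how the new tokenize/dump_tokens pair would plug into the lexer_test_case harness referenced in the second hunk header: lex the input file, dump one "KIND len" line per token, and compare the dump against a sibling expectation file using the assert_diff! macro imported from the difference crate. The file-reading calls and the `.txt` expectation-file naming are assumptions, not taken from the diff.

// Hypothetical harness body; only Token, next_token, tokenize, dump_tokens,
// and the assert_diff! import are taken from the diff above.
use std::path::Path;

fn lexer_test_case(path: &Path) {
    // Read the input source and its sibling `.txt` expectation
    // (the naming convention is an assumption).
    let input = std::fs::read_to_string(path).unwrap();
    let expected = std::fs::read_to_string(path.with_extension("txt")).unwrap();

    // Lex, then render one "KIND len" line per token, as dump_tokens does.
    let actual = dump_tokens(&tokenize(&input));

    // assert_diff!(orig, edit, split, distance) from the `difference` crate:
    // require the two dumps to match line-for-line.
    assert_diff!(&expected, &actual, "\n", 0);
}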