From 5e1e8ed34a46738dda507a4a0f4e73065be74e57 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Fri, 29 Dec 2017 00:56:36 +0300
Subject: Lexer scaffold

---
 tests/lexer.rs | 24 ++++++++++++++++++++----
 1 file changed, 20 insertions(+), 4 deletions(-)

diff --git a/tests/lexer.rs b/tests/lexer.rs
index de76f0a15..a27e7c395 100644
--- a/tests/lexer.rs
+++ b/tests/lexer.rs
@@ -1,9 +1,13 @@
 extern crate file;
 #[macro_use(assert_diff)]
 extern crate difference;
+extern crate libsyntax2;
 
 use std::path::{PathBuf, Path};
 use std::fs::read_dir;
+use std::fmt::Write;
+
+use libsyntax2::{Token, next_token};
 
 #[test]
 fn lexer_tests() {
@@ -46,10 +50,22 @@ fn lexer_test_case(path: &Path) {
     )
 }
 
-fn tokenize(text: &str) -> Vec<()> {
-    Vec::new()
+fn tokenize(text: &str) -> Vec<Token> {
+    let mut text = text;
+    let mut acc = Vec::new();
+    while !text.is_empty() {
+        let token = next_token(text);
+        acc.push(token);
+        let len: u32 = token.len.into();
+        text = &text[len as usize..];
+    }
+    acc
 }
 
-fn dump_tokens(tokens: &[()]) -> String {
-    "IDENT 5\nKEYWORD 1\nIDENT 5\n".to_string()
+fn dump_tokens(tokens: &[Token]) -> String {
+    let mut acc = String::new();
+    for token in tokens {
+        write!(acc, "{:?} {}\n", token.kind, token.len).unwrap()
+    }
+    acc
 }
\ No newline at end of file
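
Note: the test scaffold above relies on a libsyntax2 surface that the patch itself does not define: a `Token` value with `kind` and `len` fields and a free function `next_token`. For orientation, below is a minimal, hypothetical Rust sketch of such an interface, written only from how the test uses it. The type names (`SyntaxKind`, `TextUnit`), the variant set, and the lexing logic are assumptions, not the crate's actual code.

```rust
use std::fmt;

// Hypothetical token kinds, named to match the dump format the old stub
// produced ("IDENT 5"); the real libsyntax2 kind set is not shown in this patch.
#[allow(non_camel_case_types)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SyntaxKind {
    IDENT,
    WHITESPACE,
    ERROR,
}

// Token length in bytes. A newtype is assumed here because the test converts it
// explicitly (`let len: u32 = token.len.into();`) and prints it with `{}`.
#[derive(Clone, Copy)]
pub struct TextUnit(u32);

impl From<TextUnit> for u32 {
    fn from(unit: TextUnit) -> u32 {
        unit.0
    }
}

impl fmt::Display for TextUnit {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

// One lexed token: its kind plus how much of the input it consumed.
// `Copy` is assumed because the test pushes the token and then reads `token.len`.
#[derive(Clone, Copy)]
pub struct Token {
    pub kind: SyntaxKind,
    pub len: TextUnit,
}

// Lex a single token off the front of `text`. Placeholder logic only:
// identifier runs, whitespace runs, and a one-char ERROR fallback.
pub fn next_token(text: &str) -> Token {
    let first = text.chars().next().expect("next_token called on empty input");
    let is_word = |c: char| c.is_alphanumeric() || c == '_';

    let (kind, len) = if is_word(first) {
        let n: usize = text.chars().take_while(|&c| is_word(c)).map(char::len_utf8).sum();
        (SyntaxKind::IDENT, n)
    } else if first.is_whitespace() {
        let n: usize = text.chars().take_while(|c| c.is_whitespace()).map(char::len_utf8).sum();
        (SyntaxKind::WHITESPACE, n)
    } else {
        // Single-char fallback so the tokenize loop in the test always makes progress.
        (SyntaxKind::ERROR, first.len_utf8())
    };
    Token { kind, len: TextUnit(len as u32) }
}

fn main() {
    // Mirrors the `tokenize` + `dump_tokens` loop from the patch.
    let mut text = "hello world";
    while !text.is_empty() {
        let token = next_token(text);
        let len: u32 = token.len.into();
        println!("{:?} {}", token.kind, token.len);
        text = &text[len as usize..];
    }
}
```

Running this sketch prints `IDENT 5`, `WHITESPACE 1`, `IDENT 5`, which is the same shape of output the hard-coded stub version of `dump_tokens` returned before this patch.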