Diffstat (limited to 'crates/ra_syntax/src')
-rw-r--r--  crates/ra_syntax/src/parsing/lexer.rs |  4 ++--
-rw-r--r--  crates/ra_syntax/src/tests.rs         | 10 +++++-----
2 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs
index f2684c852..d1baaa607 100644
--- a/crates/ra_syntax/src/parsing/lexer.rs
+++ b/crates/ra_syntax/src/parsing/lexer.rs
@@ -65,7 +65,7 @@ pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
 /// Beware that unescape errors are not checked at tokenization time.
 pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxError>)> {
     lex_first_token(text)
-        .filter(|(token, _)| token.len.to_usize() == text.len())
+        .filter(|(token, _)| token.len == TextUnit::of_str(text))
         .map(|(token, error)| (token.kind, error))
 }
 
@@ -75,7 +75,7 @@ pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxEr
 /// Beware that unescape errors are not checked at tokenization time.
 pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
     lex_first_token(text)
-        .filter(|(token, error)| !error.is_some() && token.len.to_usize() == text.len())
+        .filter(|(token, error)| !error.is_some() && token.len == TextUnit::of_str(text))
         .map(|(token, _error)| token.kind)
 }
 
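Both hunks make the same substantive change: instead of converting `token.len` to a `usize` and comparing it against `text.len()`, the check now stays in the `TextUnit` domain. Below is a minimal, runnable sketch of the equivalence, assuming the standalone `text_unit` crate from which `ra_syntax` re-exports these types; the helper name `covers_whole_input` is hypothetical and not part of the commit:

use text_unit::TextUnit;

// Hypothetical helper mirroring the new filter condition.
fn covers_whole_input(token_len: TextUnit, text: &str) -> bool {
    // Old form: token_len.to_usize() == text.len()
    // New form: compare TextUnit values directly, no round-trip through usize.
    token_len == TextUnit::of_str(text)
}

fn main() {
    let text = "struct";
    assert!(covers_whole_input(TextUnit::of_str(text), text));
    assert!(!covers_whole_input(TextUnit::from_usize(3), text));
}

The two forms are observationally identical; the new one simply avoids leaving the newtype that the rest of the crate uses for text offsets.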
diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs
index 912e6aec0..d331d541e 100644
--- a/crates/ra_syntax/src/tests.rs
+++ b/crates/ra_syntax/src/tests.rs
@@ -5,7 +5,7 @@ use std::{
 
 use test_utils::{collect_tests, dir_tests, project_dir, read_text};
 
-use crate::{fuzz, tokenize, SourceFile, SyntaxError, Token};
+use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextUnit, Token};
 
 #[test]
 fn lexer_tests() {
@@ -120,11 +120,11 @@ fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) {
 
 fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String {
     let mut acc = String::new();
-    let mut offset = 0;
+    let mut offset = TextUnit::from_usize(0);
     for token in tokens {
-        let token_len = token.len.to_usize();
-        let token_text = &text[offset..offset + token_len];
-        offset += token_len;
+        let token_len = token.len;
+        let token_text = &text[TextRange::offset_len(offset, token.len)];
+        offset += token.len;
         writeln!(acc, "{:?} {} {:?}", token.kind, token_len, token_text).unwrap();
    }
    for err in errors {
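The `dump_tokens_and_errors` hunk applies the same idea to the test dumper: the running offset becomes a `TextUnit`, and slicing goes through `TextRange`, which `str` is directly indexable by. A runnable sketch of that loop shape, again assuming the `text_unit` crate; the token lengths here are made-up stand-ins for the `len` field of lexed tokens:

use text_unit::{TextRange, TextUnit};

fn main() {
    let text = "fn main";
    // Made-up lengths standing in for `Token::len`: "fn", " ", "main".
    let token_lens =
        [TextUnit::from_usize(2), TextUnit::from_usize(1), TextUnit::from_usize(4)];

    let mut offset = TextUnit::from_usize(0);
    for &len in token_lens.iter() {
        // `str` implements Index<TextRange>, so no usize round-trip is needed.
        let token_text = &text[TextRange::offset_len(offset, len)];
        offset += len;
        println!("{} {:?}", len, token_text);
    }
}

Keeping the offset and the slice bounds as `TextUnit`/`TextRange` values means the compiler, rather than convention, enforces that raw byte counts and text offsets are not mixed up.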