aboutsummaryrefslogtreecommitdiff
path: root/crates/ra_syntax/src/tests.rs
diff options
context:
space:
mode:
authorbors[bot] <26634292+bors[bot]@users.noreply.github.com>2020-03-13 08:24:19 +0000
committerGitHub <[email protected]>2020-03-13 08:24:19 +0000
commitbe3cf01c15cd48877046e675f4a0ff31b8a08798 (patch)
tree23e3903d5d9c7d72c32eb73649dd36b24eba6d38 /crates/ra_syntax/src/tests.rs
parent2f9f409538553fc709bbcad1a5c76968f36e5968 (diff)
parent88c944f96b426955933b77ca68c92990734769be (diff)
Merge #3570
3570: Remove some TextUnit->usize escapees. r=matklad a=CAD97. As spotted during [a review of all uses of `text_unit::TextUnit::to_usize`](https://github.com/rust-analyzer/text_unit/pull/12#issuecomment-598512370). Legitimate uses do remain. Co-authored-by: CAD97 <[email protected]>
Diffstat (limited to 'crates/ra_syntax/src/tests.rs')
-rw-r--r-- crates/ra_syntax/src/tests.rs | 10
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs
index 912e6aec0..d331d541e 100644
--- a/crates/ra_syntax/src/tests.rs
+++ b/crates/ra_syntax/src/tests.rs
@@ -5,7 +5,7 @@ use std::{
5 5
6use test_utils::{collect_tests, dir_tests, project_dir, read_text}; 6use test_utils::{collect_tests, dir_tests, project_dir, read_text};
7 7
8use crate::{fuzz, tokenize, SourceFile, SyntaxError, Token}; 8use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextUnit, Token};
9 9
10#[test] 10#[test]
11fn lexer_tests() { 11fn lexer_tests() {
@@ -120,11 +120,11 @@ fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) {
120 120
121fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String { 121fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String {
122 let mut acc = String::new(); 122 let mut acc = String::new();
123 let mut offset = 0; 123 let mut offset = TextUnit::from_usize(0);
124 for token in tokens { 124 for token in tokens {
125 let token_len = token.len.to_usize(); 125 let token_len = token.len;
126 let token_text = &text[offset..offset + token_len]; 126 let token_text = &text[TextRange::offset_len(offset, token.len)];
127 offset += token_len; 127 offset += token.len;
128 writeln!(acc, "{:?} {} {:?}", token.kind, token_len, token_text).unwrap(); 128 writeln!(acc, "{:?} {} {:?}", token.kind, token_len, token_text).unwrap();
129 } 129 }
130 for err in errors { 130 for err in errors {