From 63a462f37ca584e1a585a69e30823ce25d4d252f Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sat, 25 Apr 2020 00:57:47 +0200
Subject: Switch to TryFrom

---
 crates/ra_syntax/src/ast/tokens.rs    | 14 ++++++--------
 crates/ra_syntax/src/fuzz.rs          | 11 ++++++++---
 crates/ra_syntax/src/parsing/lexer.rs | 24 +++++++++++++-----------
 crates/ra_syntax/src/tests.rs         |  2 +-
 crates/ra_syntax/src/validation.rs    |  4 +++-
 5 files changed, 31 insertions(+), 24 deletions(-)

(limited to 'crates/ra_syntax/src')

diff --git a/crates/ra_syntax/src/ast/tokens.rs b/crates/ra_syntax/src/ast/tokens.rs
index 26b8f9c36..8e04b0bbd 100644
--- a/crates/ra_syntax/src/ast/tokens.rs
+++ b/crates/ra_syntax/src/ast/tokens.rs
@@ -1,5 +1,7 @@
 //! There are many AstNodes, but only a few tokens, so we hand-write them here.
 
+use std::convert::{TryFrom, TryInto};
+
 use crate::{
     ast::{AstToken, Comment, RawString, String, Whitespace},
     TextRange, TextSize,
@@ -95,8 +97,8 @@ impl QuoteOffsets {
         }
 
         let start = TextSize::from(0);
-        let left_quote = TextSize::from_usize(left_quote) + TextSize::of('"');
-        let right_quote = TextSize::from_usize(right_quote);
+        let left_quote = TextSize::try_from(left_quote).unwrap() + TextSize::of('"');
+        let right_quote = TextSize::try_from(right_quote).unwrap();
         let end = TextSize::of(literal);
 
         let res = QuoteOffsets {
@@ -498,7 +500,7 @@ impl HasFormatSpecifier for String {
         let mut res = Vec::with_capacity(text.len());
         rustc_lexer::unescape::unescape_str(text, &mut |range, unescaped_char| {
             res.push((
-                TextRange::new(TextSize::from_usize(range.start), TextSize::from_usize(range.end))
+                TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap())
                     + offset,
                 unescaped_char,
             ))
@@ -518,11 +520,7 @@ impl HasFormatSpecifier for RawString {
 
         let mut res = Vec::with_capacity(text.len());
         for (idx, c) in text.char_indices() {
-            res.push((
-                TextRange::new(TextSize::from_usize(idx), TextSize::from_usize(idx + c.len_utf8()))
-                    + offset,
-                Ok(c),
-            ));
+            res.push((TextRange::at(idx.try_into().unwrap(), TextSize::of(c)) + offset, Ok(c)));
         }
         Some(res)
     }
diff --git a/crates/ra_syntax/src/fuzz.rs b/crates/ra_syntax/src/fuzz.rs
index 15aad2205..10fbe3176 100644
--- a/crates/ra_syntax/src/fuzz.rs
+++ b/crates/ra_syntax/src/fuzz.rs
@@ -1,8 +1,13 @@
 //! FIXME: write short doc here
 
-use crate::{validation, AstNode, SourceFile, TextRange, TextSize};
+use std::{
+    convert::TryInto,
+    str::{self, FromStr},
+};
+
 use ra_text_edit::AtomTextEdit;
-use std::str::{self, FromStr};
+
+use crate::{validation, AstNode, SourceFile, TextRange};
 
 fn check_file_invariants(file: &SourceFile) {
     let root = file.syntax();
@@ -35,7 +40,7 @@ impl CheckReparse {
         let text = format!("{}{}{}", PREFIX, text, SUFFIX);
         text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range
         let delete =
-            TextRange::at(TextSize::from_usize(delete_start), TextSize::from_usize(delete_len));
+            TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap());
         let edited_text =
             format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]);
         let edit = AtomTextEdit { delete, insert };
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs
index 1fdc76d98..f450ef4a2 100644
--- a/crates/ra_syntax/src/parsing/lexer.rs
+++ b/crates/ra_syntax/src/parsing/lexer.rs
@@ -1,6 +1,8 @@
 //! Lexer analyzes raw input string and produces lexemes (tokens).
 //! It is just a bridge to `rustc_lexer`.
 
+use std::convert::TryInto;
+
 use crate::{
     SyntaxError,
     SyntaxKind::{self, *},
@@ -28,18 +30,19 @@ pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
     let mut tokens = Vec::new();
     let mut errors = Vec::new();
 
-    let mut offset: usize = rustc_lexer::strip_shebang(text)
-        .map(|shebang_len| {
-            tokens.push(Token { kind: SHEBANG, len: TextSize::from_usize(shebang_len) });
+    let mut offset = match rustc_lexer::strip_shebang(text) {
+        Some(shebang_len) => {
+            tokens.push(Token { kind: SHEBANG, len: shebang_len.try_into().unwrap() });
             shebang_len
-        })
-        .unwrap_or(0);
+        }
+        None => 0,
+    };
 
     let text_without_shebang = &text[offset..];
 
     for rustc_token in rustc_lexer::tokenize(text_without_shebang) {
-        let token_len = TextSize::from_usize(rustc_token.len);
-        let token_range = TextRange::at(TextSize::from_usize(offset), token_len);
+        let token_len: TextSize = rustc_token.len.try_into().unwrap();
+        let token_range = TextRange::at(offset.try_into().unwrap(), token_len);
 
         let (syntax_kind, err_message) =
             rustc_token_kind_to_syntax_kind(&rustc_token.kind, &text[token_range]);
@@ -96,10 +99,9 @@ fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> {
     let rustc_token = rustc_lexer::first_token(text);
     let (syntax_kind, err_message) = rustc_token_kind_to_syntax_kind(&rustc_token.kind, text);
 
-    let token = Token { kind: syntax_kind, len: TextSize::from_usize(rustc_token.len) };
-    let optional_error = err_message.map(|err_message| {
-        SyntaxError::new(err_message, TextRange::new(0.into(), TextSize::of(text)))
-    });
+    let token = Token { kind: syntax_kind, len: rustc_token.len.try_into().unwrap() };
+    let optional_error = err_message
+        .map(|err_message| SyntaxError::new(err_message, TextRange::up_to(TextSize::of(text))));
 
     Some((token, optional_error))
 }
diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs
index 4f2b67feb..aee57db62 100644
--- a/crates/ra_syntax/src/tests.rs
+++ b/crates/ra_syntax/src/tests.rs
@@ -121,7 +121,7 @@ fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) {
 
 fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String {
     let mut acc = String::new();
-    let mut offset = TextSize::from_usize(0);
+    let mut offset: TextSize = 0.into();
     for token in tokens {
         let token_len = token.len;
         let token_text = &text[TextRange::at(offset, token.len)];
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs
index 77d7e132d..5e93895ec 100644
--- a/crates/ra_syntax/src/validation.rs
+++ b/crates/ra_syntax/src/validation.rs
@@ -2,6 +2,8 @@
 
 mod block;
 
+use std::convert::TryFrom;
+
 use rustc_lexer::unescape;
 
 use crate::{
@@ -112,7 +114,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
 
     // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
     let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
-        let off = token.text_range().start() + TextSize::from_usize(off + prefix_len);
+        let off = token.text_range().start() + TextSize::try_from(off + prefix_len).unwrap();
        acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off));
     };
 
--
cgit v1.2.3
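
Note (not part of the patch; placed after the signature, where git am ignores it): a minimal, std-only sketch of the conversion pattern this commit adopts, replacing the infallible-looking `TextSize::from_usize(x)` with an explicit checked conversion. Plain `u32` stands in for `TextSize` here so the snippet compiles without the text-size crate.

    use std::convert::TryFrom;

    fn main() {
        // A lexer offset computed as usize; converting it to a 32-bit text
        // position type can overflow on 64-bit targets, so the conversion is
        // checked (and the failure made explicit) instead of silently truncating.
        let offset: usize = 42;
        let offset_u32 = u32::try_from(offset).unwrap();
        assert_eq!(offset_u32, 42);
    }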