diff options
author | Veetaha <[email protected]> | 2020-01-28 05:18:35 +0000 |
---|---|---|
committer | Veetaha <[email protected]> | 2020-02-03 22:00:55 +0000 |
commit | 58e01d875477234c132061e3072ac19f4dfb7a32 (patch) | |
tree | 6de5660fae3b7b538e2017f44bbd2af90236abdc /crates/ra_syntax/src/parsing | |
parent | b1764d85fced5f3bc1db82063fca9369f9e1740b (diff) |
ra_syntax: rename first_token() -> lex_first_token()
Diffstat (limited to 'crates/ra_syntax/src/parsing')
-rw-r--r-- | crates/ra_syntax/src/parsing/lexer.rs | 6 |
1 file changed, 3 insertions, 3 deletions
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs index d1315e604..f889e6a1d 100644 --- a/crates/ra_syntax/src/parsing/lexer.rs +++ b/crates/ra_syntax/src/parsing/lexer.rs | |||
@@ -64,7 +64,7 @@ pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) { | |||
64 | /// | 64 | /// |
65 | /// Beware that unescape errors are not checked at tokenization time. | 65 | /// Beware that unescape errors are not checked at tokenization time. |
66 | pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxError>)> { | 66 | pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxError>)> { |
67 | first_token(text) | 67 | lex_first_token(text) |
68 | .filter(|(token, _)| token.len.to_usize() == text.len()) | 68 | .filter(|(token, _)| token.len.to_usize() == text.len()) |
69 | .map(|(token, error)| (token.kind, error)) | 69 | .map(|(token, error)| (token.kind, error)) |
70 | } | 70 | } |
@@ -74,7 +74,7 @@ pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxEr | |||
74 | /// | 74 | /// |
75 | /// Beware that unescape errors are not checked at tokenization time. | 75 | /// Beware that unescape errors are not checked at tokenization time. |
76 | pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> { | 76 | pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> { |
77 | first_token(text) | 77 | lex_first_token(text) |
78 | .filter(|(token, error)| !error.is_some() && token.len.to_usize() == text.len()) | 78 | .filter(|(token, error)| !error.is_some() && token.len.to_usize() == text.len()) |
79 | .map(|(token, _error)| token.kind) | 79 | .map(|(token, _error)| token.kind) |
80 | } | 80 | } |
@@ -87,7 +87,7 @@ pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> { | |||
87 | /// The token is malformed if the returned error is not `None`. | 87 | /// The token is malformed if the returned error is not `None`. |
88 | /// | 88 | /// |
89 | /// Beware that unescape errors are not checked at tokenization time. | 89 | /// Beware that unescape errors are not checked at tokenization time. |
90 | fn first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> { | 90 | fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> { |
91 | // non-empty string is a precondtion of `rustc_lexer::first_token()`. | 91 | // non-empty string is a precondtion of `rustc_lexer::first_token()`. |
92 | if text.is_empty() { | 92 | if text.is_empty() { |
93 | return None; | 93 | return None; |