 crates/ra_syntax/src/parsing/lexer.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs
index 6d96f8400..9f321cd06 100644
--- a/crates/ra_syntax/src/parsing/lexer.rs
+++ b/crates/ra_syntax/src/parsing/lexer.rs
@@ -53,7 +53,7 @@ pub fn tokenize(text: &str) -> ParsedTokens {
 }
 
 /// Break a string up into its component tokens.
-/// Returns `ParsedTokens` which are basically a pair `(Vec<Token>, Vec<SyntaxError>)`.
+/// Writes to `ParsedTokens` which are basically a pair `(Vec<Token>, Vec<SyntaxError>)`.
 /// Beware that it checks for shebang first and its length contributes to resulting
 /// tokens offsets.
 pub fn tokenize_append(text: &str, parsed: &mut ParsedTokens) {
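Note: the doc comment above describes an accumulator-style API: `tokenize` builds a fresh `ParsedTokens`, while `tokenize_append` writes additional tokens (and any errors) into an existing one. The following is a minimal sketch of that pattern only, under assumed simplified types; `Token`, `SyntaxError`, and the toy whitespace lexer are placeholders rather than rust-analyzer's actual definitions, and the real lexer's shebang handling is omitted.

// Sketch of the accumulator pattern from the doc comment.
// All types and the lexing logic below are simplified placeholders.

#[derive(Debug)]
struct Token {
    kind: &'static str,
    len: usize, // length in bytes; offsets follow from summing lengths
}

#[derive(Debug)]
struct SyntaxError(String);

/// "Basically a pair `(Vec<Token>, Vec<SyntaxError>)`", as the doc comment puts it.
#[derive(Debug, Default)]
struct ParsedTokens {
    tokens: Vec<Token>,
    errors: Vec<SyntaxError>,
}

/// Break a string up into its component tokens, returning a fresh accumulator.
fn tokenize(text: &str) -> ParsedTokens {
    let mut parsed = ParsedTokens::default();
    tokenize_append(text, &mut parsed);
    parsed
}

/// Append tokens (and any errors) for `text` into an existing accumulator.
fn tokenize_append(text: &str, parsed: &mut ParsedTokens) {
    // Toy lexer: alternate runs of whitespace and non-whitespace, so that the
    // summed token lengths cover the whole input.
    let mut rest = text;
    while !rest.is_empty() {
        let is_ws = rest.chars().next().unwrap().is_whitespace();
        let len = rest
            .char_indices()
            .find(|(_, c)| c.is_whitespace() != is_ws)
            .map(|(i, _)| i)
            .unwrap_or(rest.len());
        let kind = if is_ws { "WHITESPACE" } else { "WORD" };
        parsed.tokens.push(Token { kind, len });
        rest = &rest[len..];
    }
}

fn main() {
    let parsed = tokenize("pub fn tokenize");
    println!("{parsed:?}");
}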