path: root/crates/syntax/src/parsing
author    Aleksey Kladov <[email protected]>    2020-11-06 21:21:56 +0000
committer Aleksey Kladov <[email protected]>    2020-11-06 21:23:14 +0000
commit    5ba4f949c23dcf53f34995c90b7c01e6c641b1f0 (patch)
tree      fe5064dde4e948a776c87d38fba972903acad3ec /crates/syntax/src/parsing
parent    6725dcf847300b9cddcbb061b159317113860f31 (diff)
Kill RAW_ literals
Syntactically, they are indistinguishable from non-raw versions, so it doesn't make sense to separate them *at the syntax* level.
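
For illustration only (the types and helper below are hypothetical stand-ins, not rust-analyzer's real API): once the RAW_ kinds are gone, raw-ness becomes a property of the token text rather than of the token kind, so a consumer that still cares about it can recover it from the text.

// Hypothetical stand-ins to illustrate the point; not rust-analyzer types.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum SyntaxKind {
    String,     // now covers "foo", r"foo" and r#"foo"# alike
    ByteString, // now covers b"foo", br"foo", ...
}

// Raw-ness is a property of the token text, not of the token kind.
fn is_raw(kind: SyntaxKind, text: &str) -> bool {
    matches!(kind, SyntaxKind::String | SyntaxKind::ByteString)
        && text.trim_start_matches('b').starts_with('r')
}

fn main() {
    assert!(!is_raw(SyntaxKind::String, "\"hello\""));
    assert!(is_raw(SyntaxKind::String, "r#\"hello\"#"));
    assert!(is_raw(SyntaxKind::ByteString, "br\"hello\""));
}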
Diffstat (limited to 'crates/syntax/src/parsing')
-rw-r--r--  crates/syntax/src/parsing/lexer.rs      4
-rw-r--r--  crates/syntax/src/parsing/reparsing.rs  2
2 files changed, 3 insertions, 3 deletions
diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs
index 5674ecb84..8afd7e53b 100644
--- a/crates/syntax/src/parsing/lexer.rs
+++ b/crates/syntax/src/parsing/lexer.rs
@@ -235,7 +235,7 @@ fn rustc_token_kind_to_syntax_kind(
                     RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols",
                 };
             };
-            RAW_STRING
+            STRING
         }
         rustc_lexer::LiteralKind::RawByteStr { err: raw_str_err, .. } => {
             if let Some(raw_str_err) = raw_str_err {
@@ -250,7 +250,7 @@ fn rustc_token_kind_to_syntax_kind(
                 };
             };
 
-            RAW_BYTE_STRING
+            BYTE_STRING
         }
     };
 
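
A condensed sketch of the mapping these two hunks leave behind. The LiteralKind below is an illustrative stand-in for rustc_lexer::LiteralKind (the real variants carry delimiter and error data omitted here): raw and non-raw forms now collapse onto the same syntax kind.

// Illustrative stand-in for the relevant rustc_lexer::LiteralKind variants;
// the real enum carries extra data (hash counts, error info) omitted here.
enum LiteralKind {
    Str,
    RawStr,     // used to map to RAW_STRING
    ByteStr,
    RawByteStr, // used to map to RAW_BYTE_STRING
}

#[derive(Debug, PartialEq, Eq)]
enum SyntaxKind {
    String,
    ByteString,
}

// After this commit both raw and non-raw forms yield the same kind.
fn to_syntax_kind(kind: LiteralKind) -> SyntaxKind {
    match kind {
        LiteralKind::Str | LiteralKind::RawStr => SyntaxKind::String,
        LiteralKind::ByteStr | LiteralKind::RawByteStr => SyntaxKind::ByteString,
    }
}

fn main() {
    assert_eq!(to_syntax_kind(LiteralKind::Str), SyntaxKind::String);
    assert_eq!(to_syntax_kind(LiteralKind::RawStr), SyntaxKind::String);
    assert_eq!(to_syntax_kind(LiteralKind::ByteStr), SyntaxKind::ByteString);
    assert_eq!(to_syntax_kind(LiteralKind::RawByteStr), SyntaxKind::ByteString);
}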
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 4149f856a..190f5f67a 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -44,7 +44,7 @@ fn reparse_token<'node>(
     let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone();
     let prev_token_kind = prev_token.kind();
     match prev_token_kind {
-        WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
+        WHITESPACE | COMMENT | IDENT | STRING => {
             if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT {
                 // removing a new line may extends previous token
                 let deleted_range = edit.delete - prev_token.text_range().start();
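
Since raw strings now lex to plain STRING, an edit inside an r#"..."# literal qualifies for the same single-token reparse fast path as an ordinary string. A hypothetical, self-contained mirror of the match arm above (local stand-in enum, not the real SyntaxKind):

// Hypothetical mirror of the check above; the real code matches on
// rust-analyzer's SyntaxKind constants rather than this stand-in enum.
#[allow(dead_code)]
enum SyntaxKind {
    Whitespace,
    Comment,
    Ident,
    String, // raw string literals land here too after this commit
    Other,
}

fn single_token_reparse_candidate(kind: SyntaxKind) -> bool {
    use SyntaxKind::*;
    matches!(kind, Whitespace | Comment | Ident | String)
}

fn main() {
    // An edit inside a (raw) string literal can be re-lexed in isolation.
    assert!(single_token_reparse_candidate(SyntaxKind::String));
    assert!(!single_token_reparse_candidate(SyntaxKind::Other));
}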