author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2020-11-06 21:39:02 +0000
committer GitHub <[email protected]>  2020-11-06 21:39:02 +0000
commit    7f12a1f225c7d3397f27964ce039b55d680772d3 (patch)
tree      26043b20588eae4510e28249f11a094aacaf190d /crates
parent    cdddcaee851be1cff1eeb23599f5a58f1b30a927 (diff)
parent    6158304f8b64ef7cdf58b14bc675baf33a27a853 (diff)
Merge #6485
6485: Remove RAW literals r=matklad a=matklad

bors r+ 🤖

closes https://github.com/rust-analyzer/rust-analyzer/issues/6308

Co-authored-by: Aleksey Kladov <[email protected]>
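In short, the commit collapses the separate RAW_STRING / RAW_BYTE_STRING token kinds into STRING / BYTE_STRING and moves the raw-vs-cooked distinction onto the token itself via an is_raw() check. A minimal, self-contained sketch of that shape, with toy types standing in for rust-analyzer's real ones (all names below are illustrative, not the actual API):

    // Illustrative model only: after this change there is a single STRING /
    // BYTE_STRING kind, and "rawness" is a question you ask the token,
    // not a separate SyntaxKind.

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum SyntaxKind {
        String,     // covers both "foo" and r#"foo"#
        ByteString, // covers both b"foo" and br#"foo"#
    }

    struct Token {
        kind: SyntaxKind,
        text: &'static str, // full literal text as written in the source
    }

    impl Token {
        // Mirrors ast::String::is_raw() / ast::ByteString::is_raw() from the diff:
        // raw literals are recognised by their `r` / `br` prefix.
        fn is_raw(&self) -> bool {
            match self.kind {
                SyntaxKind::String => self.text.starts_with('r'),
                SyntaxKind::ByteString => self.text.starts_with("br"),
            }
        }
    }

    fn main() {
        let cooked = Token { kind: SyntaxKind::String, text: "\"hi\"" };
        let raw = Token { kind: SyntaxKind::String, text: "r#\"hi\"#" };
        assert!(!cooked.is_raw());
        assert!(raw.is_raw());
        println!("both are {:?}, only one is raw", cooked.kind);
    }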
Diffstat (limited to 'crates')
-rw-r--r--  crates/assists/src/assist_context.rs | 7
-rw-r--r--  crates/assists/src/handlers/add_turbo_fish.rs | 2
-rw-r--r--  crates/assists/src/handlers/expand_glob_import.rs | 2
-rw-r--r--  crates/assists/src/handlers/flip_comma.rs | 2
-rw-r--r--  crates/assists/src/handlers/flip_trait_bound.rs | 2
-rw-r--r--  crates/assists/src/handlers/introduce_named_lifetime.rs | 2
-rw-r--r--  crates/assists/src/handlers/invert_if.rs | 2
-rw-r--r--  crates/assists/src/handlers/raw_string.rs | 34
-rw-r--r--  crates/assists/src/handlers/remove_mut.rs | 2
-rw-r--r--  crates/assists/src/handlers/replace_let_with_if_let.rs | 2
-rw-r--r--  crates/assists/src/handlers/replace_string_with_char.rs | 8
-rw-r--r--  crates/assists/src/handlers/split_import.rs | 2
-rw-r--r--  crates/assists/src/handlers/unwrap_block.rs | 2
-rw-r--r--  crates/hir_expand/src/builtin_macro.rs | 2
-rw-r--r--  crates/ide/src/extend_selection.rs | 2
-rw-r--r--  crates/ide/src/syntax_highlighting.rs | 16
-rw-r--r--  crates/ide/src/syntax_highlighting/format.rs | 4
-rw-r--r--  crates/ide/src/syntax_highlighting/injection.rs | 3
-rw-r--r--  crates/ide/src/syntax_tree.rs | 6
-rw-r--r--  crates/parser/src/grammar.rs | 5
-rw-r--r--  crates/parser/src/grammar/expressions/atom.rs | 14
-rw-r--r--  crates/parser/src/grammar/items.rs | 4
-rw-r--r--  crates/parser/src/syntax_kind/generated.rs | 5
-rw-r--r--  crates/syntax/src/ast/expr_ext.rs | 11
-rw-r--r--  crates/syntax/src/ast/generated/tokens.rs | 8
-rw-r--r--  crates/syntax/src/ast/node_ext.rs | 10
-rw-r--r--  crates/syntax/src/ast/token_ext.rs | 90
-rw-r--r--  crates/syntax/src/parsing/lexer.rs | 4
-rw-r--r--  crates/syntax/src/parsing/reparsing.rs | 2
-rw-r--r--  crates/syntax/src/validation.rs | 41
-rw-r--r--  crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/ok/0008_byte_strings.txt | 4
-rw-r--r--  crates/syntax/test_data/lexer/ok/0009_strings.txt | 2
-rw-r--r--  crates/syntax/test_data/lexer/ok/0013_raw_strings.txt | 2
-rw-r--r--  crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast | 4
52 files changed, 155 insertions, 189 deletions
diff --git a/crates/assists/src/assist_context.rs b/crates/assists/src/assist_context.rs
index d11fee196..fcfe2d6ee 100644
--- a/crates/assists/src/assist_context.rs
+++ b/crates/assists/src/assist_context.rs
@@ -12,7 +12,7 @@ use ide_db::{
12}; 12};
13use syntax::{ 13use syntax::{
14 algo::{self, find_node_at_offset, SyntaxRewriter}, 14 algo::{self, find_node_at_offset, SyntaxRewriter},
15 AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, TextSize, 15 AstNode, AstToken, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, TextSize,
16 TokenAtOffset, 16 TokenAtOffset,
17}; 17};
18use text_edit::{TextEdit, TextEditBuilder}; 18use text_edit::{TextEdit, TextEditBuilder};
@@ -81,9 +81,12 @@ impl<'a> AssistContext<'a> {
81 pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> { 81 pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
82 self.source_file.syntax().token_at_offset(self.offset()) 82 self.source_file.syntax().token_at_offset(self.offset())
83 } 83 }
84 pub(crate) fn find_token_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> { 84 pub(crate) fn find_token_syntax_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> {
85 self.token_at_offset().find(|it| it.kind() == kind) 85 self.token_at_offset().find(|it| it.kind() == kind)
86 } 86 }
87 pub(crate) fn find_token_at_offset<T: AstToken>(&self) -> Option<T> {
88 self.token_at_offset().find_map(T::cast)
89 }
87 pub(crate) fn find_node_at_offset<N: AstNode>(&self) -> Option<N> { 90 pub(crate) fn find_node_at_offset<N: AstNode>(&self) -> Option<N> {
88 find_node_at_offset(self.source_file.syntax(), self.offset()) 91 find_node_at_offset(self.source_file.syntax(), self.offset())
89 } 92 }
diff --git a/crates/assists/src/handlers/add_turbo_fish.rs b/crates/assists/src/handlers/add_turbo_fish.rs
index e3d84d698..1f486c013 100644
--- a/crates/assists/src/handlers/add_turbo_fish.rs
+++ b/crates/assists/src/handlers/add_turbo_fish.rs
@@ -25,7 +25,7 @@ use crate::{
25// } 25// }
26// ``` 26// ```
27pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 27pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
28 let ident = ctx.find_token_at_offset(SyntaxKind::IDENT).or_else(|| { 28 let ident = ctx.find_token_syntax_at_offset(SyntaxKind::IDENT).or_else(|| {
29 let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?; 29 let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?;
30 if arg_list.args().count() > 0 { 30 if arg_list.args().count() > 0 {
31 return None; 31 return None;
diff --git a/crates/assists/src/handlers/expand_glob_import.rs b/crates/assists/src/handlers/expand_glob_import.rs
index 316a58d88..853266395 100644
--- a/crates/assists/src/handlers/expand_glob_import.rs
+++ b/crates/assists/src/handlers/expand_glob_import.rs
@@ -41,7 +41,7 @@ use crate::{
41// fn qux(bar: Bar, baz: Baz) {} 41// fn qux(bar: Bar, baz: Baz) {}
42// ``` 42// ```
43pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 43pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
44 let star = ctx.find_token_at_offset(T![*])?; 44 let star = ctx.find_token_syntax_at_offset(T![*])?;
45 let (parent, mod_path) = find_parent_and_path(&star)?; 45 let (parent, mod_path) = find_parent_and_path(&star)?;
46 let target_module = match ctx.sema.resolve_path(&mod_path)? { 46 let target_module = match ctx.sema.resolve_path(&mod_path)? {
47 PathResolution::Def(ModuleDef::Module(it)) => it, 47 PathResolution::Def(ModuleDef::Module(it)) => it,
diff --git a/crates/assists/src/handlers/flip_comma.rs b/crates/assists/src/handlers/flip_comma.rs
index 5c69db53e..64b4b1a76 100644
--- a/crates/assists/src/handlers/flip_comma.rs
+++ b/crates/assists/src/handlers/flip_comma.rs
@@ -18,7 +18,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
18// } 18// }
19// ``` 19// ```
20pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 20pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
21 let comma = ctx.find_token_at_offset(T![,])?; 21 let comma = ctx.find_token_syntax_at_offset(T![,])?;
22 let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; 22 let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?;
23 let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; 23 let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?;
24 24
diff --git a/crates/assists/src/handlers/flip_trait_bound.rs b/crates/assists/src/handlers/flip_trait_bound.rs
index 347e79b1d..92ee42181 100644
--- a/crates/assists/src/handlers/flip_trait_bound.rs
+++ b/crates/assists/src/handlers/flip_trait_bound.rs
@@ -20,7 +20,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
20pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 20pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
21 // We want to replicate the behavior of `flip_binexpr` by only suggesting 21 // We want to replicate the behavior of `flip_binexpr` by only suggesting
22 // the assist when the cursor is on a `+` 22 // the assist when the cursor is on a `+`
23 let plus = ctx.find_token_at_offset(T![+])?; 23 let plus = ctx.find_token_syntax_at_offset(T![+])?;
24 24
25 // Make sure we're in a `TypeBoundList` 25 // Make sure we're in a `TypeBoundList`
26 if ast::TypeBoundList::cast(plus.parent()).is_none() { 26 if ast::TypeBoundList::cast(plus.parent()).is_none() {
diff --git a/crates/assists/src/handlers/introduce_named_lifetime.rs b/crates/assists/src/handlers/introduce_named_lifetime.rs
index 5f623e5f7..4cc8dae65 100644
--- a/crates/assists/src/handlers/introduce_named_lifetime.rs
+++ b/crates/assists/src/handlers/introduce_named_lifetime.rs
@@ -36,7 +36,7 @@ static ASSIST_LABEL: &str = "Introduce named lifetime";
36// FIXME: should also add support for the case fun(f: &Foo) -> &<|>Foo 36// FIXME: should also add support for the case fun(f: &Foo) -> &<|>Foo
37pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 37pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
38 let lifetime_token = ctx 38 let lifetime_token = ctx
39 .find_token_at_offset(SyntaxKind::LIFETIME) 39 .find_token_syntax_at_offset(SyntaxKind::LIFETIME)
40 .filter(|lifetime| lifetime.text() == "'_")?; 40 .filter(|lifetime| lifetime.text() == "'_")?;
41 if let Some(fn_def) = lifetime_token.ancestors().find_map(ast::Fn::cast) { 41 if let Some(fn_def) = lifetime_token.ancestors().find_map(ast::Fn::cast) {
42 generate_fn_def_assist(acc, &fn_def, lifetime_token.text_range()) 42 generate_fn_def_assist(acc, &fn_def, lifetime_token.text_range())
diff --git a/crates/assists/src/handlers/invert_if.rs b/crates/assists/src/handlers/invert_if.rs
index 461fcf862..ea722b91b 100644
--- a/crates/assists/src/handlers/invert_if.rs
+++ b/crates/assists/src/handlers/invert_if.rs
@@ -29,7 +29,7 @@ use crate::{
29// ``` 29// ```
30 30
31pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 31pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
32 let if_keyword = ctx.find_token_at_offset(T![if])?; 32 let if_keyword = ctx.find_token_syntax_at_offset(T![if])?;
33 let expr = ast::IfExpr::cast(if_keyword.parent())?; 33 let expr = ast::IfExpr::cast(if_keyword.parent())?;
34 let if_range = if_keyword.text_range(); 34 let if_range = if_keyword.text_range();
35 let cursor_in_range = if_range.contains_range(ctx.frange.range); 35 let cursor_in_range = if_range.contains_range(ctx.frange.range);
diff --git a/crates/assists/src/handlers/raw_string.rs b/crates/assists/src/handlers/raw_string.rs
index 9ddd116e0..4c759cc25 100644
--- a/crates/assists/src/handlers/raw_string.rs
+++ b/crates/assists/src/handlers/raw_string.rs
@@ -1,11 +1,6 @@
1use std::borrow::Cow; 1use std::borrow::Cow;
2 2
3use syntax::{ 3use syntax::{ast, AstToken, TextRange, TextSize};
4 ast::{self, HasQuotes, HasStringValue},
5 AstToken,
6 SyntaxKind::{RAW_STRING, STRING},
7 TextRange, TextSize,
8};
9use test_utils::mark; 4use test_utils::mark;
10 5
11use crate::{AssistContext, AssistId, AssistKind, Assists}; 6use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -26,7 +21,10 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
26// } 21// }
27// ``` 22// ```
28pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 23pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
29 let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; 24 let token = ctx.find_token_at_offset::<ast::String>()?;
25 if token.is_raw() {
26 return None;
27 }
30 let value = token.value()?; 28 let value = token.value()?;
31 let target = token.syntax().text_range(); 29 let target = token.syntax().text_range();
32 acc.add( 30 acc.add(
@@ -65,7 +63,10 @@ pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<
65// } 63// }
66// ``` 64// ```
67pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 65pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
68 let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; 66 let token = ctx.find_token_at_offset::<ast::String>()?;
67 if !token.is_raw() {
68 return None;
69 }
69 let value = token.value()?; 70 let value = token.value()?;
70 let target = token.syntax().text_range(); 71 let target = token.syntax().text_range();
71 acc.add( 72 acc.add(
@@ -104,11 +105,15 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Optio
104// } 105// }
105// ``` 106// ```
106pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 107pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
107 let token = ctx.find_token_at_offset(RAW_STRING)?; 108 let token = ctx.find_token_at_offset::<ast::String>()?;
108 let target = token.text_range(); 109 if !token.is_raw() {
110 return None;
111 }
112 let text_range = token.syntax().text_range();
113 let target = text_range;
109 acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| { 114 acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| {
110 edit.insert(token.text_range().start() + TextSize::of('r'), "#"); 115 edit.insert(text_range.start() + TextSize::of('r'), "#");
111 edit.insert(token.text_range().end(), "#"); 116 edit.insert(text_range.end(), "#");
112 }) 117 })
113} 118}
114 119
@@ -128,7 +133,10 @@ pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
128// } 133// }
129// ``` 134// ```
130pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 135pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
131 let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; 136 let token = ctx.find_token_at_offset::<ast::String>()?;
137 if !token.is_raw() {
138 return None;
139 }
132 140
133 let text = token.text().as_str(); 141 let text = token.text().as_str();
134 if !text.starts_with("r#") && text.ends_with('#') { 142 if !text.starts_with("r#") && text.ends_with('#') {
diff --git a/crates/assists/src/handlers/remove_mut.rs b/crates/assists/src/handlers/remove_mut.rs
index 44f41daa9..575b271f7 100644
--- a/crates/assists/src/handlers/remove_mut.rs
+++ b/crates/assists/src/handlers/remove_mut.rs
@@ -18,7 +18,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
18// } 18// }
19// ``` 19// ```
20pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 20pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
21 let mut_token = ctx.find_token_at_offset(T![mut])?; 21 let mut_token = ctx.find_token_syntax_at_offset(T![mut])?;
22 let delete_from = mut_token.text_range().start(); 22 let delete_from = mut_token.text_range().start();
23 let delete_to = match mut_token.next_token() { 23 let delete_to = match mut_token.next_token() {
24 Some(it) if it.kind() == SyntaxKind::WHITESPACE => it.text_range().end(), 24 Some(it) if it.kind() == SyntaxKind::WHITESPACE => it.text_range().end(),
diff --git a/crates/assists/src/handlers/replace_let_with_if_let.rs b/crates/assists/src/handlers/replace_let_with_if_let.rs
index a5bcbda24..69d3b08d3 100644
--- a/crates/assists/src/handlers/replace_let_with_if_let.rs
+++ b/crates/assists/src/handlers/replace_let_with_if_let.rs
@@ -37,7 +37,7 @@ use ide_db::ty_filter::TryEnum;
37// fn compute() -> Option<i32> { None } 37// fn compute() -> Option<i32> { None }
38// ``` 38// ```
39pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 39pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
40 let let_kw = ctx.find_token_at_offset(T![let])?; 40 let let_kw = ctx.find_token_syntax_at_offset(T![let])?;
41 let let_stmt = let_kw.ancestors().find_map(ast::LetStmt::cast)?; 41 let let_stmt = let_kw.ancestors().find_map(ast::LetStmt::cast)?;
42 let init = let_stmt.initializer()?; 42 let init = let_stmt.initializer()?;
43 let original_pat = let_stmt.pat()?; 43 let original_pat = let_stmt.pat()?;
diff --git a/crates/assists/src/handlers/replace_string_with_char.rs b/crates/assists/src/handlers/replace_string_with_char.rs
index 4ca87a8ec..b4b898846 100644
--- a/crates/assists/src/handlers/replace_string_with_char.rs
+++ b/crates/assists/src/handlers/replace_string_with_char.rs
@@ -1,8 +1,4 @@
1use syntax::{ 1use syntax::{ast, AstToken, SyntaxKind::STRING};
2 ast::{self, HasStringValue},
3 AstToken,
4 SyntaxKind::STRING,
5};
6 2
7use crate::{AssistContext, AssistId, AssistKind, Assists}; 3use crate::{AssistContext, AssistId, AssistKind, Assists};
8 4
@@ -22,7 +18,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
22// } 18// }
23// ``` 19// ```
24pub(crate) fn replace_string_with_char(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 20pub(crate) fn replace_string_with_char(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
25 let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; 21 let token = ctx.find_token_syntax_at_offset(STRING).and_then(ast::String::cast)?;
26 let value = token.value()?; 22 let value = token.value()?;
27 let target = token.syntax().text_range(); 23 let target = token.syntax().text_range();
28 24
diff --git a/crates/assists/src/handlers/split_import.rs b/crates/assists/src/handlers/split_import.rs
index 15e67eaa1..ef1f6b8a1 100644
--- a/crates/assists/src/handlers/split_import.rs
+++ b/crates/assists/src/handlers/split_import.rs
@@ -16,7 +16,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
16// use std::{collections::HashMap}; 16// use std::{collections::HashMap};
17// ``` 17// ```
18pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { 18pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
19 let colon_colon = ctx.find_token_at_offset(T![::])?; 19 let colon_colon = ctx.find_token_syntax_at_offset(T![::])?;
20 let path = ast::Path::cast(colon_colon.parent())?.qualifier()?; 20 let path = ast::Path::cast(colon_colon.parent())?.qualifier()?;
21 let top_path = successors(Some(path.clone()), |it| it.parent_path()).last()?; 21 let top_path = successors(Some(path.clone()), |it| it.parent_path()).last()?;
22 22
diff --git a/crates/assists/src/handlers/unwrap_block.rs b/crates/assists/src/handlers/unwrap_block.rs
index 3851aeb3e..36ef871b9 100644
--- a/crates/assists/src/handlers/unwrap_block.rs
+++ b/crates/assists/src/handlers/unwrap_block.rs
@@ -29,7 +29,7 @@ pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
29 let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite); 29 let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite);
30 let assist_label = "Unwrap block"; 30 let assist_label = "Unwrap block";
31 31
32 let l_curly_token = ctx.find_token_at_offset(T!['{'])?; 32 let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?;
33 let mut block = ast::BlockExpr::cast(l_curly_token.parent())?; 33 let mut block = ast::BlockExpr::cast(l_curly_token.parent())?;
34 let mut parent = block.syntax().parent()?; 34 let mut parent = block.syntax().parent()?;
35 if ast::MatchArm::can_cast(parent.kind()) { 35 if ast::MatchArm::can_cast(parent.kind()) {
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs
index 86918b626..aebbfc4df 100644
--- a/crates/hir_expand/src/builtin_macro.rs
+++ b/crates/hir_expand/src/builtin_macro.rs
@@ -8,7 +8,7 @@ use base_db::FileId;
8use either::Either; 8use either::Either;
9use mbe::parse_to_token_tree; 9use mbe::parse_to_token_tree;
10use parser::FragmentKind; 10use parser::FragmentKind;
11use syntax::ast::{self, AstToken, HasStringValue}; 11use syntax::ast::{self, AstToken};
12 12
13macro_rules! register_builtin { 13macro_rules! register_builtin {
14 ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => { 14 ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => {
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs
index 3ee0af8ad..0971f7701 100644
--- a/crates/ide/src/extend_selection.rs
+++ b/crates/ide/src/extend_selection.rs
@@ -35,7 +35,7 @@ fn try_extend_selection(
35) -> Option<TextRange> { 35) -> Option<TextRange> {
36 let range = frange.range; 36 let range = frange.range;
37 37
38 let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; 38 let string_kinds = [COMMENT, STRING, BYTE_STRING];
39 let list_kinds = [ 39 let list_kinds = [
40 RECORD_PAT_FIELD_LIST, 40 RECORD_PAT_FIELD_LIST,
41 MATCH_ARM_LIST, 41 MATCH_ARM_LIST,
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index efcc8ecfe..05bafe9c8 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -179,10 +179,12 @@ pub(crate) fn highlight(
179 element.clone() 179 element.clone()
180 }; 180 };
181 181
182 if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) { 182 if let Some(token) = element.as_token().cloned().and_then(ast::String::cast) {
183 let expanded = element_to_highlight.as_token().unwrap().clone(); 183 if token.is_raw() {
184 if injection::highlight_injection(&mut stack, &sema, token, expanded).is_some() { 184 let expanded = element_to_highlight.as_token().unwrap().clone();
185 continue; 185 if injection::highlight_injection(&mut stack, &sema, token, expanded).is_some() {
186 continue;
187 }
186 } 188 }
187 } 189 }
188 190
@@ -214,10 +216,6 @@ pub(crate) fn highlight(
214 } 216 }
215 stack.pop_and_inject(None); 217 stack.pop_and_inject(None);
216 } 218 }
217 } else if let Some(string) =
218 element_to_highlight.as_token().cloned().and_then(ast::RawString::cast)
219 {
220 format_string_highlighter.highlight_format_string(&mut stack, &string, range);
221 } 219 }
222 } 220 }
223 } 221 }
@@ -532,7 +530,7 @@ fn highlight_element(
532 None => h.into(), 530 None => h.into(),
533 } 531 }
534 } 532 }
535 STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => HighlightTag::StringLiteral.into(), 533 STRING | BYTE_STRING => HighlightTag::StringLiteral.into(),
536 ATTR => HighlightTag::Attribute.into(), 534 ATTR => HighlightTag::Attribute.into(),
537 INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(), 535 INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(),
538 BYTE => HighlightTag::ByteLiteral.into(), 536 BYTE => HighlightTag::ByteLiteral.into(),
diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs
index 71bde24f0..42f27df5d 100644
--- a/crates/ide/src/syntax_highlighting/format.rs
+++ b/crates/ide/src/syntax_highlighting/format.rs
@@ -29,9 +29,7 @@ impl FormatStringHighlighter {
29 .children_with_tokens() 29 .children_with_tokens()
30 .filter(|t| t.kind() != SyntaxKind::WHITESPACE) 30 .filter(|t| t.kind() != SyntaxKind::WHITESPACE)
31 .nth(1) 31 .nth(1)
32 .filter(|e| { 32 .filter(|e| ast::String::can_cast(e.kind()))
33 ast::String::can_cast(e.kind()) || ast::RawString::can_cast(e.kind())
34 })
35 } 33 }
36 _ => {} 34 _ => {}
37 } 35 }
diff --git a/crates/ide/src/syntax_highlighting/injection.rs b/crates/ide/src/syntax_highlighting/injection.rs
index 59a74bc02..e97d1be1a 100644
--- a/crates/ide/src/syntax_highlighting/injection.rs
+++ b/crates/ide/src/syntax_highlighting/injection.rs
@@ -2,7 +2,6 @@
2 2
3use std::{collections::BTreeMap, convert::TryFrom}; 3use std::{collections::BTreeMap, convert::TryFrom};
4 4
5use ast::{HasQuotes, HasStringValue};
6use hir::Semantics; 5use hir::Semantics;
7use ide_db::call_info::ActiveParameter; 6use ide_db::call_info::ActiveParameter;
8use itertools::Itertools; 7use itertools::Itertools;
@@ -15,7 +14,7 @@ use super::HighlightedRangeStack;
15pub(super) fn highlight_injection( 14pub(super) fn highlight_injection(
16 acc: &mut HighlightedRangeStack, 15 acc: &mut HighlightedRangeStack,
17 sema: &Semantics<RootDatabase>, 16 sema: &Semantics<RootDatabase>,
18 literal: ast::RawString, 17 literal: ast::String,
19 expanded: SyntaxToken, 18 expanded: SyntaxToken,
20) -> Option<()> { 19) -> Option<()> {
21 let active_parameter = ActiveParameter::at_token(&sema, expanded)?; 20 let active_parameter = ActiveParameter::at_token(&sema, expanded)?;
diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs
index 7941610d6..6dd05c05d 100644
--- a/crates/ide/src/syntax_tree.rs
+++ b/crates/ide/src/syntax_tree.rs
@@ -1,9 +1,7 @@
1use ide_db::base_db::{FileId, SourceDatabase}; 1use ide_db::base_db::{FileId, SourceDatabase};
2use ide_db::RootDatabase; 2use ide_db::RootDatabase;
3use syntax::{ 3use syntax::{
4 algo, AstNode, NodeOrToken, SourceFile, 4 algo, AstNode, NodeOrToken, SourceFile, SyntaxKind::STRING, SyntaxToken, TextRange, TextSize,
5 SyntaxKind::{RAW_STRING, STRING},
6 SyntaxToken, TextRange, TextSize,
7}; 5};
8 6
9// Feature: Show Syntax Tree 7// Feature: Show Syntax Tree
@@ -46,7 +44,7 @@ fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<
46 // we'll attempt parsing it as rust syntax 44 // we'll attempt parsing it as rust syntax
47 // to provide the syntax tree of the contents of the string 45 // to provide the syntax tree of the contents of the string
48 match token.kind() { 46 match token.kind() {
49 STRING | RAW_STRING => syntax_tree_for_token(token, text_range), 47 STRING => syntax_tree_for_token(token, text_range),
50 _ => None, 48 _ => None,
51 } 49 }
52} 50}
diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs
index 4ab206a83..116b991a8 100644
--- a/crates/parser/src/grammar.rs
+++ b/crates/parser/src/grammar.rs
@@ -236,10 +236,7 @@ fn abi(p: &mut Parser) {
236 assert!(p.at(T![extern])); 236 assert!(p.at(T![extern]));
237 let abi = p.start(); 237 let abi = p.start();
238 p.bump(T![extern]); 238 p.bump(T![extern]);
239 match p.current() { 239 p.eat(STRING);
240 STRING | RAW_STRING => p.bump_any(),
241 _ => (),
242 }
243 abi.complete(p, ABI); 240 abi.complete(p, ABI);
244} 241}
245 242
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index 66a92a4e1..31f42f161 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -15,18 +15,8 @@ use super::*;
15// let _ = b"e"; 15// let _ = b"e";
16// let _ = br"f"; 16// let _ = br"f";
17// } 17// }
18pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[ 18pub(crate) const LITERAL_FIRST: TokenSet =
19 TRUE_KW, 19 TokenSet::new(&[TRUE_KW, FALSE_KW, INT_NUMBER, FLOAT_NUMBER, BYTE, CHAR, STRING, BYTE_STRING]);
20 FALSE_KW,
21 INT_NUMBER,
22 FLOAT_NUMBER,
23 BYTE,
24 CHAR,
25 STRING,
26 RAW_STRING,
27 BYTE_STRING,
28 RAW_BYTE_STRING,
29]);
30 20
31pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> { 21pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
32 if !p.at_ts(LITERAL_FIRST) { 22 if !p.at_ts(LITERAL_FIRST) {
diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs
index 22810e6fb..780bc470a 100644
--- a/crates/parser/src/grammar/items.rs
+++ b/crates/parser/src/grammar/items.rs
@@ -239,9 +239,7 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> {
239 T![static] => consts::static_(p, m), 239 T![static] => consts::static_(p, m),
240 // test extern_block 240 // test extern_block
241 // extern {} 241 // extern {}
242 T![extern] 242 T![extern] if la == T!['{'] || (la == STRING && p.nth(2) == T!['{']) => {
243 if la == T!['{'] || ((la == STRING || la == RAW_STRING) && p.nth(2) == T!['{']) =>
244 {
245 abi(p); 243 abi(p);
246 extern_item_list(p); 244 extern_item_list(p);
247 m.complete(p, EXTERN_BLOCK); 245 m.complete(p, EXTERN_BLOCK);
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs
index 935bd2c5e..8bc6688f3 100644
--- a/crates/parser/src/syntax_kind/generated.rs
+++ b/crates/parser/src/syntax_kind/generated.rs
@@ -111,9 +111,7 @@ pub enum SyntaxKind {
111 CHAR, 111 CHAR,
112 BYTE, 112 BYTE,
113 STRING, 113 STRING,
114 RAW_STRING,
115 BYTE_STRING, 114 BYTE_STRING,
116 RAW_BYTE_STRING,
117 ERROR, 115 ERROR,
118 IDENT, 116 IDENT,
119 WHITESPACE, 117 WHITESPACE,
@@ -277,8 +275,7 @@ impl SyntaxKind {
277 } 275 }
278 pub fn is_literal(self) -> bool { 276 pub fn is_literal(self) -> bool {
279 match self { 277 match self {
280 INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | RAW_STRING | BYTE_STRING 278 INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING => true,
281 | RAW_BYTE_STRING => true,
282 _ => false, 279 _ => false,
283 } 280 }
284 } 281 }
diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs
index 3d33cd1cf..eb44bb2ab 100644
--- a/crates/syntax/src/ast/expr_ext.rs
+++ b/crates/syntax/src/ast/expr_ext.rs
@@ -320,6 +320,13 @@ impl ast::Literal {
320 ast::IntNumber::cast(self.token()) 320 ast::IntNumber::cast(self.token())
321 } 321 }
322 322
323 pub fn as_string(&self) -> Option<ast::String> {
324 ast::String::cast(self.token())
325 }
326 pub fn as_byte_string(&self) -> Option<ast::ByteString> {
327 ast::ByteString::cast(self.token())
328 }
329
323 fn find_suffix(text: &str, possible_suffixes: &[&str]) -> Option<SmolStr> { 330 fn find_suffix(text: &str, possible_suffixes: &[&str]) -> Option<SmolStr> {
324 possible_suffixes 331 possible_suffixes
325 .iter() 332 .iter()
@@ -351,10 +358,10 @@ impl ast::Literal {
351 suffix: Self::find_suffix(&text, &ast::FloatNumber::SUFFIXES), 358 suffix: Self::find_suffix(&text, &ast::FloatNumber::SUFFIXES),
352 } 359 }
353 } 360 }
354 STRING | RAW_STRING => LiteralKind::String, 361 STRING => LiteralKind::String,
355 T![true] => LiteralKind::Bool(true), 362 T![true] => LiteralKind::Bool(true),
356 T![false] => LiteralKind::Bool(false), 363 T![false] => LiteralKind::Bool(false),
357 BYTE_STRING | RAW_BYTE_STRING => LiteralKind::ByteString, 364 BYTE_STRING => LiteralKind::ByteString,
358 CHAR => LiteralKind::Char, 365 CHAR => LiteralKind::Char,
359 BYTE => LiteralKind::Byte, 366 BYTE => LiteralKind::Byte,
360 _ => unreachable!(), 367 _ => unreachable!(),
diff --git a/crates/syntax/src/ast/generated/tokens.rs b/crates/syntax/src/ast/generated/tokens.rs
index 1b8449221..728b72cd7 100644
--- a/crates/syntax/src/ast/generated/tokens.rs
+++ b/crates/syntax/src/ast/generated/tokens.rs
@@ -70,16 +70,16 @@ impl AstToken for String {
70} 70}
71 71
72#[derive(Debug, Clone, PartialEq, Eq, Hash)] 72#[derive(Debug, Clone, PartialEq, Eq, Hash)]
73pub struct RawString { 73pub struct ByteString {
74 pub(crate) syntax: SyntaxToken, 74 pub(crate) syntax: SyntaxToken,
75} 75}
76impl std::fmt::Display for RawString { 76impl std::fmt::Display for ByteString {
77 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 77 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
78 std::fmt::Display::fmt(&self.syntax, f) 78 std::fmt::Display::fmt(&self.syntax, f)
79 } 79 }
80} 80}
81impl AstToken for RawString { 81impl AstToken for ByteString {
82 fn can_cast(kind: SyntaxKind) -> bool { kind == RAW_STRING } 82 fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE_STRING }
83 fn cast(syntax: SyntaxToken) -> Option<Self> { 83 fn cast(syntax: SyntaxToken) -> Option<Self> {
84 if Self::can_cast(syntax.kind()) { 84 if Self::can_cast(syntax.kind()) {
85 Some(Self { syntax }) 85 Some(Self { syntax })
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index c5cd1c504..ce35ac01a 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -7,7 +7,7 @@ use itertools::Itertools;
7use parser::SyntaxKind; 7use parser::SyntaxKind;
8 8
9use crate::{ 9use crate::{
10 ast::{self, support, token_ext::HasStringValue, AstNode, AstToken, NameOwner, SyntaxNode}, 10 ast::{self, support, AstNode, AstToken, NameOwner, SyntaxNode},
11 SmolStr, SyntaxElement, SyntaxToken, T, 11 SmolStr, SyntaxElement, SyntaxToken, T,
12}; 12};
13 13
@@ -55,13 +55,7 @@ impl ast::Attr {
55 let key = self.simple_name()?; 55 let key = self.simple_name()?;
56 let value_token = lit.syntax().first_token()?; 56 let value_token = lit.syntax().first_token()?;
57 57
58 let value: SmolStr = if let Some(s) = ast::String::cast(value_token.clone()) { 58 let value: SmolStr = ast::String::cast(value_token.clone())?.value()?.into();
59 s.value()?.into()
60 } else if let Some(s) = ast::RawString::cast(value_token) {
61 s.value()?.into()
62 } else {
63 return None;
64 };
65 59
66 Some((key, value)) 60 Some((key, value))
67 } 61 }
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 8d3fad5a6..bf0035986 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -114,39 +114,24 @@ impl QuoteOffsets {
114 } 114 }
115} 115}
116 116
117pub trait HasQuotes: AstToken { 117impl ast::String {
118 fn quote_offsets(&self) -> Option<QuoteOffsets> { 118 pub fn is_raw(&self) -> bool {
119 let text = self.text().as_str(); 119 self.text().starts_with('r')
120 let offsets = QuoteOffsets::new(text)?;
121 let o = self.syntax().text_range().start();
122 let offsets = QuoteOffsets {
123 quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o),
124 contents: offsets.contents + o,
125 };
126 Some(offsets)
127 }
128 fn open_quote_text_range(&self) -> Option<TextRange> {
129 self.quote_offsets().map(|it| it.quotes.0)
130 } 120 }
131 121 pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
132 fn close_quote_text_range(&self) -> Option<TextRange> { 122 let contents_range = self.text_range_between_quotes()?;
133 self.quote_offsets().map(|it| it.quotes.1) 123 assert!(TextRange::up_to(contents_range.len()).contains_range(range));
134 } 124 Some(range + contents_range.start())
135
136 fn text_range_between_quotes(&self) -> Option<TextRange> {
137 self.quote_offsets().map(|it| it.contents)
138 } 125 }
139}
140
141impl HasQuotes for ast::String {}
142impl HasQuotes for ast::RawString {}
143 126
144pub trait HasStringValue: HasQuotes { 127 pub fn value(&self) -> Option<Cow<'_, str>> {
145 fn value(&self) -> Option<Cow<'_, str>>; 128 if self.is_raw() {
146} 129 let text = self.text().as_str();
130 let text =
131 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
132 return Some(Cow::Borrowed(text));
133 }
147 134
148impl HasStringValue for ast::String {
149 fn value(&self) -> Option<Cow<'_, str>> {
150 let text = self.text().as_str(); 135 let text = self.text().as_str();
151 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; 136 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
152 137
@@ -164,22 +149,31 @@ impl HasStringValue for ast::String {
164 let res = if buf == text { Cow::Borrowed(text) } else { Cow::Owned(buf) }; 149 let res = if buf == text { Cow::Borrowed(text) } else { Cow::Owned(buf) };
165 Some(res) 150 Some(res)
166 } 151 }
167}
168 152
169// FIXME: merge `ast::RawString` and `ast::String`. 153 pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
170impl HasStringValue for ast::RawString {
171 fn value(&self) -> Option<Cow<'_, str>> {
172 let text = self.text().as_str(); 154 let text = self.text().as_str();
173 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; 155 let offsets = QuoteOffsets::new(text)?;
174 Some(Cow::Borrowed(text)) 156 let o = self.syntax().text_range().start();
157 let offsets = QuoteOffsets {
158 quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o),
159 contents: offsets.contents + o,
160 };
161 Some(offsets)
162 }
163 pub fn text_range_between_quotes(&self) -> Option<TextRange> {
164 self.quote_offsets().map(|it| it.contents)
165 }
166 pub fn open_quote_text_range(&self) -> Option<TextRange> {
167 self.quote_offsets().map(|it| it.quotes.0)
168 }
169 pub fn close_quote_text_range(&self) -> Option<TextRange> {
170 self.quote_offsets().map(|it| it.quotes.1)
175 } 171 }
176} 172}
177 173
178impl ast::RawString { 174impl ast::ByteString {
179 pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> { 175 pub fn is_raw(&self) -> bool {
180 let contents_range = self.text_range_between_quotes()?; 176 self.text().starts_with("br")
181 assert!(TextRange::up_to(contents_range.len()).contains_range(range));
182 Some(range + contents_range.start())
183 } 177 }
184} 178}
185 179
@@ -522,22 +516,6 @@ impl HasFormatSpecifier for ast::String {
522 } 516 }
523} 517}
524 518
525impl HasFormatSpecifier for ast::RawString {
526 fn char_ranges(
527 &self,
528 ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
529 let text = self.text().as_str();
530 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
531 let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
532
533 let mut res = Vec::with_capacity(text.len());
534 for (idx, c) in text.char_indices() {
535 res.push((TextRange::at(idx.try_into().unwrap(), TextSize::of(c)) + offset, Ok(c)));
536 }
537 Some(res)
538 }
539}
540
541impl ast::IntNumber { 519impl ast::IntNumber {
542 #[rustfmt::skip] 520 #[rustfmt::skip]
543 pub(crate) const SUFFIXES: &'static [&'static str] = &[ 521 pub(crate) const SUFFIXES: &'static [&'static str] = &[
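With HasQuotes and HasStringValue gone, ast::String itself now exposes is_raw(), the quote ranges, and value(): raw literals borrow the text between the quotes unchanged, while cooked literals go through escape processing. A rough std-only sketch of that split, operating directly on the literal text (simplified: it handles only a few escapes, unlike the rustc_lexer-based original, and the helper name is made up):

    use std::borrow::Cow;

    /// Simplified stand-in for ast::String::value(): given the full literal
    /// text (quotes and `r`/`#` included), return the string's contents.
    fn string_value(text: &str) -> Option<Cow<'_, str>> {
        let is_raw = text.starts_with('r');
        if is_raw {
            // r##"..."## : strip the `r`, the `#`s and the quotes, borrow the rest.
            let hashes = text[1..].chars().take_while(|&c| c == '#').count();
            let inner = text.get(1 + hashes + 1..text.len() - 1 - hashes)?;
            return Some(Cow::Borrowed(inner));
        }
        // "..." : strip the quotes, then process a small subset of escapes.
        let inner = text.get(1..text.len().checked_sub(1)?)?;
        if !inner.contains('\\') {
            return Some(Cow::Borrowed(inner));
        }
        let mut buf = String::with_capacity(inner.len());
        let mut chars = inner.chars();
        while let Some(c) = chars.next() {
            if c != '\\' {
                buf.push(c);
                continue;
            }
            match chars.next()? {
                'n' => buf.push('\n'),
                '\\' => buf.push('\\'),
                '"' => buf.push('"'),
                _ => return None, // unsupported escape in this toy version
            }
        }
        Some(Cow::Owned(buf))
    }

    fn main() {
        // Raw: contents are borrowed verbatim, backslashes stay as written.
        assert_eq!(string_value(r##"r#"a\nb"#"##).as_deref(), Some(r"a\nb"));
        // Cooked: escapes are interpreted, so an owned buffer is built.
        assert_eq!(string_value("\"a\\nb\"").as_deref(), Some("a\nb"));
    }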
diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs
index 5674ecb84..8afd7e53b 100644
--- a/crates/syntax/src/parsing/lexer.rs
+++ b/crates/syntax/src/parsing/lexer.rs
@@ -235,7 +235,7 @@ fn rustc_token_kind_to_syntax_kind(
235 RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols", 235 RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols",
236 }; 236 };
237 }; 237 };
238 RAW_STRING 238 STRING
239 } 239 }
240 rustc_lexer::LiteralKind::RawByteStr { err: raw_str_err, .. } => { 240 rustc_lexer::LiteralKind::RawByteStr { err: raw_str_err, .. } => {
241 if let Some(raw_str_err) = raw_str_err { 241 if let Some(raw_str_err) = raw_str_err {
@@ -250,7 +250,7 @@ fn rustc_token_kind_to_syntax_kind(
250 }; 250 };
251 }; 251 };
252 252
253 RAW_BYTE_STRING 253 BYTE_STRING
254 } 254 }
255 }; 255 };
256 256
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 4149f856a..190f5f67a 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -44,7 +44,7 @@ fn reparse_token<'node>(
44 let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); 44 let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone();
45 let prev_token_kind = prev_token.kind(); 45 let prev_token_kind = prev_token.kind();
46 match prev_token_kind { 46 match prev_token_kind {
47 WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { 47 WHITESPACE | COMMENT | IDENT | STRING => {
48 if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT { 48 if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT {
49 // removing a new line may extends previous token 49 // removing a new line may extends previous token
50 let deleted_range = edit.delete - prev_token.text_range().start(); 50 let deleted_range = edit.delete - prev_token.text_range().start();
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 0f9a5e8ae..62a37c50a 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -4,7 +4,7 @@ mod block;
4 4
5use crate::{ 5use crate::{
6 algo, ast, match_ast, AstNode, SyntaxError, 6 algo, ast, match_ast, AstNode, SyntaxError,
7 SyntaxKind::{BYTE, BYTE_STRING, CHAR, CONST, FN, INT_NUMBER, STRING, TYPE_ALIAS}, 7 SyntaxKind::{BYTE, CHAR, CONST, FN, INT_NUMBER, TYPE_ALIAS},
8 SyntaxNode, SyntaxToken, TextSize, T, 8 SyntaxNode, SyntaxToken, TextSize, T,
9}; 9};
10use rowan::Direction; 10use rowan::Direction;
@@ -121,18 +121,19 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
121 acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off)); 121 acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off));
122 }; 122 };
123 123
124 match token.kind() { 124 if let Some(s) = literal.as_string() {
125 BYTE => { 125 if !s.is_raw() {
126 if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) { 126 if let Some(without_quotes) = unquote(text, 1, '"') {
127 push_err(2, e); 127 unescape_literal(without_quotes, Mode::Str, &mut |range, char| {
128 } 128 if let Err(err) = char {
129 } 129 push_err(1, (range.start, err));
130 CHAR => { 130 }
131 if let Some(Err(e)) = unquote(text, 1, '\'').map(unescape_char) { 131 })
132 push_err(1, e);
133 } 132 }
134 } 133 }
135 BYTE_STRING => { 134 }
135 if let Some(s) = literal.as_byte_string() {
136 if !s.is_raw() {
136 if let Some(without_quotes) = unquote(text, 2, '"') { 137 if let Some(without_quotes) = unquote(text, 2, '"') {
137 unescape_byte_literal(without_quotes, Mode::ByteStr, &mut |range, char| { 138 unescape_byte_literal(without_quotes, Mode::ByteStr, &mut |range, char| {
138 if let Err(err) = char { 139 if let Err(err) = char {
@@ -141,13 +142,17 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
141 }) 142 })
142 } 143 }
143 } 144 }
144 STRING => { 145 }
145 if let Some(without_quotes) = unquote(text, 1, '"') { 146
146 unescape_literal(without_quotes, Mode::Str, &mut |range, char| { 147 match token.kind() {
147 if let Err(err) = char { 148 BYTE => {
148 push_err(1, (range.start, err)); 149 if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) {
149 } 150 push_err(2, e);
150 }) 151 }
152 }
153 CHAR => {
154 if let Some(Err(e)) = unquote(text, 1, '\'').map(unescape_char) {
155 push_err(1, e);
151 } 156 }
152 } 157 }
153 _ => (), 158 _ => (),
diff --git a/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt
index 6fd59ccc0..54e707b73 100644
--- a/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt
+++ b/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt
@@ -1,2 +1,2 @@
1RAW_STRING 4 "r##\"" 1STRING 4 "r##\""
2> error0..4 token("r##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) 2> error0..4 token("r##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)
diff --git a/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt b/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt
index 8d9ca0e8f..1f9889775 100644
--- a/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt
+++ b/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt
@@ -1,2 +1,2 @@
1RAW_STRING 8 "r##\"🦀" 1STRING 8 "r##\"🦀"
2> error0..8 token("r##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) 2> error0..8 token("r##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)
diff --git a/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt
index a906380c7..93f6f72ae 100644
--- a/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt
+++ b/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt
@@ -1,2 +1,2 @@
1RAW_STRING 8 "r##\"\\x7f" 1STRING 8 "r##\"\\x7f"
2> error0..8 token("r##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) 2> error0..8 token("r##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)
diff --git a/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt
index 5667c6149..1d2ebc60f 100644
--- a/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt
+++ b/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt
@@ -1,2 +1,2 @@
1RAW_STRING 12 "r##\"\\u{20AA}" 1STRING 12 "r##\"\\u{20AA}"
2> error0..12 token("r##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) 2> error0..12 token("r##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)
diff --git a/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt b/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt
index 141c8268e..c567ab7e2 100644
--- a/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt
+++ b/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt
@@ -1,2 +1,2 @@
1RAW_STRING 5 "r##\" " 1STRING 5 "r##\" "
2> error0..5 token("r##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) 2> error0..5 token("r##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)
diff --git a/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt b/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt
index f61d4cc91..343b20323 100644
--- a/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt
+++ b/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt
@@ -1,2 +1,2 @@
1RAW_STRING 5 "r##\"\\" 1STRING 5 "r##\"\\"
2> error0..5 token("r##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) 2> error0..5 token("r##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)
diff --git a/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt
index 12e2c0fc0..041a42737 100644
--- a/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt
+++ b/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt
@@ -1,2 +1,2 @@
1RAW_STRING 6 "r##\"\\n" 1STRING 6 "r##\"\\n"
2> error0..6 token("r##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) 2> error0..6 token("r##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)
diff --git a/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt
index fe12cb5fc..efaa1cafd 100644
--- a/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt
+++ b/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt
@@ -1,2 +1,2 @@
1RAW_BYTE_STRING 5 "br##\"" 1BYTE_STRING 5 "br##\""
2> error0..5 token("br##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) 2> error0..5 token("br##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)
diff --git a/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt b/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt
index 5be2a7861..b6c938f94 100644
--- a/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt
+++ b/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt
@@ -1,2 +1,2 @@
1RAW_BYTE_STRING 9 "br##\"🦀" 1BYTE_STRING 9 "br##\"🦀"
2> error0..9 token("br##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) 2> error0..9 token("br##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)
diff --git a/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt
index 6cbe08d07..f82efe49a 100644
--- a/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt
+++ b/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt
@@ -1,2 +1,2 @@
1RAW_BYTE_STRING 9 "br##\"\\x7f" 1BYTE_STRING 9 "br##\"\\x7f"
2> error0..9 token("br##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) 2> error0..9 token("br##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)
diff --git a/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt
index f56a4f984..4e4a57696 100644
--- a/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt
+++ b/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt
@@ -1,2 +1,2 @@
1RAW_BYTE_STRING 13 "br##\"\\u{20AA}" 1BYTE_STRING 13 "br##\"\\u{20AA}"
2> error0..13 token("br##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) 2> error0..13 token("br##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)
diff --git a/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt b/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt
index 3d32ce34e..0018c8623 100644
--- a/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt
+++ b/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt
@@ -1,2 +1,2 @@
1RAW_BYTE_STRING 6 "br##\" " 1BYTE_STRING 6 "br##\" "
2> error0..6 token("br##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) 2> error0..6 token("br##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)
diff --git a/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt b/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt
index 320fea177..c3ba4ae82 100644
--- a/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt
+++ b/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt
@@ -1,2 +1,2 @@
1RAW_BYTE_STRING 6 "br##\"\\" 1BYTE_STRING 6 "br##\"\\"
2> error0..6 token("br##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) 2> error0..6 token("br##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)
diff --git a/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt
index b3a56380c..7bda72276 100644
--- a/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt
+++ b/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt
@@ -1,2 +1,2 @@
1RAW_BYTE_STRING 7 "br##\"\\n" 1BYTE_STRING 7 "br##\"\\n"
2> error0..7 token("br##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) 2> error0..7 token("br##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)
diff --git a/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt
index 5af1e2d97..ce92d2ff7 100644
--- a/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt
+++ b/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt
@@ -1,2 +1,2 @@
1RAW_STRING 3 "r##" 1STRING 3 "r##"
2> error0..3 token("r##") msg(Missing `"` symbol after `#` symbols to begin the raw string literal) 2> error0..3 token("r##") msg(Missing `"` symbol after `#` symbols to begin the raw string literal)
diff --git a/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt
index aec7afd92..a75d9030c 100644
--- a/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt
+++ b/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt
@@ -1,2 +1,2 @@
1RAW_BYTE_STRING 4 "br##" 1BYTE_STRING 4 "br##"
2> error0..4 token("br##") msg(Missing `"` symbol after `#` symbols to begin the raw byte string literal) 2> error0..4 token("br##") msg(Missing `"` symbol after `#` symbols to begin the raw byte string literal)
diff --git a/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt b/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt
index e22fe5374..516e0b78e 100644
--- a/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt
+++ b/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt
@@ -1,4 +1,4 @@
1RAW_STRING 4 "r## " 1STRING 4 "r## "
2IDENT 1 "I" 2IDENT 1 "I"
3WHITESPACE 1 " " 3WHITESPACE 1 " "
4IDENT 4 "lack" 4IDENT 4 "lack"
diff --git a/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt b/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt
index d74ea4c27..2f8a6f5f2 100644
--- a/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt
+++ b/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt
@@ -1,4 +1,4 @@
1RAW_BYTE_STRING 5 "br## " 1BYTE_STRING 5 "br## "
2IDENT 1 "I" 2IDENT 1 "I"
3WHITESPACE 1 " " 3WHITESPACE 1 " "
4IDENT 4 "lack" 4IDENT 4 "lack"
diff --git a/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt b/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt
index bc03b51a8..e61ad99be 100644
--- a/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt
+++ b/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt
@@ -4,13 +4,13 @@ BYTE 4 "b\'x\'"
4WHITESPACE 1 " " 4WHITESPACE 1 " "
5BYTE_STRING 6 "b\"foo\"" 5BYTE_STRING 6 "b\"foo\""
6WHITESPACE 1 " " 6WHITESPACE 1 " "
7RAW_BYTE_STRING 4 "br\"\"" 7BYTE_STRING 4 "br\"\""
8WHITESPACE 1 "\n" 8WHITESPACE 1 "\n"
9BYTE 6 "b\'\'suf" 9BYTE 6 "b\'\'suf"
10WHITESPACE 1 " " 10WHITESPACE 1 " "
11BYTE_STRING 5 "b\"\"ix" 11BYTE_STRING 5 "b\"\"ix"
12WHITESPACE 1 " " 12WHITESPACE 1 " "
13RAW_BYTE_STRING 6 "br\"\"br" 13BYTE_STRING 6 "br\"\"br"
14WHITESPACE 1 "\n" 14WHITESPACE 1 "\n"
15BYTE 5 "b\'\\n\'" 15BYTE 5 "b\'\\n\'"
16WHITESPACE 1 " " 16WHITESPACE 1 " "
diff --git a/crates/syntax/test_data/lexer/ok/0009_strings.txt b/crates/syntax/test_data/lexer/ok/0009_strings.txt
index 4cb4d711d..988a8877b 100644
--- a/crates/syntax/test_data/lexer/ok/0009_strings.txt
+++ b/crates/syntax/test_data/lexer/ok/0009_strings.txt
@@ -1,6 +1,6 @@
1STRING 7 "\"hello\"" 1STRING 7 "\"hello\""
2WHITESPACE 1 " " 2WHITESPACE 1 " "
3RAW_STRING 8 "r\"world\"" 3STRING 8 "r\"world\""
4WHITESPACE 1 " " 4WHITESPACE 1 " "
5STRING 17 "\"\\n\\\"\\\\no escape\"" 5STRING 17 "\"\\n\\\"\\\\no escape\""
6WHITESPACE 1 " " 6WHITESPACE 1 " "
diff --git a/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt b/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt
index 9cf0957d1..db0d5ffd1 100644
--- a/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt
+++ b/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt
@@ -1,2 +1,2 @@
1RAW_STRING 36 "r###\"this is a r##\"raw\"## string\"###" 1STRING 36 "r###\"this is a r##\"raw\"## string\"###"
2WHITESPACE 1 "\n" 2WHITESPACE 1 "\n"
diff --git a/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast b/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast
index 9a87b5b93..ae838105d 100644
--- a/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast
+++ b/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast
@@ -104,7 +104,7 @@ [email protected]
104 [email protected] "=" 104 [email protected] "="
105 [email protected] " " 105 [email protected] " "
106 [email protected] 106 [email protected]
107 RAW_[email protected] "r\"d\"" 107 [email protected] "r\"d\""
108 [email protected] ";" 108 [email protected] ";"
109 [email protected] "\n " 109 [email protected] "\n "
110 [email protected] 110 [email protected]
@@ -128,7 +128,7 @@ [email protected]
128 [email protected] "=" 128 [email protected] "="
129 [email protected] " " 129 [email protected] " "
130 [email protected] 130 [email protected]
131 RAW_[email protected] "br\"f\"" 131 [email protected] "br\"f\""
132 [email protected] ";" 132 [email protected] ";"
133 [email protected] "\n" 133 [email protected] "\n"
134 [email protected] "}" 134 [email protected] "}"