author | Aleksey Kladov <[email protected]> | 2020-11-06 21:21:56 +0000
committer | Aleksey Kladov <[email protected]> | 2020-11-06 21:23:14 +0000
commit | 5ba4f949c23dcf53f34995c90b7c01e6c641b1f0 (patch)
tree | fe5064dde4e948a776c87d38fba972903acad3ec
parent | 6725dcf847300b9cddcbb061b159317113860f31 (diff)
Kill RAW_ literals
Syntactically, they are indistinguishable from non-raw versions, so it
doesn't make sense to separate them *at the syntax* level.
53 files changed, 141 insertions, 164 deletions
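The merge works because raw-ness is fully recoverable from the token text: a raw string literal always starts with `r` (and a raw byte string with `br`), so a single `STRING` / `BYTE_STRING` kind can carry both forms, and callers that care simply ask the token via the new `ast::String::is_raw()` and the generic `ctx.find_token_at_offset::<ast::String>()` used in the hunks below. A minimal standalone sketch of that idea, using hypothetical helpers `is_raw_string` and `string_contents` on plain token text (not rust-analyzer's actual API):

```rust
// Sketch only: raw and non-raw string literals can share one token kind
// because raw-ness is recoverable from the token text alone.
// `is_raw_string` and `string_contents` are hypothetical helpers.
fn is_raw_string(token_text: &str) -> bool {
    // Non-raw string literals start with `"`, raw ones with `r`.
    token_text.starts_with('r')
}

fn string_contents(token_text: &str) -> Option<&str> {
    // Strip the optional `r` prefix, any `#` delimiters, then the quotes.
    let text = token_text.strip_prefix('r').unwrap_or(token_text);
    let text = text.trim_start_matches('#').trim_end_matches('#');
    text.strip_prefix('"')?.strip_suffix('"')
}

fn main() {
    assert!(!is_raw_string("\"hello\""));
    assert!(is_raw_string("r#\"hello\"#"));
    assert_eq!(string_contents("\"hello\""), Some("hello"));
    assert_eq!(string_contents("r#\"hello\"#"), Some("hello"));
}
```

In the real change, `ast::String::is_raw()` does the `starts_with('r')` check and `HasStringValue::value()` slices the text between the quotes (borrowed for raw strings, unescaped for ordinary ones), which is why the separate `RAW_STRING` / `RAW_BYTE_STRING` kinds and the duplicated `ast::RawString` impls can be deleted.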
diff --git a/crates/assists/src/assist_context.rs b/crates/assists/src/assist_context.rs index d11fee196..fcfe2d6ee 100644 --- a/crates/assists/src/assist_context.rs +++ b/crates/assists/src/assist_context.rs | |||
@@ -12,7 +12,7 @@ use ide_db::{ | |||
12 | }; | 12 | }; |
13 | use syntax::{ | 13 | use syntax::{ |
14 | algo::{self, find_node_at_offset, SyntaxRewriter}, | 14 | algo::{self, find_node_at_offset, SyntaxRewriter}, |
15 | AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, TextSize, | 15 | AstNode, AstToken, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, TextSize, |
16 | TokenAtOffset, | 16 | TokenAtOffset, |
17 | }; | 17 | }; |
18 | use text_edit::{TextEdit, TextEditBuilder}; | 18 | use text_edit::{TextEdit, TextEditBuilder}; |
@@ -81,9 +81,12 @@ impl<'a> AssistContext<'a> { | |||
81 | pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> { | 81 | pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> { |
82 | self.source_file.syntax().token_at_offset(self.offset()) | 82 | self.source_file.syntax().token_at_offset(self.offset()) |
83 | } | 83 | } |
84 | pub(crate) fn find_token_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> { | 84 | pub(crate) fn find_token_syntax_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> { |
85 | self.token_at_offset().find(|it| it.kind() == kind) | 85 | self.token_at_offset().find(|it| it.kind() == kind) |
86 | } | 86 | } |
87 | pub(crate) fn find_token_at_offset<T: AstToken>(&self) -> Option<T> { | ||
88 | self.token_at_offset().find_map(T::cast) | ||
89 | } | ||
87 | pub(crate) fn find_node_at_offset<N: AstNode>(&self) -> Option<N> { | 90 | pub(crate) fn find_node_at_offset<N: AstNode>(&self) -> Option<N> { |
88 | find_node_at_offset(self.source_file.syntax(), self.offset()) | 91 | find_node_at_offset(self.source_file.syntax(), self.offset()) |
89 | } | 92 | } |
diff --git a/crates/assists/src/handlers/add_turbo_fish.rs b/crates/assists/src/handlers/add_turbo_fish.rs index e3d84d698..1f486c013 100644 --- a/crates/assists/src/handlers/add_turbo_fish.rs +++ b/crates/assists/src/handlers/add_turbo_fish.rs | |||
@@ -25,7 +25,7 @@ use crate::{ | |||
25 | // } | 25 | // } |
26 | // ``` | 26 | // ``` |
27 | pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 27 | pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
28 | let ident = ctx.find_token_at_offset(SyntaxKind::IDENT).or_else(|| { | 28 | let ident = ctx.find_token_syntax_at_offset(SyntaxKind::IDENT).or_else(|| { |
29 | let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?; | 29 | let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?; |
30 | if arg_list.args().count() > 0 { | 30 | if arg_list.args().count() > 0 { |
31 | return None; | 31 | return None; |
diff --git a/crates/assists/src/handlers/expand_glob_import.rs b/crates/assists/src/handlers/expand_glob_import.rs index 316a58d88..853266395 100644 --- a/crates/assists/src/handlers/expand_glob_import.rs +++ b/crates/assists/src/handlers/expand_glob_import.rs | |||
@@ -41,7 +41,7 @@ use crate::{ | |||
41 | // fn qux(bar: Bar, baz: Baz) {} | 41 | // fn qux(bar: Bar, baz: Baz) {} |
42 | // ``` | 42 | // ``` |
43 | pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 43 | pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
44 | let star = ctx.find_token_at_offset(T![*])?; | 44 | let star = ctx.find_token_syntax_at_offset(T![*])?; |
45 | let (parent, mod_path) = find_parent_and_path(&star)?; | 45 | let (parent, mod_path) = find_parent_and_path(&star)?; |
46 | let target_module = match ctx.sema.resolve_path(&mod_path)? { | 46 | let target_module = match ctx.sema.resolve_path(&mod_path)? { |
47 | PathResolution::Def(ModuleDef::Module(it)) => it, | 47 | PathResolution::Def(ModuleDef::Module(it)) => it, |
diff --git a/crates/assists/src/handlers/flip_comma.rs b/crates/assists/src/handlers/flip_comma.rs index 5c69db53e..64b4b1a76 100644 --- a/crates/assists/src/handlers/flip_comma.rs +++ b/crates/assists/src/handlers/flip_comma.rs | |||
@@ -18,7 +18,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; | |||
18 | // } | 18 | // } |
19 | // ``` | 19 | // ``` |
20 | pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 20 | pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
21 | let comma = ctx.find_token_at_offset(T![,])?; | 21 | let comma = ctx.find_token_syntax_at_offset(T![,])?; |
22 | let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; | 22 | let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; |
23 | let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; | 23 | let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; |
24 | 24 | ||
diff --git a/crates/assists/src/handlers/flip_trait_bound.rs b/crates/assists/src/handlers/flip_trait_bound.rs index 347e79b1d..92ee42181 100644 --- a/crates/assists/src/handlers/flip_trait_bound.rs +++ b/crates/assists/src/handlers/flip_trait_bound.rs | |||
@@ -20,7 +20,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; | |||
20 | pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 20 | pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
21 | // We want to replicate the behavior of `flip_binexpr` by only suggesting | 21 | // We want to replicate the behavior of `flip_binexpr` by only suggesting |
22 | // the assist when the cursor is on a `+` | 22 | // the assist when the cursor is on a `+` |
23 | let plus = ctx.find_token_at_offset(T![+])?; | 23 | let plus = ctx.find_token_syntax_at_offset(T![+])?; |
24 | 24 | ||
25 | // Make sure we're in a `TypeBoundList` | 25 | // Make sure we're in a `TypeBoundList` |
26 | if ast::TypeBoundList::cast(plus.parent()).is_none() { | 26 | if ast::TypeBoundList::cast(plus.parent()).is_none() { |
diff --git a/crates/assists/src/handlers/introduce_named_lifetime.rs b/crates/assists/src/handlers/introduce_named_lifetime.rs index 5f623e5f7..4cc8dae65 100644 --- a/crates/assists/src/handlers/introduce_named_lifetime.rs +++ b/crates/assists/src/handlers/introduce_named_lifetime.rs | |||
@@ -36,7 +36,7 @@ static ASSIST_LABEL: &str = "Introduce named lifetime"; | |||
36 | // FIXME: should also add support for the case fun(f: &Foo) -> &<|>Foo | 36 | // FIXME: should also add support for the case fun(f: &Foo) -> &<|>Foo |
37 | pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 37 | pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
38 | let lifetime_token = ctx | 38 | let lifetime_token = ctx |
39 | .find_token_at_offset(SyntaxKind::LIFETIME) | 39 | .find_token_syntax_at_offset(SyntaxKind::LIFETIME) |
40 | .filter(|lifetime| lifetime.text() == "'_")?; | 40 | .filter(|lifetime| lifetime.text() == "'_")?; |
41 | if let Some(fn_def) = lifetime_token.ancestors().find_map(ast::Fn::cast) { | 41 | if let Some(fn_def) = lifetime_token.ancestors().find_map(ast::Fn::cast) { |
42 | generate_fn_def_assist(acc, &fn_def, lifetime_token.text_range()) | 42 | generate_fn_def_assist(acc, &fn_def, lifetime_token.text_range()) |
diff --git a/crates/assists/src/handlers/invert_if.rs b/crates/assists/src/handlers/invert_if.rs index 461fcf862..ea722b91b 100644 --- a/crates/assists/src/handlers/invert_if.rs +++ b/crates/assists/src/handlers/invert_if.rs | |||
@@ -29,7 +29,7 @@ use crate::{ | |||
29 | // ``` | 29 | // ``` |
30 | 30 | ||
31 | pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 31 | pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
32 | let if_keyword = ctx.find_token_at_offset(T![if])?; | 32 | let if_keyword = ctx.find_token_syntax_at_offset(T![if])?; |
33 | let expr = ast::IfExpr::cast(if_keyword.parent())?; | 33 | let expr = ast::IfExpr::cast(if_keyword.parent())?; |
34 | let if_range = if_keyword.text_range(); | 34 | let if_range = if_keyword.text_range(); |
35 | let cursor_in_range = if_range.contains_range(ctx.frange.range); | 35 | let cursor_in_range = if_range.contains_range(ctx.frange.range); |
diff --git a/crates/assists/src/handlers/raw_string.rs b/crates/assists/src/handlers/raw_string.rs index 9ddd116e0..7f9f01c9c 100644 --- a/crates/assists/src/handlers/raw_string.rs +++ b/crates/assists/src/handlers/raw_string.rs | |||
@@ -2,9 +2,7 @@ use std::borrow::Cow; | |||
2 | 2 | ||
3 | use syntax::{ | 3 | use syntax::{ |
4 | ast::{self, HasQuotes, HasStringValue}, | 4 | ast::{self, HasQuotes, HasStringValue}, |
5 | AstToken, | 5 | AstToken, TextRange, TextSize, |
6 | SyntaxKind::{RAW_STRING, STRING}, | ||
7 | TextRange, TextSize, | ||
8 | }; | 6 | }; |
9 | use test_utils::mark; | 7 | use test_utils::mark; |
10 | 8 | ||
@@ -26,7 +24,10 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; | |||
26 | // } | 24 | // } |
27 | // ``` | 25 | // ``` |
28 | pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 26 | pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
29 | let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; | 27 | let token = ctx.find_token_at_offset::<ast::String>()?; |
28 | if token.is_raw() { | ||
29 | return None; | ||
30 | } | ||
30 | let value = token.value()?; | 31 | let value = token.value()?; |
31 | let target = token.syntax().text_range(); | 32 | let target = token.syntax().text_range(); |
32 | acc.add( | 33 | acc.add( |
@@ -65,7 +66,10 @@ pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option< | |||
65 | // } | 66 | // } |
66 | // ``` | 67 | // ``` |
67 | pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 68 | pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
68 | let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; | 69 | let token = ctx.find_token_at_offset::<ast::String>()?; |
70 | if !token.is_raw() { | ||
71 | return None; | ||
72 | } | ||
69 | let value = token.value()?; | 73 | let value = token.value()?; |
70 | let target = token.syntax().text_range(); | 74 | let target = token.syntax().text_range(); |
71 | acc.add( | 75 | acc.add( |
@@ -104,11 +108,15 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Optio | |||
104 | // } | 108 | // } |
105 | // ``` | 109 | // ``` |
106 | pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 110 | pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
107 | let token = ctx.find_token_at_offset(RAW_STRING)?; | 111 | let token = ctx.find_token_at_offset::<ast::String>()?; |
108 | let target = token.text_range(); | 112 | if !token.is_raw() { |
113 | return None; | ||
114 | } | ||
115 | let text_range = token.syntax().text_range(); | ||
116 | let target = text_range; | ||
109 | acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| { | 117 | acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| { |
110 | edit.insert(token.text_range().start() + TextSize::of('r'), "#"); | 118 | edit.insert(text_range.start() + TextSize::of('r'), "#"); |
111 | edit.insert(token.text_range().end(), "#"); | 119 | edit.insert(text_range.end(), "#"); |
112 | }) | 120 | }) |
113 | } | 121 | } |
114 | 122 | ||
@@ -128,7 +136,10 @@ pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | |||
128 | // } | 136 | // } |
129 | // ``` | 137 | // ``` |
130 | pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 138 | pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
131 | let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; | 139 | let token = ctx.find_token_at_offset::<ast::String>()?; |
140 | if !token.is_raw() { | ||
141 | return None; | ||
142 | } | ||
132 | 143 | ||
133 | let text = token.text().as_str(); | 144 | let text = token.text().as_str(); |
134 | if !text.starts_with("r#") && text.ends_with('#') { | 145 | if !text.starts_with("r#") && text.ends_with('#') { |
diff --git a/crates/assists/src/handlers/remove_mut.rs b/crates/assists/src/handlers/remove_mut.rs index 44f41daa9..575b271f7 100644 --- a/crates/assists/src/handlers/remove_mut.rs +++ b/crates/assists/src/handlers/remove_mut.rs | |||
@@ -18,7 +18,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; | |||
18 | // } | 18 | // } |
19 | // ``` | 19 | // ``` |
20 | pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 20 | pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
21 | let mut_token = ctx.find_token_at_offset(T![mut])?; | 21 | let mut_token = ctx.find_token_syntax_at_offset(T![mut])?; |
22 | let delete_from = mut_token.text_range().start(); | 22 | let delete_from = mut_token.text_range().start(); |
23 | let delete_to = match mut_token.next_token() { | 23 | let delete_to = match mut_token.next_token() { |
24 | Some(it) if it.kind() == SyntaxKind::WHITESPACE => it.text_range().end(), | 24 | Some(it) if it.kind() == SyntaxKind::WHITESPACE => it.text_range().end(), |
diff --git a/crates/assists/src/handlers/replace_let_with_if_let.rs b/crates/assists/src/handlers/replace_let_with_if_let.rs index a5bcbda24..69d3b08d3 100644 --- a/crates/assists/src/handlers/replace_let_with_if_let.rs +++ b/crates/assists/src/handlers/replace_let_with_if_let.rs | |||
@@ -37,7 +37,7 @@ use ide_db::ty_filter::TryEnum; | |||
37 | // fn compute() -> Option<i32> { None } | 37 | // fn compute() -> Option<i32> { None } |
38 | // ``` | 38 | // ``` |
39 | pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 39 | pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
40 | let let_kw = ctx.find_token_at_offset(T![let])?; | 40 | let let_kw = ctx.find_token_syntax_at_offset(T![let])?; |
41 | let let_stmt = let_kw.ancestors().find_map(ast::LetStmt::cast)?; | 41 | let let_stmt = let_kw.ancestors().find_map(ast::LetStmt::cast)?; |
42 | let init = let_stmt.initializer()?; | 42 | let init = let_stmt.initializer()?; |
43 | let original_pat = let_stmt.pat()?; | 43 | let original_pat = let_stmt.pat()?; |
diff --git a/crates/assists/src/handlers/replace_string_with_char.rs b/crates/assists/src/handlers/replace_string_with_char.rs index 4ca87a8ec..6d227e883 100644 --- a/crates/assists/src/handlers/replace_string_with_char.rs +++ b/crates/assists/src/handlers/replace_string_with_char.rs | |||
@@ -22,7 +22,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; | |||
22 | // } | 22 | // } |
23 | // ``` | 23 | // ``` |
24 | pub(crate) fn replace_string_with_char(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 24 | pub(crate) fn replace_string_with_char(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
25 | let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; | 25 | let token = ctx.find_token_syntax_at_offset(STRING).and_then(ast::String::cast)?; |
26 | let value = token.value()?; | 26 | let value = token.value()?; |
27 | let target = token.syntax().text_range(); | 27 | let target = token.syntax().text_range(); |
28 | 28 | ||
diff --git a/crates/assists/src/handlers/split_import.rs b/crates/assists/src/handlers/split_import.rs index 15e67eaa1..ef1f6b8a1 100644 --- a/crates/assists/src/handlers/split_import.rs +++ b/crates/assists/src/handlers/split_import.rs | |||
@@ -16,7 +16,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; | |||
16 | // use std::{collections::HashMap}; | 16 | // use std::{collections::HashMap}; |
17 | // ``` | 17 | // ``` |
18 | pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 18 | pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
19 | let colon_colon = ctx.find_token_at_offset(T![::])?; | 19 | let colon_colon = ctx.find_token_syntax_at_offset(T![::])?; |
20 | let path = ast::Path::cast(colon_colon.parent())?.qualifier()?; | 20 | let path = ast::Path::cast(colon_colon.parent())?.qualifier()?; |
21 | let top_path = successors(Some(path.clone()), |it| it.parent_path()).last()?; | 21 | let top_path = successors(Some(path.clone()), |it| it.parent_path()).last()?; |
22 | 22 | ||
diff --git a/crates/assists/src/handlers/unwrap_block.rs b/crates/assists/src/handlers/unwrap_block.rs index 3851aeb3e..36ef871b9 100644 --- a/crates/assists/src/handlers/unwrap_block.rs +++ b/crates/assists/src/handlers/unwrap_block.rs | |||
@@ -29,7 +29,7 @@ pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext) -> Option<()> | |||
29 | let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite); | 29 | let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite); |
30 | let assist_label = "Unwrap block"; | 30 | let assist_label = "Unwrap block"; |
31 | 31 | ||
32 | let l_curly_token = ctx.find_token_at_offset(T!['{'])?; | 32 | let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?; |
33 | let mut block = ast::BlockExpr::cast(l_curly_token.parent())?; | 33 | let mut block = ast::BlockExpr::cast(l_curly_token.parent())?; |
34 | let mut parent = block.syntax().parent()?; | 34 | let mut parent = block.syntax().parent()?; |
35 | if ast::MatchArm::can_cast(parent.kind()) { | 35 | if ast::MatchArm::can_cast(parent.kind()) { |
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs index 3ee0af8ad..0971f7701 100644 --- a/crates/ide/src/extend_selection.rs +++ b/crates/ide/src/extend_selection.rs | |||
@@ -35,7 +35,7 @@ fn try_extend_selection( | |||
35 | ) -> Option<TextRange> { | 35 | ) -> Option<TextRange> { |
36 | let range = frange.range; | 36 | let range = frange.range; |
37 | 37 | ||
38 | let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; | 38 | let string_kinds = [COMMENT, STRING, BYTE_STRING]; |
39 | let list_kinds = [ | 39 | let list_kinds = [ |
40 | RECORD_PAT_FIELD_LIST, | 40 | RECORD_PAT_FIELD_LIST, |
41 | MATCH_ARM_LIST, | 41 | MATCH_ARM_LIST, |
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index efcc8ecfe..05bafe9c8 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs | |||
@@ -179,10 +179,12 @@ pub(crate) fn highlight( | |||
179 | element.clone() | 179 | element.clone() |
180 | }; | 180 | }; |
181 | 181 | ||
182 | if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) { | 182 | if let Some(token) = element.as_token().cloned().and_then(ast::String::cast) { |
183 | let expanded = element_to_highlight.as_token().unwrap().clone(); | 183 | if token.is_raw() { |
184 | if injection::highlight_injection(&mut stack, &sema, token, expanded).is_some() { | 184 | let expanded = element_to_highlight.as_token().unwrap().clone(); |
185 | continue; | 185 | if injection::highlight_injection(&mut stack, &sema, token, expanded).is_some() { |
186 | continue; | ||
187 | } | ||
186 | } | 188 | } |
187 | } | 189 | } |
188 | 190 | ||
@@ -214,10 +216,6 @@ pub(crate) fn highlight( | |||
214 | } | 216 | } |
215 | stack.pop_and_inject(None); | 217 | stack.pop_and_inject(None); |
216 | } | 218 | } |
217 | } else if let Some(string) = | ||
218 | element_to_highlight.as_token().cloned().and_then(ast::RawString::cast) | ||
219 | { | ||
220 | format_string_highlighter.highlight_format_string(&mut stack, &string, range); | ||
221 | } | 219 | } |
222 | } | 220 | } |
223 | } | 221 | } |
@@ -532,7 +530,7 @@ fn highlight_element( | |||
532 | None => h.into(), | 530 | None => h.into(), |
533 | } | 531 | } |
534 | } | 532 | } |
535 | STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => HighlightTag::StringLiteral.into(), | 533 | STRING | BYTE_STRING => HighlightTag::StringLiteral.into(), |
536 | ATTR => HighlightTag::Attribute.into(), | 534 | ATTR => HighlightTag::Attribute.into(), |
537 | INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(), | 535 | INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(), |
538 | BYTE => HighlightTag::ByteLiteral.into(), | 536 | BYTE => HighlightTag::ByteLiteral.into(), |
diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs index 71bde24f0..42f27df5d 100644 --- a/crates/ide/src/syntax_highlighting/format.rs +++ b/crates/ide/src/syntax_highlighting/format.rs | |||
@@ -29,9 +29,7 @@ impl FormatStringHighlighter { | |||
29 | .children_with_tokens() | 29 | .children_with_tokens() |
30 | .filter(|t| t.kind() != SyntaxKind::WHITESPACE) | 30 | .filter(|t| t.kind() != SyntaxKind::WHITESPACE) |
31 | .nth(1) | 31 | .nth(1) |
32 | .filter(|e| { | 32 | .filter(|e| ast::String::can_cast(e.kind())) |
33 | ast::String::can_cast(e.kind()) || ast::RawString::can_cast(e.kind()) | ||
34 | }) | ||
35 | } | 33 | } |
36 | _ => {} | 34 | _ => {} |
37 | } | 35 | } |
diff --git a/crates/ide/src/syntax_highlighting/injection.rs b/crates/ide/src/syntax_highlighting/injection.rs index 59a74bc02..79f6b5359 100644 --- a/crates/ide/src/syntax_highlighting/injection.rs +++ b/crates/ide/src/syntax_highlighting/injection.rs | |||
@@ -15,7 +15,7 @@ use super::HighlightedRangeStack; | |||
15 | pub(super) fn highlight_injection( | 15 | pub(super) fn highlight_injection( |
16 | acc: &mut HighlightedRangeStack, | 16 | acc: &mut HighlightedRangeStack, |
17 | sema: &Semantics<RootDatabase>, | 17 | sema: &Semantics<RootDatabase>, |
18 | literal: ast::RawString, | 18 | literal: ast::String, |
19 | expanded: SyntaxToken, | 19 | expanded: SyntaxToken, |
20 | ) -> Option<()> { | 20 | ) -> Option<()> { |
21 | let active_parameter = ActiveParameter::at_token(&sema, expanded)?; | 21 | let active_parameter = ActiveParameter::at_token(&sema, expanded)?; |
diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs index 7941610d6..6dd05c05d 100644 --- a/crates/ide/src/syntax_tree.rs +++ b/crates/ide/src/syntax_tree.rs | |||
@@ -1,9 +1,7 @@ | |||
1 | use ide_db::base_db::{FileId, SourceDatabase}; | 1 | use ide_db::base_db::{FileId, SourceDatabase}; |
2 | use ide_db::RootDatabase; | 2 | use ide_db::RootDatabase; |
3 | use syntax::{ | 3 | use syntax::{ |
4 | algo, AstNode, NodeOrToken, SourceFile, | 4 | algo, AstNode, NodeOrToken, SourceFile, SyntaxKind::STRING, SyntaxToken, TextRange, TextSize, |
5 | SyntaxKind::{RAW_STRING, STRING}, | ||
6 | SyntaxToken, TextRange, TextSize, | ||
7 | }; | 5 | }; |
8 | 6 | ||
9 | // Feature: Show Syntax Tree | 7 | // Feature: Show Syntax Tree |
@@ -46,7 +44,7 @@ fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option< | |||
46 | // we'll attempt parsing it as rust syntax | 44 | // we'll attempt parsing it as rust syntax |
47 | // to provide the syntax tree of the contents of the string | 45 | // to provide the syntax tree of the contents of the string |
48 | match token.kind() { | 46 | match token.kind() { |
49 | STRING | RAW_STRING => syntax_tree_for_token(token, text_range), | 47 | STRING => syntax_tree_for_token(token, text_range), |
50 | _ => None, | 48 | _ => None, |
51 | } | 49 | } |
52 | } | 50 | } |
diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs index 4ab206a83..116b991a8 100644 --- a/crates/parser/src/grammar.rs +++ b/crates/parser/src/grammar.rs | |||
@@ -236,10 +236,7 @@ fn abi(p: &mut Parser) { | |||
236 | assert!(p.at(T![extern])); | 236 | assert!(p.at(T![extern])); |
237 | let abi = p.start(); | 237 | let abi = p.start(); |
238 | p.bump(T![extern]); | 238 | p.bump(T![extern]); |
239 | match p.current() { | 239 | p.eat(STRING); |
240 | STRING | RAW_STRING => p.bump_any(), | ||
241 | _ => (), | ||
242 | } | ||
243 | abi.complete(p, ABI); | 240 | abi.complete(p, ABI); |
244 | } | 241 | } |
245 | 242 | ||
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index 66a92a4e1..31f42f161 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs | |||
@@ -15,18 +15,8 @@ use super::*; | |||
15 | // let _ = b"e"; | 15 | // let _ = b"e"; |
16 | // let _ = br"f"; | 16 | // let _ = br"f"; |
17 | // } | 17 | // } |
18 | pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[ | 18 | pub(crate) const LITERAL_FIRST: TokenSet = |
19 | TRUE_KW, | 19 | TokenSet::new(&[TRUE_KW, FALSE_KW, INT_NUMBER, FLOAT_NUMBER, BYTE, CHAR, STRING, BYTE_STRING]); |
20 | FALSE_KW, | ||
21 | INT_NUMBER, | ||
22 | FLOAT_NUMBER, | ||
23 | BYTE, | ||
24 | CHAR, | ||
25 | STRING, | ||
26 | RAW_STRING, | ||
27 | BYTE_STRING, | ||
28 | RAW_BYTE_STRING, | ||
29 | ]); | ||
30 | 20 | ||
31 | pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> { | 21 | pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> { |
32 | if !p.at_ts(LITERAL_FIRST) { | 22 | if !p.at_ts(LITERAL_FIRST) { |
diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs index 22810e6fb..780bc470a 100644 --- a/crates/parser/src/grammar/items.rs +++ b/crates/parser/src/grammar/items.rs | |||
@@ -239,9 +239,7 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { | |||
239 | T![static] => consts::static_(p, m), | 239 | T![static] => consts::static_(p, m), |
240 | // test extern_block | 240 | // test extern_block |
241 | // extern {} | 241 | // extern {} |
242 | T![extern] | 242 | T![extern] if la == T!['{'] || (la == STRING && p.nth(2) == T!['{']) => { |
243 | if la == T!['{'] || ((la == STRING || la == RAW_STRING) && p.nth(2) == T!['{']) => | ||
244 | { | ||
245 | abi(p); | 243 | abi(p); |
246 | extern_item_list(p); | 244 | extern_item_list(p); |
247 | m.complete(p, EXTERN_BLOCK); | 245 | m.complete(p, EXTERN_BLOCK); |
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index 935bd2c5e..8bc6688f3 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs | |||
@@ -111,9 +111,7 @@ pub enum SyntaxKind { | |||
111 | CHAR, | 111 | CHAR, |
112 | BYTE, | 112 | BYTE, |
113 | STRING, | 113 | STRING, |
114 | RAW_STRING, | ||
115 | BYTE_STRING, | 114 | BYTE_STRING, |
116 | RAW_BYTE_STRING, | ||
117 | ERROR, | 115 | ERROR, |
118 | IDENT, | 116 | IDENT, |
119 | WHITESPACE, | 117 | WHITESPACE, |
@@ -277,8 +275,7 @@ impl SyntaxKind { | |||
277 | } | 275 | } |
278 | pub fn is_literal(self) -> bool { | 276 | pub fn is_literal(self) -> bool { |
279 | match self { | 277 | match self { |
280 | INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | RAW_STRING | BYTE_STRING | 278 | INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING => true, |
281 | | RAW_BYTE_STRING => true, | ||
282 | _ => false, | 279 | _ => false, |
283 | } | 280 | } |
284 | } | 281 | } |
diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs index 3d33cd1cf..eb44bb2ab 100644 --- a/crates/syntax/src/ast/expr_ext.rs +++ b/crates/syntax/src/ast/expr_ext.rs | |||
@@ -320,6 +320,13 @@ impl ast::Literal { | |||
320 | ast::IntNumber::cast(self.token()) | 320 | ast::IntNumber::cast(self.token()) |
321 | } | 321 | } |
322 | 322 | ||
323 | pub fn as_string(&self) -> Option<ast::String> { | ||
324 | ast::String::cast(self.token()) | ||
325 | } | ||
326 | pub fn as_byte_string(&self) -> Option<ast::ByteString> { | ||
327 | ast::ByteString::cast(self.token()) | ||
328 | } | ||
329 | |||
323 | fn find_suffix(text: &str, possible_suffixes: &[&str]) -> Option<SmolStr> { | 330 | fn find_suffix(text: &str, possible_suffixes: &[&str]) -> Option<SmolStr> { |
324 | possible_suffixes | 331 | possible_suffixes |
325 | .iter() | 332 | .iter() |
@@ -351,10 +358,10 @@ impl ast::Literal { | |||
351 | suffix: Self::find_suffix(&text, &ast::FloatNumber::SUFFIXES), | 358 | suffix: Self::find_suffix(&text, &ast::FloatNumber::SUFFIXES), |
352 | } | 359 | } |
353 | } | 360 | } |
354 | STRING | RAW_STRING => LiteralKind::String, | 361 | STRING => LiteralKind::String, |
355 | T![true] => LiteralKind::Bool(true), | 362 | T![true] => LiteralKind::Bool(true), |
356 | T![false] => LiteralKind::Bool(false), | 363 | T![false] => LiteralKind::Bool(false), |
357 | BYTE_STRING | RAW_BYTE_STRING => LiteralKind::ByteString, | 364 | BYTE_STRING => LiteralKind::ByteString, |
358 | CHAR => LiteralKind::Char, | 365 | CHAR => LiteralKind::Char, |
359 | BYTE => LiteralKind::Byte, | 366 | BYTE => LiteralKind::Byte, |
360 | _ => unreachable!(), | 367 | _ => unreachable!(), |
diff --git a/crates/syntax/src/ast/generated/tokens.rs b/crates/syntax/src/ast/generated/tokens.rs index 1b8449221..728b72cd7 100644 --- a/crates/syntax/src/ast/generated/tokens.rs +++ b/crates/syntax/src/ast/generated/tokens.rs | |||
@@ -70,16 +70,16 @@ impl AstToken for String { | |||
70 | } | 70 | } |
71 | 71 | ||
72 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 72 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
73 | pub struct RawString { | 73 | pub struct ByteString { |
74 | pub(crate) syntax: SyntaxToken, | 74 | pub(crate) syntax: SyntaxToken, |
75 | } | 75 | } |
76 | impl std::fmt::Display for RawString { | 76 | impl std::fmt::Display for ByteString { |
77 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { | 77 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { |
78 | std::fmt::Display::fmt(&self.syntax, f) | 78 | std::fmt::Display::fmt(&self.syntax, f) |
79 | } | 79 | } |
80 | } | 80 | } |
81 | impl AstToken for RawString { | 81 | impl AstToken for ByteString { |
82 | fn can_cast(kind: SyntaxKind) -> bool { kind == RAW_STRING } | 82 | fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE_STRING } |
83 | fn cast(syntax: SyntaxToken) -> Option<Self> { | 83 | fn cast(syntax: SyntaxToken) -> Option<Self> { |
84 | if Self::can_cast(syntax.kind()) { | 84 | if Self::can_cast(syntax.kind()) { |
85 | Some(Self { syntax }) | 85 | Some(Self { syntax }) |
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index c5cd1c504..5579f72b9 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs | |||
@@ -55,13 +55,7 @@ impl ast::Attr { | |||
55 | let key = self.simple_name()?; | 55 | let key = self.simple_name()?; |
56 | let value_token = lit.syntax().first_token()?; | 56 | let value_token = lit.syntax().first_token()?; |
57 | 57 | ||
58 | let value: SmolStr = if let Some(s) = ast::String::cast(value_token.clone()) { | 58 | let value: SmolStr = ast::String::cast(value_token.clone())?.value()?.into(); |
59 | s.value()?.into() | ||
60 | } else if let Some(s) = ast::RawString::cast(value_token) { | ||
61 | s.value()?.into() | ||
62 | } else { | ||
63 | return None; | ||
64 | }; | ||
65 | 59 | ||
66 | Some((key, value)) | 60 | Some((key, value)) |
67 | } | 61 | } |
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index 8d3fad5a6..6cd20b6a6 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs | |||
@@ -139,14 +139,31 @@ pub trait HasQuotes: AstToken { | |||
139 | } | 139 | } |
140 | 140 | ||
141 | impl HasQuotes for ast::String {} | 141 | impl HasQuotes for ast::String {} |
142 | impl HasQuotes for ast::RawString {} | ||
143 | 142 | ||
144 | pub trait HasStringValue: HasQuotes { | 143 | pub trait HasStringValue: HasQuotes { |
145 | fn value(&self) -> Option<Cow<'_, str>>; | 144 | fn value(&self) -> Option<Cow<'_, str>>; |
146 | } | 145 | } |
147 | 146 | ||
147 | impl ast::String { | ||
148 | pub fn is_raw(&self) -> bool { | ||
149 | self.text().starts_with('r') | ||
150 | } | ||
151 | pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> { | ||
152 | let contents_range = self.text_range_between_quotes()?; | ||
153 | assert!(TextRange::up_to(contents_range.len()).contains_range(range)); | ||
154 | Some(range + contents_range.start()) | ||
155 | } | ||
156 | } | ||
157 | |||
148 | impl HasStringValue for ast::String { | 158 | impl HasStringValue for ast::String { |
149 | fn value(&self) -> Option<Cow<'_, str>> { | 159 | fn value(&self) -> Option<Cow<'_, str>> { |
160 | if self.is_raw() { | ||
161 | let text = self.text().as_str(); | ||
162 | let text = | ||
163 | &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; | ||
164 | return Some(Cow::Borrowed(text)); | ||
165 | } | ||
166 | |||
150 | let text = self.text().as_str(); | 167 | let text = self.text().as_str(); |
151 | let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; | 168 | let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; |
152 | 169 | ||
@@ -166,20 +183,9 @@ impl HasStringValue for ast::String { | |||
166 | } | 183 | } |
167 | } | 184 | } |
168 | 185 | ||
169 | // FIXME: merge `ast::RawString` and `ast::String`. | 186 | impl ast::ByteString { |
170 | impl HasStringValue for ast::RawString { | 187 | pub fn is_raw(&self) -> bool { |
171 | fn value(&self) -> Option<Cow<'_, str>> { | 188 | self.text().starts_with("br") |
172 | let text = self.text().as_str(); | ||
173 | let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; | ||
174 | Some(Cow::Borrowed(text)) | ||
175 | } | ||
176 | } | ||
177 | |||
178 | impl ast::RawString { | ||
179 | pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> { | ||
180 | let contents_range = self.text_range_between_quotes()?; | ||
181 | assert!(TextRange::up_to(contents_range.len()).contains_range(range)); | ||
182 | Some(range + contents_range.start()) | ||
183 | } | 189 | } |
184 | } | 190 | } |
185 | 191 | ||
@@ -522,22 +528,6 @@ impl HasFormatSpecifier for ast::String { | |||
522 | } | 528 | } |
523 | } | 529 | } |
524 | 530 | ||
525 | impl HasFormatSpecifier for ast::RawString { | ||
526 | fn char_ranges( | ||
527 | &self, | ||
528 | ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> { | ||
529 | let text = self.text().as_str(); | ||
530 | let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; | ||
531 | let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start(); | ||
532 | |||
533 | let mut res = Vec::with_capacity(text.len()); | ||
534 | for (idx, c) in text.char_indices() { | ||
535 | res.push((TextRange::at(idx.try_into().unwrap(), TextSize::of(c)) + offset, Ok(c))); | ||
536 | } | ||
537 | Some(res) | ||
538 | } | ||
539 | } | ||
540 | |||
541 | impl ast::IntNumber { | 531 | impl ast::IntNumber { |
542 | #[rustfmt::skip] | 532 | #[rustfmt::skip] |
543 | pub(crate) const SUFFIXES: &'static [&'static str] = &[ | 533 | pub(crate) const SUFFIXES: &'static [&'static str] = &[ |
diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs index 5674ecb84..8afd7e53b 100644 --- a/crates/syntax/src/parsing/lexer.rs +++ b/crates/syntax/src/parsing/lexer.rs | |||
@@ -235,7 +235,7 @@ fn rustc_token_kind_to_syntax_kind( | |||
235 | RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols", | 235 | RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols", |
236 | }; | 236 | }; |
237 | }; | 237 | }; |
238 | RAW_STRING | 238 | STRING |
239 | } | 239 | } |
240 | rustc_lexer::LiteralKind::RawByteStr { err: raw_str_err, .. } => { | 240 | rustc_lexer::LiteralKind::RawByteStr { err: raw_str_err, .. } => { |
241 | if let Some(raw_str_err) = raw_str_err { | 241 | if let Some(raw_str_err) = raw_str_err { |
@@ -250,7 +250,7 @@ fn rustc_token_kind_to_syntax_kind( | |||
250 | }; | 250 | }; |
251 | }; | 251 | }; |
252 | 252 | ||
253 | RAW_BYTE_STRING | 253 | BYTE_STRING |
254 | } | 254 | } |
255 | }; | 255 | }; |
256 | 256 | ||
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs index 4149f856a..190f5f67a 100644 --- a/crates/syntax/src/parsing/reparsing.rs +++ b/crates/syntax/src/parsing/reparsing.rs | |||
@@ -44,7 +44,7 @@ fn reparse_token<'node>( | |||
44 | let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); | 44 | let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); |
45 | let prev_token_kind = prev_token.kind(); | 45 | let prev_token_kind = prev_token.kind(); |
46 | match prev_token_kind { | 46 | match prev_token_kind { |
47 | WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { | 47 | WHITESPACE | COMMENT | IDENT | STRING => { |
48 | if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT { | 48 | if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT { |
49 | // removing a new line may extends previous token | 49 | // removing a new line may extends previous token |
50 | let deleted_range = edit.delete - prev_token.text_range().start(); | 50 | let deleted_range = edit.delete - prev_token.text_range().start(); |
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs index 0f9a5e8ae..62a37c50a 100644 --- a/crates/syntax/src/validation.rs +++ b/crates/syntax/src/validation.rs | |||
@@ -4,7 +4,7 @@ mod block; | |||
4 | 4 | ||
5 | use crate::{ | 5 | use crate::{ |
6 | algo, ast, match_ast, AstNode, SyntaxError, | 6 | algo, ast, match_ast, AstNode, SyntaxError, |
7 | SyntaxKind::{BYTE, BYTE_STRING, CHAR, CONST, FN, INT_NUMBER, STRING, TYPE_ALIAS}, | 7 | SyntaxKind::{BYTE, CHAR, CONST, FN, INT_NUMBER, TYPE_ALIAS}, |
8 | SyntaxNode, SyntaxToken, TextSize, T, | 8 | SyntaxNode, SyntaxToken, TextSize, T, |
9 | }; | 9 | }; |
10 | use rowan::Direction; | 10 | use rowan::Direction; |
@@ -121,18 +121,19 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) { | |||
121 | acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off)); | 121 | acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off)); |
122 | }; | 122 | }; |
123 | 123 | ||
124 | match token.kind() { | 124 | if let Some(s) = literal.as_string() { |
125 | BYTE => { | 125 | if !s.is_raw() { |
126 | if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) { | 126 | if let Some(without_quotes) = unquote(text, 1, '"') { |
127 | push_err(2, e); | 127 | unescape_literal(without_quotes, Mode::Str, &mut |range, char| { |
128 | } | 128 | if let Err(err) = char { |
129 | } | 129 | push_err(1, (range.start, err)); |
130 | CHAR => { | 130 | } |
131 | if let Some(Err(e)) = unquote(text, 1, '\'').map(unescape_char) { | 131 | }) |
132 | push_err(1, e); | ||
133 | } | 132 | } |
134 | } | 133 | } |
135 | BYTE_STRING => { | 134 | } |
135 | if let Some(s) = literal.as_byte_string() { | ||
136 | if !s.is_raw() { | ||
136 | if let Some(without_quotes) = unquote(text, 2, '"') { | 137 | if let Some(without_quotes) = unquote(text, 2, '"') { |
137 | unescape_byte_literal(without_quotes, Mode::ByteStr, &mut |range, char| { | 138 | unescape_byte_literal(without_quotes, Mode::ByteStr, &mut |range, char| { |
138 | if let Err(err) = char { | 139 | if let Err(err) = char { |
@@ -141,13 +142,17 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) { | |||
141 | }) | 142 | }) |
142 | } | 143 | } |
143 | } | 144 | } |
144 | STRING => { | 145 | } |
145 | if let Some(without_quotes) = unquote(text, 1, '"') { | 146 | |
146 | unescape_literal(without_quotes, Mode::Str, &mut |range, char| { | 147 | match token.kind() { |
147 | if let Err(err) = char { | 148 | BYTE => { |
148 | push_err(1, (range.start, err)); | 149 | if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) { |
149 | } | 150 | push_err(2, e); |
150 | }) | 151 | } |
152 | } | ||
153 | CHAR => { | ||
154 | if let Some(Err(e)) = unquote(text, 1, '\'').map(unescape_char) { | ||
155 | push_err(1, e); | ||
151 | } | 156 | } |
152 | } | 157 | } |
153 | _ => (), | 158 | _ => (), |
diff --git a/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt index 6fd59ccc0..54e707b73 100644 --- a/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt +++ b/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_STRING 4 "r##\"" | 1 | STRING 4 "r##\"" |
2 | > error0..4 token("r##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | 2 | > error0..4 token("r##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt b/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt index 8d9ca0e8f..1f9889775 100644 --- a/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt +++ b/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_STRING 8 "r##\"🦀" | 1 | STRING 8 "r##\"🦀" |
2 | > error0..8 token("r##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | 2 | > error0..8 token("r##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt index a906380c7..93f6f72ae 100644 --- a/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt +++ b/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_STRING 8 "r##\"\\x7f" | 1 | STRING 8 "r##\"\\x7f" |
2 | > error0..8 token("r##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | 2 | > error0..8 token("r##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt index 5667c6149..1d2ebc60f 100644 --- a/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt +++ b/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_STRING 12 "r##\"\\u{20AA}" | 1 | STRING 12 "r##\"\\u{20AA}" |
2 | > error0..12 token("r##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | 2 | > error0..12 token("r##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt b/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt index 141c8268e..c567ab7e2 100644 --- a/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt +++ b/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_STRING 5 "r##\" " | 1 | STRING 5 "r##\" " |
2 | > error0..5 token("r##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | 2 | > error0..5 token("r##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt b/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt index f61d4cc91..343b20323 100644 --- a/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt +++ b/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_STRING 5 "r##\"\\" | 1 | STRING 5 "r##\"\\" |
2 | > error0..5 token("r##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | 2 | > error0..5 token("r##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt index 12e2c0fc0..041a42737 100644 --- a/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt +++ b/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_STRING 6 "r##\"\\n" | 1 | STRING 6 "r##\"\\n" |
2 | > error0..6 token("r##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | 2 | > error0..6 token("r##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt index fe12cb5fc..efaa1cafd 100644 --- a/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt +++ b/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_BYTE_STRING 5 "br##\"" | 1 | BYTE_STRING 5 "br##\"" |
2 | > error0..5 token("br##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | 2 | > error0..5 token("br##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt b/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt index 5be2a7861..b6c938f94 100644 --- a/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt +++ b/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_BYTE_STRING 9 "br##\"🦀" | 1 | BYTE_STRING 9 "br##\"🦀" |
2 | > error0..9 token("br##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | 2 | > error0..9 token("br##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt index 6cbe08d07..f82efe49a 100644 --- a/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt +++ b/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_BYTE_STRING 9 "br##\"\\x7f" | 1 | BYTE_STRING 9 "br##\"\\x7f" |
2 | > error0..9 token("br##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | 2 | > error0..9 token("br##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt index f56a4f984..4e4a57696 100644 --- a/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt +++ b/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_BYTE_STRING 13 "br##\"\\u{20AA}" | 1 | BYTE_STRING 13 "br##\"\\u{20AA}" |
2 | > error0..13 token("br##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | 2 | > error0..13 token("br##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt b/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt index 3d32ce34e..0018c8623 100644 --- a/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt +++ b/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_BYTE_STRING 6 "br##\" " | 1 | BYTE_STRING 6 "br##\" " |
2 | > error0..6 token("br##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | 2 | > error0..6 token("br##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt b/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt index 320fea177..c3ba4ae82 100644 --- a/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt +++ b/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_BYTE_STRING 6 "br##\"\\" | 1 | BYTE_STRING 6 "br##\"\\" |
2 | > error0..6 token("br##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | 2 | > error0..6 token("br##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt index b3a56380c..7bda72276 100644 --- a/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt +++ b/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_BYTE_STRING 7 "br##\"\\n" | 1 | BYTE_STRING 7 "br##\"\\n" |
2 | > error0..7 token("br##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | 2 | > error0..7 token("br##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt index 5af1e2d97..ce92d2ff7 100644 --- a/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt +++ b/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_STRING 3 "r##" | 1 | STRING 3 "r##" |
2 | > error0..3 token("r##") msg(Missing `"` symbol after `#` symbols to begin the raw string literal) | 2 | > error0..3 token("r##") msg(Missing `"` symbol after `#` symbols to begin the raw string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt index aec7afd92..a75d9030c 100644 --- a/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt +++ b/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_BYTE_STRING 4 "br##" | 1 | BYTE_STRING 4 "br##" |
2 | > error0..4 token("br##") msg(Missing `"` symbol after `#` symbols to begin the raw byte string literal) | 2 | > error0..4 token("br##") msg(Missing `"` symbol after `#` symbols to begin the raw byte string literal) |
diff --git a/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt b/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt index e22fe5374..516e0b78e 100644 --- a/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt +++ b/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt | |||
@@ -1,4 +1,4 @@ | |||
1 | RAW_STRING 4 "r## " | 1 | STRING 4 "r## " |
2 | IDENT 1 "I" | 2 | IDENT 1 "I" |
3 | WHITESPACE 1 " " | 3 | WHITESPACE 1 " " |
4 | IDENT 4 "lack" | 4 | IDENT 4 "lack" |
diff --git a/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt b/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt index d74ea4c27..2f8a6f5f2 100644 --- a/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt +++ b/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt | |||
@@ -1,4 +1,4 @@ | |||
1 | RAW_BYTE_STRING 5 "br## " | 1 | BYTE_STRING 5 "br## " |
2 | IDENT 1 "I" | 2 | IDENT 1 "I" |
3 | WHITESPACE 1 " " | 3 | WHITESPACE 1 " " |
4 | IDENT 4 "lack" | 4 | IDENT 4 "lack" |
diff --git a/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt b/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt index bc03b51a8..e61ad99be 100644 --- a/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt +++ b/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt | |||
@@ -4,13 +4,13 @@ BYTE 4 "b\'x\'" | |||
4 | WHITESPACE 1 " " | 4 | WHITESPACE 1 " " |
5 | BYTE_STRING 6 "b\"foo\"" | 5 | BYTE_STRING 6 "b\"foo\"" |
6 | WHITESPACE 1 " " | 6 | WHITESPACE 1 " " |
7 | RAW_BYTE_STRING 4 "br\"\"" | 7 | BYTE_STRING 4 "br\"\"" |
8 | WHITESPACE 1 "\n" | 8 | WHITESPACE 1 "\n" |
9 | BYTE 6 "b\'\'suf" | 9 | BYTE 6 "b\'\'suf" |
10 | WHITESPACE 1 " " | 10 | WHITESPACE 1 " " |
11 | BYTE_STRING 5 "b\"\"ix" | 11 | BYTE_STRING 5 "b\"\"ix" |
12 | WHITESPACE 1 " " | 12 | WHITESPACE 1 " " |
13 | RAW_BYTE_STRING 6 "br\"\"br" | 13 | BYTE_STRING 6 "br\"\"br" |
14 | WHITESPACE 1 "\n" | 14 | WHITESPACE 1 "\n" |
15 | BYTE 5 "b\'\\n\'" | 15 | BYTE 5 "b\'\\n\'" |
16 | WHITESPACE 1 " " | 16 | WHITESPACE 1 " " |
diff --git a/crates/syntax/test_data/lexer/ok/0009_strings.txt b/crates/syntax/test_data/lexer/ok/0009_strings.txt index 4cb4d711d..988a8877b 100644 --- a/crates/syntax/test_data/lexer/ok/0009_strings.txt +++ b/crates/syntax/test_data/lexer/ok/0009_strings.txt | |||
@@ -1,6 +1,6 @@ | |||
1 | STRING 7 "\"hello\"" | 1 | STRING 7 "\"hello\"" |
2 | WHITESPACE 1 " " | 2 | WHITESPACE 1 " " |
3 | RAW_STRING 8 "r\"world\"" | 3 | STRING 8 "r\"world\"" |
4 | WHITESPACE 1 " " | 4 | WHITESPACE 1 " " |
5 | STRING 17 "\"\\n\\\"\\\\no escape\"" | 5 | STRING 17 "\"\\n\\\"\\\\no escape\"" |
6 | WHITESPACE 1 " " | 6 | WHITESPACE 1 " " |
diff --git a/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt b/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt index 9cf0957d1..db0d5ffd1 100644 --- a/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt +++ b/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt | |||
@@ -1,2 +1,2 @@ | |||
1 | RAW_STRING 36 "r###\"this is a r##\"raw\"## string\"###" | 1 | STRING 36 "r###\"this is a r##\"raw\"## string\"###" |
2 | WHITESPACE 1 "\n" | 2 | WHITESPACE 1 "\n" |
diff --git a/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast b/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast index 9a87b5b93..ae838105d 100644 --- a/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast +++ b/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast | |||
@@ -104,7 +104,7 @@ [email protected] | |||
104 | [email protected] "=" | 104 | [email protected] "=" |
105 | [email protected] " " | 105 | [email protected] " " |
106 | [email protected] | 106 | [email protected] |
107 | RAW_STRING@… "r\"d\"" | 107 | STRING@… "r\"d\"" |
108 | [email protected] ";" | 108 | [email protected] ";" |
109 | [email protected] "\n " | 109 | [email protected] "\n " |
110 | [email protected] | 110 | [email protected] |
@@ -128,7 +128,7 @@ [email protected] | |||
128 | [email protected] "=" | 128 | [email protected] "=" |
129 | [email protected] " " | 129 | [email protected] " " |
130 | [email protected] | 130 | [email protected] |
131 | RAW_BYTE_STRING@… "br\"f\"" | 131 | BYTE_STRING@… "br\"f\"" |
132 | [email protected] ";" | 132 | [email protected] ";" |
133 | [email protected] "\n" | 133 | [email protected] "\n" |
134 | [email protected] "}" | 134 | [email protected] "}" |
diff --git a/xtask/src/ast_src.rs b/xtask/src/ast_src.rs index adc191254..8ceaaf60e 100644 --- a/xtask/src/ast_src.rs +++ b/xtask/src/ast_src.rs | |||
@@ -71,16 +71,7 @@ pub(crate) const KINDS_SRC: KindsSrc = KindsSrc { | |||
71 | "trait", "true", "try", "type", "unsafe", "use", "where", "while", | 71 | "trait", "true", "try", "type", "unsafe", "use", "where", "while", |
72 | ], | 72 | ], |
73 | contextual_keywords: &["auto", "default", "existential", "union", "raw"], | 73 | contextual_keywords: &["auto", "default", "existential", "union", "raw"], |
74 | literals: &[ | 74 | literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING"], |
75 | "INT_NUMBER", | ||
76 | "FLOAT_NUMBER", | ||
77 | "CHAR", | ||
78 | "BYTE", | ||
79 | "STRING", | ||
80 | "RAW_STRING", | ||
81 | "BYTE_STRING", | ||
82 | "RAW_BYTE_STRING", | ||
83 | ], | ||
84 | tokens: &[ | 75 | tokens: &[ |
85 | "ERROR", | 76 | "ERROR", |
86 | "IDENT", | 77 | "IDENT", |
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs index 87c934e66..44460effa 100644 --- a/xtask/src/codegen/gen_syntax.rs +++ b/xtask/src/codegen/gen_syntax.rs | |||
@@ -505,7 +505,7 @@ impl Field { | |||
505 | fn lower(grammar: &Grammar) -> AstSrc { | 505 | fn lower(grammar: &Grammar) -> AstSrc { |
506 | let mut res = AstSrc::default(); | 506 | let mut res = AstSrc::default(); |
507 | 507 | ||
508 | res.tokens = "Whitespace Comment String RawString IntNumber FloatNumber" | 508 | res.tokens = "Whitespace Comment String ByteString IntNumber FloatNumber" |
509 | .split_ascii_whitespace() | 509 | .split_ascii_whitespace() |
510 | .map(|it| it.to_string()) | 510 | .map(|it| it.to_string()) |
511 | .collect::<Vec<_>>(); | 511 | .collect::<Vec<_>>(); |