From 5ba4f949c23dcf53f34995c90b7c01e6c641b1f0 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Fri, 6 Nov 2020 22:21:56 +0100 Subject: Kill RAW_ literals Syntactically, they are indistinguishable from non-raw versions, so it doesn't make sense to separate them *at the syntax* level. --- crates/assists/src/assist_context.rs | 7 ++- crates/assists/src/handlers/add_turbo_fish.rs | 2 +- crates/assists/src/handlers/expand_glob_import.rs | 2 +- crates/assists/src/handlers/flip_comma.rs | 2 +- crates/assists/src/handlers/flip_trait_bound.rs | 2 +- .../src/handlers/introduce_named_lifetime.rs | 2 +- crates/assists/src/handlers/invert_if.rs | 2 +- crates/assists/src/handlers/raw_string.rs | 31 ++++++++----- crates/assists/src/handlers/remove_mut.rs | 2 +- .../src/handlers/replace_let_with_if_let.rs | 2 +- .../src/handlers/replace_string_with_char.rs | 2 +- crates/assists/src/handlers/split_import.rs | 2 +- crates/assists/src/handlers/unwrap_block.rs | 2 +- crates/ide/src/extend_selection.rs | 2 +- crates/ide/src/syntax_highlighting.rs | 16 +++---- crates/ide/src/syntax_highlighting/format.rs | 4 +- crates/ide/src/syntax_highlighting/injection.rs | 2 +- crates/ide/src/syntax_tree.rs | 6 +-- crates/parser/src/grammar.rs | 5 +-- crates/parser/src/grammar/expressions/atom.rs | 14 +----- crates/parser/src/grammar/items.rs | 4 +- crates/parser/src/syntax_kind/generated.rs | 5 +-- crates/syntax/src/ast/expr_ext.rs | 11 ++++- crates/syntax/src/ast/generated/tokens.rs | 8 ++-- crates/syntax/src/ast/node_ext.rs | 8 +--- crates/syntax/src/ast/token_ext.rs | 52 +++++++++------------- crates/syntax/src/parsing/lexer.rs | 4 +- crates/syntax/src/parsing/reparsing.rs | 2 +- crates/syntax/src/validation.rs | 41 +++++++++-------- .../lexer/err/0033_unclosed_raw_string_at_eof.txt | 2 +- .../err/0034_unclosed_raw_string_with_ferris.txt | 2 +- .../0035_unclosed_raw_string_with_ascii_escape.txt | 2 +- ...036_unclosed_raw_string_with_unicode_escape.txt | 2 +- .../err/0037_unclosed_raw_string_with_space.txt | 2 +- .../err/0038_unclosed_raw_string_with_slash.txt | 2 +- .../err/0039_unclosed_raw_string_with_slash_n.txt | 2 +- .../err/0040_unclosed_raw_byte_string_at_eof.txt | 2 +- .../0041_unclosed_raw_byte_string_with_ferris.txt | 2 +- ..._unclosed_raw_byte_string_with_ascii_escape.txt | 2 +- ...nclosed_raw_byte_string_with_unicode_escape.txt | 2 +- .../0044_unclosed_raw_byte_string_with_space.txt | 2 +- .../0045_unclosed_raw_byte_string_with_slash.txt | 2 +- .../0046_unclosed_raw_byte_string_with_slash_n.txt | 2 +- .../lexer/err/0047_unstarted_raw_string_at_eof.txt | 2 +- .../err/0048_unstarted_raw_byte_string_at_eof.txt | 2 +- .../err/0049_unstarted_raw_string_with_ascii.txt | 2 +- .../0050_unstarted_raw_byte_string_with_ascii.txt | 2 +- .../test_data/lexer/ok/0008_byte_strings.txt | 4 +- crates/syntax/test_data/lexer/ok/0009_strings.txt | 2 +- .../syntax/test_data/lexer/ok/0013_raw_strings.txt | 2 +- .../parser/inline/ok/0085_expr_literals.rast | 4 +- 51 files changed, 139 insertions(+), 153 deletions(-) (limited to 'crates') diff --git a/crates/assists/src/assist_context.rs b/crates/assists/src/assist_context.rs index d11fee196..fcfe2d6ee 100644 --- a/crates/assists/src/assist_context.rs +++ b/crates/assists/src/assist_context.rs @@ -12,7 +12,7 @@ use ide_db::{ }; use syntax::{ algo::{self, find_node_at_offset, SyntaxRewriter}, - AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, TextSize, + AstNode, AstToken, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, TextSize,
TokenAtOffset, }; use text_edit::{TextEdit, TextEditBuilder}; @@ -81,9 +81,12 @@ impl<'a> AssistContext<'a> { pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> { self.source_file.syntax().token_at_offset(self.offset()) } - pub(crate) fn find_token_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> { + pub(crate) fn find_token_syntax_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> { self.token_at_offset().find(|it| it.kind() == kind) } + pub(crate) fn find_token_at_offset<T: AstToken>(&self) -> Option<T> { + self.token_at_offset().find_map(T::cast) + } pub(crate) fn find_node_at_offset<N: AstNode>(&self) -> Option<N> { find_node_at_offset(self.source_file.syntax(), self.offset()) } diff --git a/crates/assists/src/handlers/add_turbo_fish.rs b/crates/assists/src/handlers/add_turbo_fish.rs index e3d84d698..1f486c013 100644 --- a/crates/assists/src/handlers/add_turbo_fish.rs +++ b/crates/assists/src/handlers/add_turbo_fish.rs @@ -25,7 +25,7 @@ use crate::{ // } // ``` pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let ident = ctx.find_token_at_offset(SyntaxKind::IDENT).or_else(|| { + let ident = ctx.find_token_syntax_at_offset(SyntaxKind::IDENT).or_else(|| { let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?; if arg_list.args().count() > 0 { return None; diff --git a/crates/assists/src/handlers/expand_glob_import.rs b/crates/assists/src/handlers/expand_glob_import.rs index 316a58d88..853266395 100644 --- a/crates/assists/src/handlers/expand_glob_import.rs +++ b/crates/assists/src/handlers/expand_glob_import.rs @@ -41,7 +41,7 @@ use crate::{ // fn qux(bar: Bar, baz: Baz) {} // ``` pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let star = ctx.find_token_at_offset(T![*])?; + let star = ctx.find_token_syntax_at_offset(T![*])?; let (parent, mod_path) = find_parent_and_path(&star)?; let target_module = match ctx.sema.resolve_path(&mod_path)?
{ PathResolution::Def(ModuleDef::Module(it)) => it, diff --git a/crates/assists/src/handlers/flip_comma.rs b/crates/assists/src/handlers/flip_comma.rs index 5c69db53e..64b4b1a76 100644 --- a/crates/assists/src/handlers/flip_comma.rs +++ b/crates/assists/src/handlers/flip_comma.rs @@ -18,7 +18,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // } // ``` pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let comma = ctx.find_token_at_offset(T![,])?; + let comma = ctx.find_token_syntax_at_offset(T![,])?; let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; diff --git a/crates/assists/src/handlers/flip_trait_bound.rs b/crates/assists/src/handlers/flip_trait_bound.rs index 347e79b1d..92ee42181 100644 --- a/crates/assists/src/handlers/flip_trait_bound.rs +++ b/crates/assists/src/handlers/flip_trait_bound.rs @@ -20,7 +20,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { // We want to replicate the behavior of `flip_binexpr` by only suggesting // the assist when the cursor is on a `+` - let plus = ctx.find_token_at_offset(T![+])?; + let plus = ctx.find_token_syntax_at_offset(T![+])?; // Make sure we're in a `TypeBoundList` if ast::TypeBoundList::cast(plus.parent()).is_none() { diff --git a/crates/assists/src/handlers/introduce_named_lifetime.rs b/crates/assists/src/handlers/introduce_named_lifetime.rs index 5f623e5f7..4cc8dae65 100644 --- a/crates/assists/src/handlers/introduce_named_lifetime.rs +++ b/crates/assists/src/handlers/introduce_named_lifetime.rs @@ -36,7 +36,7 @@ static ASSIST_LABEL: &str = "Introduce named lifetime"; // FIXME: should also add support for the case fun(f: &Foo) -> &<|>Foo pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { let lifetime_token = ctx - .find_token_at_offset(SyntaxKind::LIFETIME) + .find_token_syntax_at_offset(SyntaxKind::LIFETIME) .filter(|lifetime| lifetime.text() == "'_")?; if let Some(fn_def) = lifetime_token.ancestors().find_map(ast::Fn::cast) { generate_fn_def_assist(acc, &fn_def, lifetime_token.text_range()) diff --git a/crates/assists/src/handlers/invert_if.rs b/crates/assists/src/handlers/invert_if.rs index 461fcf862..ea722b91b 100644 --- a/crates/assists/src/handlers/invert_if.rs +++ b/crates/assists/src/handlers/invert_if.rs @@ -29,7 +29,7 @@ use crate::{ // ``` pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let if_keyword = ctx.find_token_at_offset(T![if])?; + let if_keyword = ctx.find_token_syntax_at_offset(T![if])?; let expr = ast::IfExpr::cast(if_keyword.parent())?; let if_range = if_keyword.text_range(); let cursor_in_range = if_range.contains_range(ctx.frange.range); diff --git a/crates/assists/src/handlers/raw_string.rs b/crates/assists/src/handlers/raw_string.rs index 9ddd116e0..7f9f01c9c 100644 --- a/crates/assists/src/handlers/raw_string.rs +++ b/crates/assists/src/handlers/raw_string.rs @@ -2,9 +2,7 @@ use std::borrow::Cow; use syntax::{ ast::{self, HasQuotes, HasStringValue}, - AstToken, - SyntaxKind::{RAW_STRING, STRING}, - TextRange, TextSize, + AstToken, TextRange, TextSize, }; use test_utils::mark; @@ -26,7 +24,10 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // } // ``` pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let token = 
ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; + let token = ctx.find_token_at_offset::<ast::String>()?; + if token.is_raw() { + return None; + } let value = token.value()?; let target = token.syntax().text_range(); acc.add( @@ -65,7 +66,10 @@ pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option< // } // ``` pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; + let token = ctx.find_token_at_offset::<ast::String>()?; + if !token.is_raw() { + return None; + } let value = token.value()?; let target = token.syntax().text_range(); acc.add( @@ -104,11 +108,15 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Optio // } // ``` pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let token = ctx.find_token_at_offset(RAW_STRING)?; - let target = token.text_range(); + let token = ctx.find_token_at_offset::<ast::String>()?; + if !token.is_raw() { + return None; + } + let text_range = token.syntax().text_range(); + let target = text_range; acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| { - edit.insert(token.text_range().start() + TextSize::of('r'), "#"); - edit.insert(token.text_range().end(), "#"); + edit.insert(text_range.start() + TextSize::of('r'), "#"); + edit.insert(text_range.end(), "#"); }) } @@ -128,7 +136,10 @@ pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; + let token = ctx.find_token_at_offset::<ast::String>()?; + if !token.is_raw() { + return None; + } let text = token.text().as_str(); if !text.starts_with("r#") && text.ends_with('#') { diff --git a/crates/assists/src/handlers/remove_mut.rs b/crates/assists/src/handlers/remove_mut.rs index 44f41daa9..575b271f7 100644 --- a/crates/assists/src/handlers/remove_mut.rs +++ b/crates/assists/src/handlers/remove_mut.rs @@ -18,7 +18,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // } // ``` pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let mut_token = ctx.find_token_at_offset(T![mut])?; + let mut_token = ctx.find_token_syntax_at_offset(T![mut])?; let delete_from = mut_token.text_range().start(); let delete_to = match mut_token.next_token() { Some(it) if it.kind() == SyntaxKind::WHITESPACE => it.text_range().end(), diff --git a/crates/assists/src/handlers/replace_let_with_if_let.rs b/crates/assists/src/handlers/replace_let_with_if_let.rs index a5bcbda24..69d3b08d3 100644 --- a/crates/assists/src/handlers/replace_let_with_if_let.rs +++ b/crates/assists/src/handlers/replace_let_with_if_let.rs @@ -37,7 +37,7 @@ use ide_db::ty_filter::TryEnum; // fn compute() -> Option { None } // ``` pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let let_kw = ctx.find_token_at_offset(T![let])?; + let let_kw = ctx.find_token_syntax_at_offset(T![let])?; let let_stmt = let_kw.ancestors().find_map(ast::LetStmt::cast)?; let init = let_stmt.initializer()?; let original_pat = let_stmt.pat()?; diff --git a/crates/assists/src/handlers/replace_string_with_char.rs b/crates/assists/src/handlers/replace_string_with_char.rs index 4ca87a8ec..6d227e883 100644 --- a/crates/assists/src/handlers/replace_string_with_char.rs +++ b/crates/assists/src/handlers/replace_string_with_char.rs @@
-22,7 +22,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // } // ``` pub(crate) fn replace_string_with_char(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; + let token = ctx.find_token_syntax_at_offset(STRING).and_then(ast::String::cast)?; let value = token.value()?; let target = token.syntax().text_range(); diff --git a/crates/assists/src/handlers/split_import.rs b/crates/assists/src/handlers/split_import.rs index 15e67eaa1..ef1f6b8a1 100644 --- a/crates/assists/src/handlers/split_import.rs +++ b/crates/assists/src/handlers/split_import.rs @@ -16,7 +16,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // use std::{collections::HashMap}; // ``` pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let colon_colon = ctx.find_token_at_offset(T![::])?; + let colon_colon = ctx.find_token_syntax_at_offset(T![::])?; let path = ast::Path::cast(colon_colon.parent())?.qualifier()?; let top_path = successors(Some(path.clone()), |it| it.parent_path()).last()?; diff --git a/crates/assists/src/handlers/unwrap_block.rs b/crates/assists/src/handlers/unwrap_block.rs index 3851aeb3e..36ef871b9 100644 --- a/crates/assists/src/handlers/unwrap_block.rs +++ b/crates/assists/src/handlers/unwrap_block.rs @@ -29,7 +29,7 @@ pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext) -> Option<()> let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite); let assist_label = "Unwrap block"; - let l_curly_token = ctx.find_token_at_offset(T!['{'])?; + let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?; let mut block = ast::BlockExpr::cast(l_curly_token.parent())?; let mut parent = block.syntax().parent()?; if ast::MatchArm::can_cast(parent.kind()) { diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs index 3ee0af8ad..0971f7701 100644 --- a/crates/ide/src/extend_selection.rs +++ b/crates/ide/src/extend_selection.rs @@ -35,7 +35,7 @@ fn try_extend_selection( ) -> Option { let range = frange.range; - let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; + let string_kinds = [COMMENT, STRING, BYTE_STRING]; let list_kinds = [ RECORD_PAT_FIELD_LIST, MATCH_ARM_LIST, diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index efcc8ecfe..05bafe9c8 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -179,10 +179,12 @@ pub(crate) fn highlight( element.clone() }; - if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) { - let expanded = element_to_highlight.as_token().unwrap().clone(); - if injection::highlight_injection(&mut stack, &sema, token, expanded).is_some() { - continue; + if let Some(token) = element.as_token().cloned().and_then(ast::String::cast) { + if token.is_raw() { + let expanded = element_to_highlight.as_token().unwrap().clone(); + if injection::highlight_injection(&mut stack, &sema, token, expanded).is_some() { + continue; + } } } @@ -214,10 +216,6 @@ pub(crate) fn highlight( } stack.pop_and_inject(None); } - } else if let Some(string) = - element_to_highlight.as_token().cloned().and_then(ast::RawString::cast) - { - format_string_highlighter.highlight_format_string(&mut stack, &string, range); } } } @@ -532,7 +530,7 @@ fn highlight_element( None => h.into(), } } - STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => HighlightTag::StringLiteral.into(), + STRING | 
BYTE_STRING => HighlightTag::StringLiteral.into(), ATTR => HighlightTag::Attribute.into(), INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(), BYTE => HighlightTag::ByteLiteral.into(), diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs index 71bde24f0..42f27df5d 100644 --- a/crates/ide/src/syntax_highlighting/format.rs +++ b/crates/ide/src/syntax_highlighting/format.rs @@ -29,9 +29,7 @@ impl FormatStringHighlighter { .children_with_tokens() .filter(|t| t.kind() != SyntaxKind::WHITESPACE) .nth(1) - .filter(|e| { - ast::String::can_cast(e.kind()) || ast::RawString::can_cast(e.kind()) - }) + .filter(|e| ast::String::can_cast(e.kind())) } _ => {} } diff --git a/crates/ide/src/syntax_highlighting/injection.rs b/crates/ide/src/syntax_highlighting/injection.rs index 59a74bc02..79f6b5359 100644 --- a/crates/ide/src/syntax_highlighting/injection.rs +++ b/crates/ide/src/syntax_highlighting/injection.rs @@ -15,7 +15,7 @@ use super::HighlightedRangeStack; pub(super) fn highlight_injection( acc: &mut HighlightedRangeStack, sema: &Semantics, - literal: ast::RawString, + literal: ast::String, expanded: SyntaxToken, ) -> Option<()> { let active_parameter = ActiveParameter::at_token(&sema, expanded)?; diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs index 7941610d6..6dd05c05d 100644 --- a/crates/ide/src/syntax_tree.rs +++ b/crates/ide/src/syntax_tree.rs @@ -1,9 +1,7 @@ use ide_db::base_db::{FileId, SourceDatabase}; use ide_db::RootDatabase; use syntax::{ - algo, AstNode, NodeOrToken, SourceFile, - SyntaxKind::{RAW_STRING, STRING}, - SyntaxToken, TextRange, TextSize, + algo, AstNode, NodeOrToken, SourceFile, SyntaxKind::STRING, SyntaxToken, TextRange, TextSize, }; // Feature: Show Syntax Tree @@ -46,7 +44,7 @@ fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option< // we'll attempt parsing it as rust syntax // to provide the syntax tree of the contents of the string match token.kind() { - STRING | RAW_STRING => syntax_tree_for_token(token, text_range), + STRING => syntax_tree_for_token(token, text_range), _ => None, } } diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs index 4ab206a83..116b991a8 100644 --- a/crates/parser/src/grammar.rs +++ b/crates/parser/src/grammar.rs @@ -236,10 +236,7 @@ fn abi(p: &mut Parser) { assert!(p.at(T![extern])); let abi = p.start(); p.bump(T![extern]); - match p.current() { - STRING | RAW_STRING => p.bump_any(), - _ => (), - } + p.eat(STRING); abi.complete(p, ABI); } diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index 66a92a4e1..31f42f161 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -15,18 +15,8 @@ use super::*; // let _ = b"e"; // let _ = br"f"; // } -pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[ - TRUE_KW, - FALSE_KW, - INT_NUMBER, - FLOAT_NUMBER, - BYTE, - CHAR, - STRING, - RAW_STRING, - BYTE_STRING, - RAW_BYTE_STRING, -]); +pub(crate) const LITERAL_FIRST: TokenSet = + TokenSet::new(&[TRUE_KW, FALSE_KW, INT_NUMBER, FLOAT_NUMBER, BYTE, CHAR, STRING, BYTE_STRING]); pub(crate) fn literal(p: &mut Parser) -> Option { if !p.at_ts(LITERAL_FIRST) { diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs index 22810e6fb..780bc470a 100644 --- a/crates/parser/src/grammar/items.rs +++ b/crates/parser/src/grammar/items.rs @@ -239,9 +239,7 @@ fn items_without_modifiers(p: &mut 
Parser, m: Marker) -> Result<(), Marker> { T![static] => consts::static_(p, m), // test extern_block // extern {} - T![extern] - if la == T!['{'] || ((la == STRING || la == RAW_STRING) && p.nth(2) == T!['{']) => - { + T![extern] if la == T!['{'] || (la == STRING && p.nth(2) == T!['{']) => { abi(p); extern_item_list(p); m.complete(p, EXTERN_BLOCK); diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index 935bd2c5e..8bc6688f3 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs @@ -111,9 +111,7 @@ pub enum SyntaxKind { CHAR, BYTE, STRING, - RAW_STRING, BYTE_STRING, - RAW_BYTE_STRING, ERROR, IDENT, WHITESPACE, @@ -277,8 +275,7 @@ impl SyntaxKind { } pub fn is_literal(self) -> bool { match self { - INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | RAW_STRING | BYTE_STRING - | RAW_BYTE_STRING => true, + INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING => true, _ => false, } } diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs index 3d33cd1cf..eb44bb2ab 100644 --- a/crates/syntax/src/ast/expr_ext.rs +++ b/crates/syntax/src/ast/expr_ext.rs @@ -320,6 +320,13 @@ impl ast::Literal { ast::IntNumber::cast(self.token()) } + pub fn as_string(&self) -> Option { + ast::String::cast(self.token()) + } + pub fn as_byte_string(&self) -> Option { + ast::ByteString::cast(self.token()) + } + fn find_suffix(text: &str, possible_suffixes: &[&str]) -> Option { possible_suffixes .iter() @@ -351,10 +358,10 @@ impl ast::Literal { suffix: Self::find_suffix(&text, &ast::FloatNumber::SUFFIXES), } } - STRING | RAW_STRING => LiteralKind::String, + STRING => LiteralKind::String, T![true] => LiteralKind::Bool(true), T![false] => LiteralKind::Bool(false), - BYTE_STRING | RAW_BYTE_STRING => LiteralKind::ByteString, + BYTE_STRING => LiteralKind::ByteString, CHAR => LiteralKind::Char, BYTE => LiteralKind::Byte, _ => unreachable!(), diff --git a/crates/syntax/src/ast/generated/tokens.rs b/crates/syntax/src/ast/generated/tokens.rs index 1b8449221..728b72cd7 100644 --- a/crates/syntax/src/ast/generated/tokens.rs +++ b/crates/syntax/src/ast/generated/tokens.rs @@ -70,16 +70,16 @@ impl AstToken for String { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct RawString { +pub struct ByteString { pub(crate) syntax: SyntaxToken, } -impl std::fmt::Display for RawString { +impl std::fmt::Display for ByteString { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(&self.syntax, f) } } -impl AstToken for RawString { - fn can_cast(kind: SyntaxKind) -> bool { kind == RAW_STRING } +impl AstToken for ByteString { + fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE_STRING } fn cast(syntax: SyntaxToken) -> Option { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index c5cd1c504..5579f72b9 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs @@ -55,13 +55,7 @@ impl ast::Attr { let key = self.simple_name()?; let value_token = lit.syntax().first_token()?; - let value: SmolStr = if let Some(s) = ast::String::cast(value_token.clone()) { - s.value()?.into() - } else if let Some(s) = ast::RawString::cast(value_token) { - s.value()?.into() - } else { - return None; - }; + let value: SmolStr = ast::String::cast(value_token.clone())?.value()?.into(); Some((key, value)) } diff --git a/crates/syntax/src/ast/token_ext.rs 
b/crates/syntax/src/ast/token_ext.rs index 8d3fad5a6..6cd20b6a6 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -139,14 +139,31 @@ pub trait HasQuotes: AstToken { } impl HasQuotes for ast::String {} -impl HasQuotes for ast::RawString {} pub trait HasStringValue: HasQuotes { fn value(&self) -> Option<Cow<'_, str>>; } +impl ast::String { + pub fn is_raw(&self) -> bool { + self.text().starts_with('r') + } + pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> { + let contents_range = self.text_range_between_quotes()?; + assert!(TextRange::up_to(contents_range.len()).contains_range(range)); + Some(range + contents_range.start()) + } +} + impl HasStringValue for ast::String { fn value(&self) -> Option<Cow<'_, str>> { + if self.is_raw() { + let text = self.text().as_str(); + let text = + &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; + return Some(Cow::Borrowed(text)); + } + let text = self.text().as_str(); let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; @@ -166,20 +183,9 @@ impl HasStringValue for ast::String { } } -// FIXME: merge `ast::RawString` and `ast::String`. -impl HasStringValue for ast::RawString { - fn value(&self) -> Option<Cow<'_, str>> { - let text = self.text().as_str(); - let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; - Some(Cow::Borrowed(text)) - } -} - -impl ast::RawString { - pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> { - let contents_range = self.text_range_between_quotes()?; - assert!(TextRange::up_to(contents_range.len()).contains_range(range)); - Some(range + contents_range.start()) +impl ast::ByteString { + pub fn is_raw(&self) -> bool { + self.text().starts_with("br") } } @@ -522,22 +528,6 @@ impl HasFormatSpecifier for ast::String { } } -impl HasFormatSpecifier for ast::RawString { - fn char_ranges( - &self, - ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> { - let text = self.text().as_str(); - let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; - let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start(); - - let mut res = Vec::with_capacity(text.len()); - for (idx, c) in text.char_indices() { - res.push((TextRange::at(idx.try_into().unwrap(), TextSize::of(c)) + offset, Ok(c))); - } - Some(res) - } -} - impl ast::IntNumber { #[rustfmt::skip] pub(crate) const SUFFIXES: &'static [&'static str] = &[ diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs index 5674ecb84..8afd7e53b 100644 --- a/crates/syntax/src/parsing/lexer.rs +++ b/crates/syntax/src/parsing/lexer.rs @@ -235,7 +235,7 @@ fn rustc_token_kind_to_syntax_kind( RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols", }; }; - RAW_STRING + STRING } rustc_lexer::LiteralKind::RawByteStr { err: raw_str_err, ..
} => { if let Some(raw_str_err) = raw_str_err { @@ -250,7 +250,7 @@ fn rustc_token_kind_to_syntax_kind( }; }; - RAW_BYTE_STRING + BYTE_STRING } }; diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs index 4149f856a..190f5f67a 100644 --- a/crates/syntax/src/parsing/reparsing.rs +++ b/crates/syntax/src/parsing/reparsing.rs @@ -44,7 +44,7 @@ fn reparse_token<'node>( let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); let prev_token_kind = prev_token.kind(); match prev_token_kind { - WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { + WHITESPACE | COMMENT | IDENT | STRING => { if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT { // removing a new line may extends previous token let deleted_range = edit.delete - prev_token.text_range().start(); diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs index 0f9a5e8ae..62a37c50a 100644 --- a/crates/syntax/src/validation.rs +++ b/crates/syntax/src/validation.rs @@ -4,7 +4,7 @@ mod block; use crate::{ algo, ast, match_ast, AstNode, SyntaxError, - SyntaxKind::{BYTE, BYTE_STRING, CHAR, CONST, FN, INT_NUMBER, STRING, TYPE_ALIAS}, + SyntaxKind::{BYTE, CHAR, CONST, FN, INT_NUMBER, TYPE_ALIAS}, SyntaxNode, SyntaxToken, TextSize, T, }; use rowan::Direction; @@ -121,18 +121,19 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec) { acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off)); }; - match token.kind() { - BYTE => { - if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) { - push_err(2, e); - } - } - CHAR => { - if let Some(Err(e)) = unquote(text, 1, '\'').map(unescape_char) { - push_err(1, e); + if let Some(s) = literal.as_string() { + if !s.is_raw() { + if let Some(without_quotes) = unquote(text, 1, '"') { + unescape_literal(without_quotes, Mode::Str, &mut |range, char| { + if let Err(err) = char { + push_err(1, (range.start, err)); + } + }) } } - BYTE_STRING => { + } + if let Some(s) = literal.as_byte_string() { + if !s.is_raw() { if let Some(without_quotes) = unquote(text, 2, '"') { unescape_byte_literal(without_quotes, Mode::ByteStr, &mut |range, char| { if let Err(err) = char { @@ -141,13 +142,17 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec) { }) } } - STRING => { - if let Some(without_quotes) = unquote(text, 1, '"') { - unescape_literal(without_quotes, Mode::Str, &mut |range, char| { - if let Err(err) = char { - push_err(1, (range.start, err)); - } - }) + } + + match token.kind() { + BYTE => { + if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) { + push_err(2, e); + } + } + CHAR => { + if let Some(Err(e)) = unquote(text, 1, '\'').map(unescape_char) { + push_err(1, e); } } _ => (), diff --git a/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt index 6fd59ccc0..54e707b73 100644 --- a/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt +++ b/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt @@ -1,2 +1,2 @@ -RAW_STRING 4 "r##\"" +STRING 4 "r##\"" > error0..4 token("r##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) diff --git a/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt b/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt index 8d9ca0e8f..1f9889775 100644 --- a/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt +++ 
b/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt @@ -1,2 +1,2 @@ -RAW_STRING 8 "r##\"🦀" +STRING 8 "r##\"🦀" > error0..8 token("r##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) diff --git a/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt index a906380c7..93f6f72ae 100644 --- a/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt +++ b/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt @@ -1,2 +1,2 @@ -RAW_STRING 8 "r##\"\\x7f" +STRING 8 "r##\"\\x7f" > error0..8 token("r##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) diff --git a/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt index 5667c6149..1d2ebc60f 100644 --- a/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt +++ b/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt @@ -1,2 +1,2 @@ -RAW_STRING 12 "r##\"\\u{20AA}" +STRING 12 "r##\"\\u{20AA}" > error0..12 token("r##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) diff --git a/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt b/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt index 141c8268e..c567ab7e2 100644 --- a/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt +++ b/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt @@ -1,2 +1,2 @@ -RAW_STRING 5 "r##\" " +STRING 5 "r##\" " > error0..5 token("r##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) diff --git a/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt b/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt index f61d4cc91..343b20323 100644 --- a/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt +++ b/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt @@ -1,2 +1,2 @@ -RAW_STRING 5 "r##\"\\" +STRING 5 "r##\"\\" > error0..5 token("r##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) diff --git a/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt index 12e2c0fc0..041a42737 100644 --- a/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt +++ b/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt @@ -1,2 +1,2 @@ -RAW_STRING 6 "r##\"\\n" +STRING 6 "r##\"\\n" > error0..6 token("r##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) diff --git a/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt index fe12cb5fc..efaa1cafd 100644 --- a/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt +++ b/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt @@ -1,2 +1,2 @@ -RAW_BYTE_STRING 5 "br##\"" +BYTE_STRING 5 "br##\"" > error0..5 token("br##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) diff --git 
a/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt b/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt index 5be2a7861..b6c938f94 100644 --- a/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt +++ b/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt @@ -1,2 +1,2 @@ -RAW_BYTE_STRING 9 "br##\"🦀" +BYTE_STRING 9 "br##\"🦀" > error0..9 token("br##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) diff --git a/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt index 6cbe08d07..f82efe49a 100644 --- a/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt +++ b/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt @@ -1,2 +1,2 @@ -RAW_BYTE_STRING 9 "br##\"\\x7f" +BYTE_STRING 9 "br##\"\\x7f" > error0..9 token("br##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) diff --git a/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt index f56a4f984..4e4a57696 100644 --- a/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt +++ b/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt @@ -1,2 +1,2 @@ -RAW_BYTE_STRING 13 "br##\"\\u{20AA}" +BYTE_STRING 13 "br##\"\\u{20AA}" > error0..13 token("br##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) diff --git a/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt b/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt index 3d32ce34e..0018c8623 100644 --- a/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt +++ b/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt @@ -1,2 +1,2 @@ -RAW_BYTE_STRING 6 "br##\" " +BYTE_STRING 6 "br##\" " > error0..6 token("br##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) diff --git a/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt b/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt index 320fea177..c3ba4ae82 100644 --- a/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt +++ b/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt @@ -1,2 +1,2 @@ -RAW_BYTE_STRING 6 "br##\"\\" +BYTE_STRING 6 "br##\"\\" > error0..6 token("br##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) diff --git a/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt index b3a56380c..7bda72276 100644 --- a/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt +++ b/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt @@ -1,2 +1,2 @@ -RAW_BYTE_STRING 7 "br##\"\\n" +BYTE_STRING 7 "br##\"\\n" > error0..7 token("br##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) diff --git 
a/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt index 5af1e2d97..ce92d2ff7 100644 --- a/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt +++ b/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt @@ -1,2 +1,2 @@ -RAW_STRING 3 "r##" +STRING 3 "r##" > error0..3 token("r##") msg(Missing `"` symbol after `#` symbols to begin the raw string literal) diff --git a/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt index aec7afd92..a75d9030c 100644 --- a/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt +++ b/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt @@ -1,2 +1,2 @@ -RAW_BYTE_STRING 4 "br##" +BYTE_STRING 4 "br##" > error0..4 token("br##") msg(Missing `"` symbol after `#` symbols to begin the raw byte string literal) diff --git a/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt b/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt index e22fe5374..516e0b78e 100644 --- a/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt +++ b/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt @@ -1,4 +1,4 @@ -RAW_STRING 4 "r## " +STRING 4 "r## " IDENT 1 "I" WHITESPACE 1 " " IDENT 4 "lack" diff --git a/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt b/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt index d74ea4c27..2f8a6f5f2 100644 --- a/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt +++ b/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt @@ -1,4 +1,4 @@ -RAW_BYTE_STRING 5 "br## " +BYTE_STRING 5 "br## " IDENT 1 "I" WHITESPACE 1 " " IDENT 4 "lack" diff --git a/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt b/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt index bc03b51a8..e61ad99be 100644 --- a/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt +++ b/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt @@ -4,13 +4,13 @@ BYTE 4 "b\'x\'" WHITESPACE 1 " " BYTE_STRING 6 "b\"foo\"" WHITESPACE 1 " " -RAW_BYTE_STRING 4 "br\"\"" +BYTE_STRING 4 "br\"\"" WHITESPACE 1 "\n" BYTE 6 "b\'\'suf" WHITESPACE 1 " " BYTE_STRING 5 "b\"\"ix" WHITESPACE 1 " " -RAW_BYTE_STRING 6 "br\"\"br" +BYTE_STRING 6 "br\"\"br" WHITESPACE 1 "\n" BYTE 5 "b\'\\n\'" WHITESPACE 1 " " diff --git a/crates/syntax/test_data/lexer/ok/0009_strings.txt b/crates/syntax/test_data/lexer/ok/0009_strings.txt index 4cb4d711d..988a8877b 100644 --- a/crates/syntax/test_data/lexer/ok/0009_strings.txt +++ b/crates/syntax/test_data/lexer/ok/0009_strings.txt @@ -1,6 +1,6 @@ STRING 7 "\"hello\"" WHITESPACE 1 " " -RAW_STRING 8 "r\"world\"" +STRING 8 "r\"world\"" WHITESPACE 1 " " STRING 17 "\"\\n\\\"\\\\no escape\"" WHITESPACE 1 " " diff --git a/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt b/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt index 9cf0957d1..db0d5ffd1 100644 --- a/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt +++ b/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt @@ -1,2 +1,2 @@ -RAW_STRING 36 "r###\"this is a r##\"raw\"## string\"###" +STRING 36 "r###\"this is a r##\"raw\"## string\"###" WHITESPACE 1 "\n" diff --git a/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast 
b/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast index 9a87b5b93..ae838105d 100644 --- a/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast +++ b/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast @@ -104,7 +104,7 @@ SOURCE_FILE@0..189 EQ@142..143 "=" WHITESPACE@143..144 " " LITERAL@144..148 - RAW_STRING@144..148 "r\"d\"" + STRING@144..148 "r\"d\"" SEMICOLON@148..149 ";" WHITESPACE@149..154 "\n " LET_STMT@154..167 @@ -128,7 +128,7 @@ SOURCE_FILE@0..189 EQ@178..179 "=" WHITESPACE@179..180 " " LITERAL@180..185 - RAW_BYTE_STRING@180..185 "br\"f\"" + BYTE_STRING@180..185 "br\"f\"" SEMICOLON@185..186 ";" WHITESPACE@186..187 "\n" R_CURLY@187..188 "}" -- cgit v1.2.3
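Illustration (not part of the patch): after this change, raw and non-raw literals share the STRING and BYTE_STRING kinds, and raw-ness is recovered from the token text, as the new ast::String::is_raw / ast::ByteString::is_raw methods do. Below is a minimal standalone sketch of that idea; the StrKind enum and classify_string_token helper are hypothetical names for illustration, not rust-analyzer API.

// Raw-ness is a property of the token text, not a separate SyntaxKind.
#[derive(Debug, PartialEq)]
enum StrKind {
    Plain,
    Raw,
}

// A STRING token's text starts with 'r' exactly when the literal is raw.
fn classify_string_token(text: &str) -> StrKind {
    if text.starts_with('r') {
        StrKind::Raw
    } else {
        StrKind::Plain
    }
}

fn main() {
    assert_eq!(classify_string_token("\"hello\""), StrKind::Plain);
    assert_eq!(classify_string_token("r#\"world\"#"), StrKind::Raw);
}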