From 46b4f89c920c314caf1a8af2abdb09732d100d67 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Wed, 20 Jan 2021 01:56:11 +0300
Subject: .

---
 crates/syntax/Cargo.toml           |  2 +-
 crates/syntax/src/ast.rs           |  4 ++--
 crates/syntax/src/ast/make.rs      |  6 +++---
 crates/syntax/src/ast/node_ext.rs  | 10 +++++-----
 crates/syntax/src/ast/token_ext.rs | 12 ++++++------
 crates/syntax/src/validation.rs    |  2 +-
 6 files changed, 18 insertions(+), 18 deletions(-)

(limited to 'crates/syntax')

diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 37d3faa03..165533388 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -12,7 +12,7 @@ doctest = false
 
 [dependencies]
 itertools = "0.10.0"
-rowan = "0.11"
+rowan = { path="../../../rowan" }
 rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 83de067d9..a25ff655e 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -12,7 +12,7 @@ use std::marker::PhantomData;
 
 use crate::{
     syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
-    SmolStr, SyntaxKind,
+    SyntaxKind,
 };
 
 pub use self::{
@@ -54,7 +54,7 @@ pub trait AstToken {
 
     fn syntax(&self) -> &SyntaxToken;
 
-    fn text(&self) -> &SmolStr {
+    fn text(&self) -> &str {
         self.syntax().text()
     }
 }
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index cc1717237..b755c9692 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -495,7 +495,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
+            .find(|it| it.kind() == WHITESPACE && it.text() == " ")
             .unwrap()
     }
 
@@ -523,7 +523,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
             .unwrap()
     }
 
@@ -533,7 +533,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
             .unwrap()
     }
 
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 738c92a5b..5c8cf900f 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -13,19 +13,19 @@ use crate::{
 };
 
 impl ast::Lifetime {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::Name {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::NameRef {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 
@@ -34,7 +34,7 @@ impl ast::NameRef {
     }
 }
 
-fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
+fn text_of_first_token(node: &SyntaxNode) -> &str {
     node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
 }
 
@@ -121,7 +121,7 @@ impl ast::Attr {
     pub fn simple_name(&self) -> Option<SmolStr> {
         let path = self.path()?;
         match (path.segment(), path.qualifier()) {
-            (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()),
+            (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
            _ => None,
         }
     }
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 5e9620a40..5e07ec7d1 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -41,7 +41,7 @@ impl ast::Comment {
         match kind {
             CommentKind { shape, doc: Some(_) } => {
                 let prefix = kind.prefix();
-                let text = &self.text().as_str()[prefix.len()..];
+                let text = &self.text()[prefix.len()..];
                 let ws = text.chars().next().filter(|c| c.is_whitespace());
                 let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
                 match shape {
@@ -156,13 +156,13 @@ impl ast::String {
 
     pub fn value(&self) -> Option<Cow<'_, str>> {
         if self.is_raw() {
-            let text = self.text().as_str();
+            let text = self.text();
             let text =
                 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
             return Some(Cow::Borrowed(text));
         }
 
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
 
         let mut buf = String::new();
@@ -190,7 +190,7 @@ impl ast::String {
     }
 
     pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
-        let text = self.text().as_str();
+        let text = self.text();
         let offsets = QuoteOffsets::new(text)?;
         let o = self.syntax().text_range().start();
         let offsets = QuoteOffsets {
@@ -560,7 +560,7 @@ impl HasFormatSpecifier for ast::String {
     fn char_ranges(
         &self,
    ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
         let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
 
@@ -590,7 +590,7 @@ impl ast::IntNumber {
     pub fn value(&self) -> Option<u128> {
         let token = self.syntax();
 
-        let mut text = token.text().as_str();
+        let mut text = token.text();
         if let Some(suffix) = self.suffix() {
             text = &text[..text.len() - suffix.len()]
         }
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 7901580ee..7694e8834 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -116,7 +116,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
     }
 
     let token = literal.token();
-    let text = token.text().as_str();
+    let text = token.text();
 
     // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
     let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
-- 
cgit v1.2.3