Diffstat (limited to 'crates/syntax')
-rw-r--r--  crates/syntax/Cargo.toml            2
-rw-r--r--  crates/syntax/src/ast.rs            4
-rw-r--r--  crates/syntax/src/ast/make.rs       6
-rw-r--r--  crates/syntax/src/ast/node_ext.rs  10
-rw-r--r--  crates/syntax/src/ast/token_ext.rs 12
-rw-r--r--  crates/syntax/src/validation.rs     2
6 files changed, 18 insertions, 18 deletions
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 37d3faa03..165533388 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -12,7 +12,7 @@ doctest = false
 
 [dependencies]
 itertools = "0.10.0"
-rowan = "0.11"
+rowan = { path="../../../rowan" }
 rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 83de067d9..a25ff655e 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -12,7 +12,7 @@ use std::marker::PhantomData;
 
 use crate::{
     syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
-    SmolStr, SyntaxKind,
+    SyntaxKind,
 };
 
 pub use self::{
@@ -54,7 +54,7 @@ pub trait AstToken {
 
     fn syntax(&self) -> &SyntaxToken;
 
-    fn text(&self) -> &SmolStr {
+    fn text(&self) -> &str {
         self.syntax().text()
     }
 }
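
With the trait's default text() now returning a plain &str, call sites can compare against string literals without .as_str(). A minimal sketch, assuming this crate's ast::Whitespace token type and the usual AstToken::cast constructor (the helper function itself is hypothetical):

    use syntax::{ast::{self, AstToken}, SyntaxToken};

    // Hypothetical helper: true if `token` is a whitespace token holding a single space.
    // No `.as_str()` is needed now that `text()` hands back `&str`.
    fn is_single_space(token: &SyntaxToken) -> bool {
        ast::Whitespace::cast(token.clone()).map_or(false, |ws| ws.text() == " ")
    }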
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index cc1717237..b755c9692 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -495,7 +495,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
+            .find(|it| it.kind() == WHITESPACE && it.text() == " ")
             .unwrap()
     }
 
@@ -523,7 +523,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
             .unwrap()
     }
 
@@ -533,7 +533,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
             .unwrap()
     }
 
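
For reference, the three chains above sit inside the whitespace constructors of ast::make::tokens; only the find predicate loses its .as_str(). A rough usage sketch, assuming the helper names single_space and single_newline (the hunks do not show the function signatures):

    use syntax::ast::make;

    // The constructors hand back plain `SyntaxToken`s; with `text()` now returning
    // `&str`, asserting on their contents no longer needs `.as_str()` either.
    fn main() {
        assert_eq!(make::tokens::single_space().text(), " ");
        assert_eq!(make::tokens::single_newline().text(), "\n");
    }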
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 738c92a5b..5c8cf900f 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -13,19 +13,19 @@ use crate::{
 };
 
 impl ast::Lifetime {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::Name {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::NameRef {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 
@@ -34,7 +34,7 @@ impl ast::NameRef {
     }
 }
 
-fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
+fn text_of_first_token(node: &SyntaxNode) -> &str {
     node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
 }
 
@@ -121,7 +121,7 @@ impl ast::Attr {
     pub fn simple_name(&self) -> Option<SmolStr> {
         let path = self.path()?;
         match (path.segment(), path.qualifier()) {
-            (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()),
+            (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
             _ => None,
         }
     }
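
Callers that still want an owned SmolStr, like Attr::simple_name above, now convert with .into() instead of .clone(). A small sketch of the same pattern, assuming SmolStr is re-exported at the crate root and that ast::Fn implements the NameOwner trait as elsewhere in this crate (the helper is hypothetical):

    use syntax::{ast::{self, NameOwner}, SmolStr};

    // Hypothetical helper: an owned copy of a function's name.
    // `name.text()` is `&str` now, so ownership is taken via `Into<SmolStr>`.
    fn owned_fn_name(func: &ast::Fn) -> Option<SmolStr> {
        func.name().map(|name| name.text().into())
    }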
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 5e9620a40..5e07ec7d1 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -41,7 +41,7 @@ impl ast::Comment {
         match kind {
             CommentKind { shape, doc: Some(_) } => {
                 let prefix = kind.prefix();
-                let text = &self.text().as_str()[prefix.len()..];
+                let text = &self.text()[prefix.len()..];
                 let ws = text.chars().next().filter(|c| c.is_whitespace());
                 let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
                 match shape {
@@ -156,13 +156,13 @@ impl ast::String {
 
     pub fn value(&self) -> Option<Cow<'_, str>> {
         if self.is_raw() {
-            let text = self.text().as_str();
+            let text = self.text();
             let text =
                 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
             return Some(Cow::Borrowed(text));
         }
 
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
 
         let mut buf = String::new();
@@ -190,7 +190,7 @@ impl ast::String {
     }
 
     pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
-        let text = self.text().as_str();
+        let text = self.text();
         let offsets = QuoteOffsets::new(text)?;
         let o = self.syntax().text_range().start();
         let offsets = QuoteOffsets {
@@ -560,7 +560,7 @@ impl HasFormatSpecifier for ast::String {
     fn char_ranges(
         &self,
     ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
         let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
 
@@ -590,7 +590,7 @@ impl ast::IntNumber {
     pub fn value(&self) -> Option<u128> {
         let token = self.syntax();
 
-        let mut text = token.text().as_str();
+        let mut text = token.text();
         if let Some(suffix) = self.suffix() {
             text = &text[..text.len() - suffix.len()]
         }
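
The IntNumber::value hunk shows the general shape: take the token text as &str, slice off a known suffix, then parse. A standalone, simplified sketch of that pattern using only std (the real method also handles radix prefixes and `_` separators):

    // Simplified mirror of the suffix-stripping in `IntNumber::value`.
    fn int_value(text: &str, suffix: Option<&str>) -> Option<u128> {
        let digits = match suffix {
            Some(s) => &text[..text.len() - s.len()],
            None => text,
        };
        digits.parse().ok()
    }

    fn main() {
        assert_eq!(int_value("92u32", Some("u32")), Some(92));
        assert_eq!(int_value("1234", None), Some(1234));
    }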
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 7901580ee..7694e8834 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -116,7 +116,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
     }
 
     let token = literal.token();
-    let text = token.text().as_str();
+    let text = token.text();
 
     // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
     let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {