Diffstat (limited to 'crates/syntax')
-rw-r--r--  crates/syntax/Cargo.toml                     |  5
-rw-r--r--  crates/syntax/src/algo.rs                    |  7
-rw-r--r--  crates/syntax/src/ast.rs                     |  4
-rw-r--r--  crates/syntax/src/ast/make.rs                |  8
-rw-r--r--  crates/syntax/src/ast/node_ext.rs            | 10
-rw-r--r--  crates/syntax/src/ast/token_ext.rs           | 12
-rw-r--r--  crates/syntax/src/lib.rs                     |  4
-rw-r--r--  crates/syntax/src/parsing/reparsing.rs       |  3
-rw-r--r--  crates/syntax/src/parsing/text_tree_sink.rs  |  4
-rw-r--r--  crates/syntax/src/syntax_node.rs             |  4
-rw-r--r--  crates/syntax/src/validation.rs              |  2
11 files changed, 30 insertions(+), 33 deletions(-)
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 52394b337..55b437a3a 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -12,15 +12,12 @@ doctest = false
 
 [dependencies]
 itertools = "0.10.0"
-rowan = "0.10.3"
+rowan = "0.12"
 rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
 once_cell = "1.3.1"
 indexmap = "1.4.0"
-# This crate transitively depends on `smol_str` via `rowan`.
-# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
-# to reduce number of compilations
 smol_str = { version = "0.1.15", features = ["serde"] }
 serde = { version = "1.0.106", features = ["derive"] }
 
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 827ae78f9..2ff92f9f6 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -4,6 +4,7 @@ use std::{
     fmt,
     hash::BuildHasherDefault,
     ops::{self, RangeInclusive},
+    ptr,
 };
 
 use indexmap::IndexMap;
@@ -171,7 +172,7 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
             && lhs.text_range().len() == rhs.text_range().len()
             && match (&lhs, &rhs) {
                 (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
-                    lhs.green() == rhs.green() || lhs.text() == rhs.text()
+                    ptr::eq(lhs.green(), rhs.green()) || lhs.text() == rhs.text()
                 }
                 (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
                 _ => false,
@@ -566,7 +567,7 @@ impl<'a> SyntaxRewriter<'a> {
 
 fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
     match element {
-        NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()),
+        NodeOrToken::Node(it) => NodeOrToken::Node(it.green().to_owned()),
         NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
     }
 }
@@ -624,7 +625,7 @@ fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
 
 fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
     match element {
-        NodeOrToken::Node(it) => it.green().clone().into(),
+        NodeOrToken::Node(it) => it.green().to_owned().into(),
         NodeOrToken::Token(it) => it.green().clone().into(),
     }
 }
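
The pattern behind the algo.rs and make.rs changes: with rowan 0.12, `SyntaxNode::green()` hands out borrowed green data rather than `&GreenNode`, so an owned copy is spelled `.to_owned()` and cheap identity checks become pointer comparisons. A minimal sketch of both call-site shapes, assuming only the re-exports visible in this patch; the helper names `detach` and `same_green` are illustrative, not part of the crate:

    use std::ptr;

    use syntax::{GreenNode, SyntaxNode};

    // Rebuild a node as the root of its own tree, as `make::unroot` does:
    // `green()` is borrowed, and `.to_owned()` yields the owned `GreenNode`
    // that `new_root` expects.
    fn detach(node: &SyntaxNode) -> SyntaxNode {
        let green: GreenNode = node.green().to_owned();
        SyntaxNode::new_root(green)
    }

    // Fast-path equality as in `diff`: identical green pointers imply
    // identical subtrees, without walking or comparing text.
    fn same_green(lhs: &SyntaxNode, rhs: &SyntaxNode) -> bool {
        ptr::eq(lhs.green(), rhs.green())
    }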
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 83de067d9..a25ff655e 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -12,7 +12,7 @@ use std::marker::PhantomData;
 
 use crate::{
     syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
-    SmolStr, SyntaxKind,
+    SyntaxKind,
 };
 
 pub use self::{
@@ -54,7 +54,7 @@ pub trait AstToken {
 
     fn syntax(&self) -> &SyntaxToken;
 
-    fn text(&self) -> &SmolStr {
+    fn text(&self) -> &str {
         self.syntax().text()
     }
 }
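
`AstToken::text()` (and the `text()` helpers in node_ext.rs below) now return a plain `&str`, so `as_str()` calls disappear and an owned `SmolStr` becomes an explicit conversion at the call site. A small sketch of the two common shapes, assuming only what this patch shows; both helper names are hypothetical:

    use syntax::{ast::AstToken, SmolStr};

    // Plain `&str` methods now apply directly to token text.
    fn token_len(token: &impl AstToken) -> usize {
        token.text().len()
    }

    // Where an owned copy is still wanted, convert explicitly, as
    // `ast::Attr::simple_name` does with `.into()`.
    fn owned_text(token: &impl AstToken) -> SmolStr {
        token.text().into()
    }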
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 9ffc3ae11..b755c9692 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -478,7 +478,7 @@ fn ast_from_text<N: AstNode>(text: &str) -> N {
 }
 
 fn unroot(n: SyntaxNode) -> SyntaxNode {
-    SyntaxNode::new_root(n.green().clone())
+    SyntaxNode::new_root(n.green().to_owned())
 }
 
 pub mod tokens {
@@ -495,7 +495,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
+            .find(|it| it.kind() == WHITESPACE && it.text() == " ")
             .unwrap()
     }
 
@@ -523,7 +523,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
             .unwrap()
     }
 
@@ -533,7 +533,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
             .unwrap()
     }
 
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 738c92a5b..5c8cf900f 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -13,19 +13,19 @@ use crate::{
 };
 
 impl ast::Lifetime {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::Name {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::NameRef {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 
@@ -34,7 +34,7 @@ impl ast::NameRef {
     }
 }
 
-fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
+fn text_of_first_token(node: &SyntaxNode) -> &str {
     node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
 }
 
@@ -121,7 +121,7 @@ impl ast::Attr {
     pub fn simple_name(&self) -> Option<SmolStr> {
         let path = self.path()?;
         match (path.segment(), path.qualifier()) {
-            (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()),
+            (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
             _ => None,
         }
     }
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 5e9620a40..5e07ec7d1 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -41,7 +41,7 @@ impl ast::Comment {
         match kind {
             CommentKind { shape, doc: Some(_) } => {
                 let prefix = kind.prefix();
-                let text = &self.text().as_str()[prefix.len()..];
+                let text = &self.text()[prefix.len()..];
                 let ws = text.chars().next().filter(|c| c.is_whitespace());
                 let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
                 match shape {
@@ -156,13 +156,13 @@ impl ast::String {
 
     pub fn value(&self) -> Option<Cow<'_, str>> {
         if self.is_raw() {
-            let text = self.text().as_str();
+            let text = self.text();
             let text =
                 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
             return Some(Cow::Borrowed(text));
         }
 
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
 
         let mut buf = String::new();
@@ -190,7 +190,7 @@ impl ast::String {
     }
 
     pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
-        let text = self.text().as_str();
+        let text = self.text();
         let offsets = QuoteOffsets::new(text)?;
         let o = self.syntax().text_range().start();
         let offsets = QuoteOffsets {
@@ -560,7 +560,7 @@ impl HasFormatSpecifier for ast::String {
     fn char_ranges(
         &self,
     ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
         let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
 
@@ -590,7 +590,7 @@ impl ast::IntNumber {
     pub fn value(&self) -> Option<u128> {
         let token = self.syntax();
 
-        let mut text = token.text().as_str();
+        let mut text = token.text();
         if let Some(suffix) = self.suffix() {
             text = &text[..text.len() - suffix.len()]
         }
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index ea7482bb1..11294c5b2 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -56,9 +56,9 @@ pub use crate::{
 };
 pub use parser::{SyntaxKind, T};
 pub use rowan::{
-    Direction, GreenNode, NodeOrToken, SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset,
-    WalkEvent,
+    Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent,
 };
+pub use smol_str::SmolStr;
 
 /// `Parse` is the result of the parsing: a syntax tree and a collection of
 /// errors.
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 76f01084c..3d637bf91 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -73,8 +73,7 @@ fn reparse_token<'node>(
         new_text.pop();
     }
 
-    let new_token =
-        GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into());
+    let new_token = GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), &new_text);
     Some((
         prev_token.replace_with(new_token),
         new_err.into_iter().collect(),
diff --git a/crates/syntax/src/parsing/text_tree_sink.rs b/crates/syntax/src/parsing/text_tree_sink.rs
index ce27c3dd9..d5ddc076f 100644
--- a/crates/syntax/src/parsing/text_tree_sink.rs
+++ b/crates/syntax/src/parsing/text_tree_sink.rs
@@ -8,7 +8,7 @@ use crate::{
     ast,
     parsing::Token,
     syntax_node::GreenNode,
-    SmolStr, SyntaxError,
+    SyntaxError,
     SyntaxKind::{self, *},
     SyntaxTreeBuilder, TextRange, TextSize,
 };
@@ -135,7 +135,7 @@ impl<'a> TextTreeSink<'a> {
 
     fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) {
         let range = TextRange::at(self.text_pos, len);
-        let text: SmolStr = self.text[range].into();
+        let text = &self.text[range];
         self.text_pos += len;
         self.token_pos += n_tokens;
         self.inner.token(kind, text);
diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs
index cc30138fa..8f643b228 100644
--- a/crates/syntax/src/syntax_node.rs
+++ b/crates/syntax/src/syntax_node.rs
@@ -8,7 +8,7 @@
 
 use rowan::{GreenNodeBuilder, Language};
 
-use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize};
+use crate::{Parse, SyntaxError, SyntaxKind, TextSize};
 
 pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
 
@@ -53,7 +53,7 @@ impl SyntaxTreeBuilder {
         Parse::new(green, errors)
     }
 
-    pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {
+    pub fn token(&mut self, kind: SyntaxKind, text: &str) {
         let kind = RustLanguage::kind_to_raw(kind);
         self.inner.token(kind, text)
     }
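
`SyntaxTreeBuilder::token` now borrows its text as `&str`, matching rowan 0.12's builder, so a sink can pass a slice of the original source instead of allocating a `SmolStr` per token; that is exactly what `do_token` in text_tree_sink.rs does above. A sketch of that calling convention, with `emit_token` as a hypothetical helper:

    use syntax::{SyntaxKind, SyntaxTreeBuilder, TextRange, TextSize};

    // Slice the token's text out of the source and hand the builder a
    // borrowed `&str`; no intermediate `SmolStr` is built on this side.
    fn emit_token(
        builder: &mut SyntaxTreeBuilder,
        source: &str,
        kind: SyntaxKind,
        pos: TextSize,
        len: TextSize,
    ) {
        let range = TextRange::at(pos, len);
        builder.token(kind, &source[range]);
    }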
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 7901580ee..7694e8834 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -116,7 +116,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
     }
 
     let token = literal.token();
-    let text = token.text().as_str();
+    let text = token.text();
 
     // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
     let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {