From 46b4f89c920c314caf1a8af2abdb09732d100d67 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Wed, 20 Jan 2021 01:56:11 +0300
Subject: .

---
 crates/syntax/Cargo.toml           |  2 +-
 crates/syntax/src/ast.rs           |  4 ++--
 crates/syntax/src/ast/make.rs      |  6 +++---
 crates/syntax/src/ast/node_ext.rs  | 10 +++++-----
 crates/syntax/src/ast/token_ext.rs | 12 ++++++------
 crates/syntax/src/validation.rs    |  2 +-
 6 files changed, 18 insertions(+), 18 deletions(-)

(limited to 'crates/syntax')

diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 37d3faa03..165533388 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -12,7 +12,7 @@ doctest = false
 
 [dependencies]
 itertools = "0.10.0"
-rowan = "0.11"
+rowan = { path="../../../rowan" }
 rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 83de067d9..a25ff655e 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -12,7 +12,7 @@ use std::marker::PhantomData;
 
 use crate::{
     syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
-    SmolStr, SyntaxKind,
+    SyntaxKind,
 };
 
 pub use self::{
@@ -54,7 +54,7 @@
 
     fn syntax(&self) -> &SyntaxToken;
 
-    fn text(&self) -> &SmolStr {
+    fn text(&self) -> &str {
         self.syntax().text()
     }
 }
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index cc1717237..b755c9692 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -495,7 +495,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
+            .find(|it| it.kind() == WHITESPACE && it.text() == " ")
             .unwrap()
     }
 
@@ -523,7 +523,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
             .unwrap()
     }
 
@@ -533,7 +533,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
             .unwrap()
     }
 
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 738c92a5b..5c8cf900f 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -13,19 +13,19 @@ use crate::{
 };
 
 impl ast::Lifetime {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::Name {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::NameRef {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 
     pub fn as_tuple_field(&self) -> Option<usize> {
         self.text().parse().ok()
     }
 }
 
-fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
+fn text_of_first_token(node: &SyntaxNode) -> &str {
     node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
 }
 
@@ -121,7 +121,7 @@ impl ast::Attr {
     pub fn simple_name(&self) -> Option<SmolStr> {
         let path = self.path()?;
         match (path.segment(), path.qualifier()) {
-            (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()),
+            (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
             _ => None,
         }
     }
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 5e9620a40..5e07ec7d1 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -41,7 +41,7 @@ impl ast::Comment {
         match kind {
             CommentKind { shape, doc: Some(_) } => {
                 let prefix = kind.prefix();
-                let text = &self.text().as_str()[prefix.len()..];
+                let text = &self.text()[prefix.len()..];
                 let ws = text.chars().next().filter(|c| c.is_whitespace());
                 let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
                 match shape {
@@ -156,13 +156,13 @@ impl ast::String {
 
     pub fn value(&self) -> Option<Cow<'_, str>> {
         if self.is_raw() {
-            let text = self.text().as_str();
+            let text = self.text();
             let text =
                 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
             return Some(Cow::Borrowed(text));
         }
 
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
 
         let mut buf = String::new();
@@ -190,7 +190,7 @@
     }
 
     pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
-        let text = self.text().as_str();
+        let text = self.text();
         let offsets = QuoteOffsets::new(text)?;
         let o = self.syntax().text_range().start();
         let offsets = QuoteOffsets {
@@ -560,7 +560,7 @@ impl HasFormatSpecifier for ast::String {
     fn char_ranges(
         &self,
     ) -> Option<Vec<(TextRange, Option<FormatSpecifier>)>> {
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
         let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
 
@@ -590,7 +590,7 @@ impl ast::IntNumber {
     pub fn value(&self) -> Option<u128> {
         let token = self.syntax();
 
-        let mut text = token.text().as_str();
+        let mut text = token.text();
         if let Some(suffix) = self.suffix() {
             text = &text[..text.len() - suffix.len()]
         }
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 7901580ee..7694e8834 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -116,7 +116,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
     }
 
     let token = literal.token();
-    let text = token.text().as_str();
+    let text = token.text();
 
     // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
     let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
--
cgit v1.2.3


From 3429b32ad119756985e1a7bfa5e9e53042671f8b Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Wed, 20 Jan 2021 14:04:53 +0300
Subject: :arrow_up: rowan

It now stores text inline with tokens
---
 crates/syntax/Cargo.toml                    | 5 +----
 crates/syntax/src/lib.rs                    | 4 ++--
 crates/syntax/src/parsing/reparsing.rs      | 3 +--
 crates/syntax/src/parsing/text_tree_sink.rs | 4 ++--
 crates/syntax/src/syntax_node.rs            | 4 ++--
 5 files changed, 8 insertions(+), 12 deletions(-)

(limited to 'crates/syntax')

diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 165533388..55b437a3a 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -12,15 +12,12 @@ doctest = false
 
 [dependencies]
 itertools = "0.10.0"
-rowan = { path="../../../rowan" }
+rowan = "0.12"
 rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
 once_cell = "1.3.1"
 indexmap = "1.4.0"
-# This crate transitively depends on `smol_str` via `rowan`.
-# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
-# to reduce number of compilations
 smol_str = { version = "0.1.15", features = ["serde"] }
 serde = { version = "1.0.106", features = ["derive"] }
 
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index ea7482bb1..11294c5b2 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -56,9 +56,9 @@ pub use crate::{
 };
 pub use parser::{SyntaxKind, T};
 pub use rowan::{
-    Direction, GreenNode, NodeOrToken, SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset,
-    WalkEvent,
+    Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent,
 };
+pub use smol_str::SmolStr;
 
 /// `Parse` is the result of the parsing: a syntax tree and a collection of
 /// errors.
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 76f01084c..3d637bf91 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -73,8 +73,7 @@ fn reparse_token<'node>(
                 new_text.pop();
             }
 
-            let new_token =
-                GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into());
+            let new_token = GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), &new_text);
             Some((
                 prev_token.replace_with(new_token),
                 new_err.into_iter().collect(),
diff --git a/crates/syntax/src/parsing/text_tree_sink.rs b/crates/syntax/src/parsing/text_tree_sink.rs
index ce27c3dd9..d5ddc076f 100644
--- a/crates/syntax/src/parsing/text_tree_sink.rs
+++ b/crates/syntax/src/parsing/text_tree_sink.rs
@@ -8,7 +8,7 @@ use crate::{
     ast,
     parsing::Token,
     syntax_node::GreenNode,
-    SmolStr, SyntaxError,
+    SyntaxError,
     SyntaxKind::{self, *},
     SyntaxTreeBuilder, TextRange, TextSize,
 };
@@ -135,7 +135,7 @@ impl<'a> TextTreeSink<'a> {
     fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) {
         let range = TextRange::at(self.text_pos, len);
-        let text: SmolStr = self.text[range].into();
+        let text = &self.text[range];
         self.text_pos += len;
         self.token_pos += n_tokens;
         self.inner.token(kind, text);
     }
diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs
index cc30138fa..8f643b228 100644
--- a/crates/syntax/src/syntax_node.rs
+++ b/crates/syntax/src/syntax_node.rs
@@ -8,7 +8,7 @@
 
 use rowan::{GreenNodeBuilder, Language};
 
-use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize};
+use crate::{Parse, SyntaxError, SyntaxKind, TextSize};
 
 pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
 
@@ -53,7 +53,7 @@ impl SyntaxTreeBuilder {
         Parse::new(green, errors)
     }
 
-    pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {
+    pub fn token(&mut self, kind: SyntaxKind, text: &str) {
         let kind = RustLanguage::kind_to_raw(kind);
         self.inner.token(kind, text)
     }
--
cgit v1.2.3
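
Editorial note on the pair of patches above: the visible effect is that token-text accessors such as `AstToken::text`, `ast::Name::text`, and `SyntaxToken::text` now hand out a borrowed `&str` instead of `&SmolStr`, and `SyntaxTreeBuilder::token` / `GreenToken::new` now take `&str`; per the second commit message this works because rowan 0.12 stores the text inline with the token. The sketch below is not rust-analyzer or rowan code: `GreenToken`, `AstToken`, and `Whitespace` here are simplified stand-in types, used only to illustrate why the borrowed `&str` signature is sound once the token owns its text, and how call sites from the diff (dropping `.as_str()`, using `.into()` for an owned copy) adapt.

// Standalone Rust sketch with simplified stand-in types (assumption: these are
// not the real rowan/rust-analyzer definitions, just a model of the new shape).

/// Stand-in for a green token that stores its text inline.
struct GreenToken {
    kind: u16,
    text: String,
}

impl GreenToken {
    /// Mirrors the new builder-style signature: callers pass `&str`, the token owns a copy.
    fn new(kind: u16, text: &str) -> GreenToken {
        GreenToken { kind, text: text.to_string() }
    }

    /// Because the text lives inside the token, a plain `&str` can be borrowed from it.
    fn text(&self) -> &str {
        &self.text
    }
}

/// Stand-in for the `AstToken` trait after this change.
trait AstToken {
    fn syntax(&self) -> &GreenToken;

    // Borrow the text instead of handing out an owned small string.
    fn text(&self) -> &str {
        self.syntax().text()
    }
}

struct Whitespace(GreenToken);

impl AstToken for Whitespace {
    fn syntax(&self) -> &GreenToken {
        &self.0
    }
}

fn main() {
    let ws = Whitespace(GreenToken::new(0, "\n\n"));

    // Call sites drop `.as_str()`: a `&str` compares directly against string literals,
    // like the `it.text() == "\n\n"` checks in `ast/make.rs`.
    assert_eq!(ws.text(), "\n\n");

    // Where an owned string is still needed (as in `Attr::simple_name`), convert explicitly.
    let owned: String = ws.text().into();
    assert_eq!(owned.len(), 2);

    // Read the kind field so the sketch compiles without warnings.
    let _ = ws.syntax().kind;
}

The design point the sketch captures: before the rowan bump the token text was held in a separate small-string value, so accessors had to expose that type; once the text is owned by the token itself, borrowing `&str` for the token's lifetime is enough, and `SmolStr` survives only as an explicit conversion at the few call sites that still want an owned copy (re-exported from `smol_str` directly in `lib.rs`).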