Diffstat (limited to 'crates/syntax/src')
-rw-r--r--  crates/syntax/src/ast/edit.rs             |  4
-rw-r--r--  crates/syntax/src/ast/generated/nodes.rs  | 31
-rw-r--r--  crates/syntax/src/ast/node_ext.rs         | 19
-rw-r--r--  crates/syntax/src/ast/token_ext.rs        |  2
-rw-r--r--  crates/syntax/src/parsing.rs              |  4
-rw-r--r--  crates/syntax/src/parsing/lexer.rs        |  2
-rw-r--r--  crates/syntax/src/parsing/reparsing.rs    |  6
-rw-r--r--  crates/syntax/src/tests.rs                |  4
8 files changed, 54 insertions, 18 deletions
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs
index 19107ee38..2663c0759 100644
--- a/crates/syntax/src/ast/edit.rs
+++ b/crates/syntax/src/ast/edit.rs
@@ -30,7 +30,7 @@ impl ast::UseTree {
         let suffix = if self.path().as_ref() == Some(prefix) && self.use_tree_list().is_none() {
             make::path_unqualified(make::path_segment_self())
         } else {
-            match split_path_prefix(&prefix) {
+            match split_path_prefix(prefix) {
                 Some(it) => it,
                 None => return self.clone(),
             }
@@ -95,7 +95,7 @@ impl fmt::Display for IndentLevel {
         let indent = if len <= spaces.len() {
             &spaces[..len]
         } else {
-            buf = iter::repeat(' ').take(len).collect::<String>();
+            buf = " ".repeat(len);
             &buf
         };
         fmt::Display::fmt(indent, f)
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 9a88fdb56..702de59a9 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -150,10 +150,7 @@ impl Attr {
     pub fn pound_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![#]) }
     pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
     pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
-    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
-    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
-    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
-    pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+    pub fn meta(&self) -> Option<Meta> { support::child(&self.syntax) }
     pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
 }
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -632,6 +629,16 @@ impl WherePred {
     pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
 }
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Meta {
+    pub(crate) syntax: SyntaxNode,
+}
+impl Meta {
+    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ExprStmt {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2072,6 +2079,17 @@ impl AstNode for WherePred {
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl AstNode for Meta {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == META }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
 impl AstNode for ExprStmt {
     fn can_cast(kind: SyntaxKind) -> bool { kind == EXPR_STMT }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
@@ -3887,6 +3905,11 @@ impl std::fmt::Display for WherePred {
         std::fmt::Display::fmt(self.syntax(), f)
     }
 }
+impl std::fmt::Display for Meta {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
 impl std::fmt::Display for ExprStmt {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         std::fmt::Display::fmt(self.syntax(), f)
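
Note (not part of the commit): the new Meta node above carries the path, `=` token, expression, and token tree that previously hung directly off Attr. A minimal usage sketch, assuming an ast::Attr parsed from something like #[doc = "hello"]; the describe_attr helper is hypothetical:

use syntax::ast;

// Hypothetical helper: reach an attribute's parts through the new Meta node.
fn describe_attr(attr: &ast::Attr) -> Option<String> {
    let meta = attr.meta()?;
    // For `#[doc = "hello"]`: path is `doc`, expr is the string literal.
    let path = meta.path()?;
    let value = meta.expr().map(|e| e.to_string()).unwrap_or_default();
    Some(format!("{} = {}", path, value))
}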
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index a60bc5ad9..3d27d2c1a 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -144,19 +144,20 @@ impl AttrKind {
 
 impl ast::Attr {
     pub fn as_simple_atom(&self) -> Option<SmolStr> {
-        if self.eq_token().is_some() || self.token_tree().is_some() {
+        let meta = self.meta()?;
+        if meta.eq_token().is_some() || meta.token_tree().is_some() {
             return None;
         }
         self.simple_name()
     }
 
     pub fn as_simple_call(&self) -> Option<(SmolStr, ast::TokenTree)> {
-        let tt = self.token_tree()?;
+        let tt = self.meta()?.token_tree()?;
         Some((self.simple_name()?, tt))
     }
 
     pub fn simple_name(&self) -> Option<SmolStr> {
-        let path = self.path()?;
+        let path = self.meta()?.path()?;
         match (path.segment(), path.qualifier()) {
             (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
             _ => None,
@@ -174,6 +175,18 @@ impl ast::Attr {
             _ => AttrKind::Outer,
         }
     }
+
+    pub fn path(&self) -> Option<ast::Path> {
+        self.meta()?.path()
+    }
+
+    pub fn expr(&self) -> Option<ast::Expr> {
+        self.meta()?.expr()
+    }
+
+    pub fn token_tree(&self) -> Option<ast::TokenTree> {
+        self.meta()?.token_tree()
+    }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
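
Note (not part of the commit): because Attr keeps delegating path(), expr(), and token_tree() to the new meta() node, helpers such as as_simple_call() behave as before. A small illustrative sketch; the is_derive function is hypothetical:

use syntax::ast;

// Hypothetical check: for `#[derive(Debug)]`, as_simple_call() yields
// ("derive", the `(Debug)` token tree), now routed through Attr::meta().
fn is_derive(attr: &ast::Attr) -> bool {
    matches!(attr.as_simple_call(), Some((name, _tt)) if name == "derive")
}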
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 4b1e1ccee..ad52d9f54 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -242,7 +242,7 @@ impl ast::ByteString {
             (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
             (Ok(c), true) => {
                 buf.reserve_exact(text.len());
-                buf.extend_from_slice(&text[..char_range.start].as_bytes());
+                buf.extend_from_slice(text[..char_range.start].as_bytes());
                 buf.push(c as u8);
             }
             (Err(_), _) => has_error = true,
diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs
index 431ed0699..001921343 100644
--- a/crates/syntax/src/parsing.rs
+++ b/crates/syntax/src/parsing.rs
@@ -15,7 +15,7 @@ use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
 pub(crate) use crate::parsing::{lexer::*, reparsing::incremental_reparse};
 
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
-    let (tokens, lexer_errors) = tokenize(&text);
+    let (tokens, lexer_errors) = tokenize(text);
 
     let mut token_source = TextTokenSource::new(text, &tokens);
     let mut tree_sink = TextTreeSink::new(text, &tokens);
@@ -33,7 +33,7 @@ pub(crate) fn parse_text_fragment<T: AstNode>(
     text: &str,
     fragment_kind: parser::FragmentKind,
 ) -> Result<T, ()> {
-    let (tokens, lexer_errors) = tokenize(&text);
+    let (tokens, lexer_errors) = tokenize(text);
     if !lexer_errors.is_empty() {
         return Err(());
     }
diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs
index 7c8d0a4c4..ae4844e48 100644
--- a/crates/syntax/src/parsing/lexer.rs
+++ b/crates/syntax/src/parsing/lexer.rs
@@ -144,7 +144,7 @@ fn rustc_token_kind_to_syntax_kind(
         }
 
         rustc_lexer::TokenKind::RawIdent => IDENT,
-        rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(&kind),
+        rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(kind),
 
         rustc_lexer::TokenKind::Lifetime { starts_with_number: false } => LIFETIME_IDENT,
         rustc_lexer::TokenKind::Lifetime { starts_with_number: true } => {
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 304f47b3d..186cc9e74 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -26,11 +26,11 @@ pub(crate) fn incremental_reparse(
     edit: &Indel,
     errors: Vec<SyntaxError>,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
-    if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) {
+    if let Some((green, new_errors, old_range)) = reparse_token(node, edit) {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
 
-    if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) {
+    if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
     None
@@ -52,7 +52,7 @@ fn reparse_token(
         }
     }
 
-    let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit);
+    let mut new_text = get_text_after_edit(prev_token.clone().into(), edit);
     let (new_token_kind, new_err) = lex_single_syntax_kind(&new_text)?;
 
     if new_token_kind != prev_token_kind
diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs
index 9f2426171..4961ca08d 100644
--- a/crates/syntax/src/tests.rs
+++ b/crates/syntax/src/tests.rs
@@ -69,13 +69,13 @@ fn parser_tests() {
     dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], "rast", |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert_errors_are_absent(&errors, path);
+        assert_errors_are_absent(errors, path);
         parse.debug_dump()
     });
     dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], "rast", |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert_errors_are_present(&errors, path);
+        assert_errors_are_present(errors, path);
         parse.debug_dump()
     });
 }