path: root/crates/syntax/src
Diffstat (limited to 'crates/syntax/src')
-rw-r--r--  crates/syntax/src/algo.rs           | 4 ++--
-rw-r--r--  crates/syntax/src/ast/make.rs       | 2 +-
-rw-r--r--  crates/syntax/src/ast/node_ext.rs   | 2 +-
-rw-r--r--  crates/syntax/src/parsing/lexer.rs  | 8 ++++----
-rw-r--r--  crates/syntax/src/validation.rs     | 6 +++---
5 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 22ab36cd2..384d031e7 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -88,8 +88,8 @@ pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNod
     let keep = u_depth.min(v_depth);
 
     let u_candidates = u.ancestors().skip(u_depth - keep);
-    let v_canidates = v.ancestors().skip(v_depth - keep);
-    let (res, _) = u_candidates.zip(v_canidates).find(|(x, y)| x == y)?;
+    let v_candidates = v.ancestors().skip(v_depth - keep);
+    let (res, _) = u_candidates.zip(v_candidates).find(|(x, y)| x == y)?;
     Some(res)
 }
 
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index cafa4c198..1ed8a96e5 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -241,7 +241,7 @@ pub fn wildcard_pat() -> ast::WildcardPat {
     }
 }
 
-/// Creates a tuple of patterns from an interator of patterns.
+/// Creates a tuple of patterns from an iterator of patterns.
 ///
 /// Invariant: `pats` must be length > 1
 ///
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 2aa472fb4..27381ba80 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -133,7 +133,7 @@ impl ast::Attr {
             first_token.and_then(|token| token.next_token()).as_ref().map(SyntaxToken::kind);
 
         match (first_token_kind, second_token_kind) {
-            (Some(SyntaxKind::POUND), Some(T![!])) => AttrKind::Inner,
+            (Some(T![#]), Some(T![!])) => AttrKind::Inner,
             _ => AttrKind::Outer,
         }
     }
diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs
index 0cbba73c5..7c8d0a4c4 100644
--- a/crates/syntax/src/parsing/lexer.rs
+++ b/crates/syntax/src/parsing/lexer.rs
@@ -24,7 +24,7 @@ pub struct Token {
 /// Beware that it checks for shebang first and its length contributes to resulting
 /// tokens offsets.
 pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
-    // non-empty string is a precondtion of `rustc_lexer::strip_shebang()`.
+    // non-empty string is a precondition of `rustc_lexer::strip_shebang()`.
     if text.is_empty() {
         return Default::default();
     }
@@ -76,7 +76,7 @@ pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxEr
 }
 
 /// The same as `lex_single_syntax_kind()` but returns only `SyntaxKind` and
-/// returns `None` if any tokenization error occured.
+/// returns `None` if any tokenization error occurred.
 ///
 /// Beware that unescape errors are not checked at tokenization time.
 pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
@@ -96,7 +96,7 @@ pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
 ///
 /// Beware that unescape errors are not checked at tokenization time.
 fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> {
-    // non-empty string is a precondtion of `rustc_lexer::first_token()`.
+    // non-empty string is a precondition of `rustc_lexer::first_token()`.
     if text.is_empty() {
         return None;
     }
@@ -117,7 +117,7 @@ fn rustc_token_kind_to_syntax_kind(
     token_text: &str,
 ) -> (SyntaxKind, Option<&'static str>) {
     // A note on an intended tradeoff:
-    // We drop some useful infromation here (see patterns with double dots `..`)
+    // We drop some useful information here (see patterns with double dots `..`)
     // Storing that info in `SyntaxKind` is not possible due to its layout requirements of
     // being `u16` that come from `rowan::SyntaxKind`.
 
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 2ddaeb176..bfa2dc4ba 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -173,7 +173,7 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) {
                     assert_eq!(
                         node.parent(),
                         pair.parent(),
-                        "\nunpaired curleys:\n{}\n{:#?}\n",
+                        "\nunpaired curlys:\n{}\n{:#?}\n",
                         root.text(),
                         root,
                     );
@@ -344,9 +344,9 @@ fn validate_trait_object_ty(ty: ast::DynTraitType) -> Option<SyntaxError> {
 
     if tbl.bounds().count() > 1 {
         let dyn_token = ty.dyn_token()?;
-        let potential_parentheses =
+        let potential_parenthesis =
             algo::skip_trivia_token(dyn_token.prev_token()?, Direction::Prev)?;
-        let kind = potential_parentheses.kind();
+        let kind = potential_parenthesis.kind();
         if !matches!(kind, T!['('] | T![<] | T![=]) {
             return Some(SyntaxError::new("ambiguous `+` in a type", ty.syntax().text_range()));
         }