path: root/crates/syntax/src
author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2021-01-10 18:16:29 +0000
committer  GitHub <[email protected]>  2021-01-10 18:16:29 +0000
commit     607b9ea160149bacca41c0638f16d372c3b235cd (patch)
tree       c1ef9b29af2f080530fd3d79b9bb6622bcff0a2a /crates/syntax/src
parent     3e32e39da765632dd5c61d31b846bfa93738e786 (diff)
parent     d4621197447d6906305ed30f8ab4fb48d657ec86 (diff)
Merge #7218
7218: Fix typos r=Veykril a=regexident

Apart from the very last commit on this PR (which fixes a public type's name), all changes are non-breaking.

Co-authored-by: Vincent Esche <[email protected]>
Diffstat (limited to 'crates/syntax/src')
-rw-r--r--  crates/syntax/src/algo.rs          | 4
-rw-r--r--  crates/syntax/src/ast/make.rs      | 2
-rw-r--r--  crates/syntax/src/parsing/lexer.rs | 8
-rw-r--r--  crates/syntax/src/validation.rs    | 2
4 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 22ab36cd2..384d031e7 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -88,8 +88,8 @@ pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNod
     let keep = u_depth.min(v_depth);
 
     let u_candidates = u.ancestors().skip(u_depth - keep);
-    let v_canidates = v.ancestors().skip(v_depth - keep);
-    let (res, _) = u_candidates.zip(v_canidates).find(|(x, y)| x == y)?;
+    let v_candidates = v.ancestors().skip(v_depth - keep);
+    let (res, _) = u_candidates.zip(v_candidates).find(|(x, y)| x == y)?;
     Some(res)
 }
 
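For context, `least_common_ancestor` (partially visible above) uses the standard depth-aligned walk: trim both ancestor chains to the same length, zip them, and return the first matching pair. Below is a minimal, self-contained sketch of the same technique over a hypothetical parent-pointer tree; the `Tree` type and its methods are illustrative, not part of the syntax crate.

    /// Illustrative parent-pointer tree; `None` marks the root's parent.
    struct Tree {
        parent: Vec<Option<usize>>,
    }

    impl Tree {
        /// Ancestor chain starting at `node` itself and ending at the root.
        fn ancestors(&self, node: usize) -> Vec<usize> {
            let mut chain = vec![node];
            while let Some(p) = self.parent[*chain.last().unwrap()] {
                chain.push(p);
            }
            chain
        }

        /// Depth-aligned LCA, mirroring the zip-and-find shape used above.
        fn least_common_ancestor(&self, u: usize, v: usize) -> Option<usize> {
            let (ua, va) = (self.ancestors(u), self.ancestors(v));
            let keep = ua.len().min(va.len());
            // Skip the extra ancestors of the deeper node so both chains line
            // up, then the first equal pair is the lowest common ancestor.
            let u_candidates = ua.iter().copied().skip(ua.len() - keep);
            let v_candidates = va.iter().copied().skip(va.len() - keep);
            u_candidates.zip(v_candidates).find(|(x, y)| x == y).map(|(x, _)| x)
        }
    }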
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index cafa4c198..1ed8a96e5 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -241,7 +241,7 @@ pub fn wildcard_pat() -> ast::WildcardPat {
     }
 }
 
-/// Creates a tuple of patterns from an interator of patterns.
+/// Creates a tuple of patterns from an iterator of patterns.
 ///
 /// Invariant: `pats` must be length > 1
 ///
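The doc comment fixed above belongs to one of the `ast::make` constructors, which generally work by formatting a small source snippet and re-parsing it into a typed node. As a hedged illustration of why the `length > 1` invariant exists, here is a sketch over plain strings; `tuple_pat_text` is a hypothetical helper, not the crate's actual code.

    /// Illustrative only: build the source text of a tuple pattern from an
    /// iterator of pattern texts; the real constructor parses such a snippet
    /// back into a typed AST node.
    fn tuple_pat_text(pats: impl IntoIterator<Item = String>) -> String {
        let pats: Vec<String> = pats.into_iter().collect();
        // The `length > 1` invariant from the doc comment above: with a single
        // element, `(p)` would parse as a parenthesized pattern rather than a
        // tuple (a one-element tuple pattern needs a trailing comma, `(p,)`).
        assert!(pats.len() > 1, "`pats` must have length > 1");
        format!("({})", pats.join(", "))
    }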
diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs
index 0cbba73c5..7c8d0a4c4 100644
--- a/crates/syntax/src/parsing/lexer.rs
+++ b/crates/syntax/src/parsing/lexer.rs
@@ -24,7 +24,7 @@ pub struct Token {
 /// Beware that it checks for shebang first and its length contributes to resulting
 /// tokens offsets.
 pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
-    // non-empty string is a precondtion of `rustc_lexer::strip_shebang()`.
+    // non-empty string is a precondition of `rustc_lexer::strip_shebang()`.
     if text.is_empty() {
         return Default::default();
     }
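The guard in this hunk exists because the underlying `rustc_lexer::strip_shebang()` is only defined for non-empty input, so `tokenize` short-circuits to empty token and error vectors instead. A hedged usage sketch of the API as declared above; the `syntax::` import path is an assumption about how the function is re-exported.

    // Hypothetical caller of `tokenize` as declared above; the import path is
    // an assumption, not confirmed by this diff.
    use syntax::tokenize;

    fn main() {
        let text = "#!/usr/bin/env rust\nfn main() {}";
        let (tokens, errors) = tokenize(text);
        // If a shebang is present it is consumed first, and its length shifts
        // every following token's offset, as the doc comment above warns.
        assert!(errors.is_empty());
        println!("lexed {} tokens", tokens.len());
    }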
@@ -76,7 +76,7 @@ pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxEr
 }
 
 /// The same as `lex_single_syntax_kind()` but returns only `SyntaxKind` and
-/// returns `None` if any tokenization error occured.
+/// returns `None` if any tokenization error occurred.
 ///
 /// Beware that unescape errors are not checked at tokenization time.
 pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
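As its doc comment states, `lex_single_valid_syntax_kind` is the error-intolerant counterpart of `lex_single_syntax_kind`, whose signature is visible in the hunk header above. Here is a minimal sketch of how such a wrapper can be expressed in terms of the richer function; it illustrates the documented contract and is not necessarily the crate's exact body.

    // Sketch: keep the kind only when lexing the single token produced no error.
    fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
        let (kind, err) = lex_single_syntax_kind(text)?;
        if err.is_some() {
            return None;
        }
        Some(kind)
    }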
@@ -96,7 +96,7 @@ pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
 ///
 /// Beware that unescape errors are not checked at tokenization time.
 fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> {
-    // non-empty string is a precondtion of `rustc_lexer::first_token()`.
+    // non-empty string is a precondition of `rustc_lexer::first_token()`.
     if text.is_empty() {
         return None;
     }
@@ -117,7 +117,7 @@ fn rustc_token_kind_to_syntax_kind(
     token_text: &str,
 ) -> (SyntaxKind, Option<&'static str>) {
     // A note on an intended tradeoff:
-    // We drop some useful infromation here (see patterns with double dots `..`)
+    // We drop some useful information here (see patterns with double dots `..`)
     // Storing that info in `SyntaxKind` is not possible due to its layout requirements of
     // being `u16` that come from `rowan::SyntaxKind`.
 
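The tradeoff described above comes from `SyntaxKind` having to convert into `rowan::SyntaxKind`, which is a bare `u16` newtype, so a kind cannot carry payload such as the exact lexer error it was derived from. A small illustrative sketch of that constraint; `DemoSyntaxKind` is a stand-in, not the crate's real enum.

    // rowan's SyntaxKind is a plain u16 newtype, so any kind enum that must map
    // into it has to be payload-free and fit in 16 bits.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    #[repr(u16)]
    enum DemoSyntaxKind {
        Whitespace,
        Ident,
        FloatNumber,
        // A payload-carrying variant such as
        // `FloatNumber { missing_exponent_digits: bool }` cannot fit this
        // scheme; that is the "useful information" being dropped above.
    }

    impl From<DemoSyntaxKind> for rowan::SyntaxKind {
        fn from(kind: DemoSyntaxKind) -> Self {
            rowan::SyntaxKind(kind as u16)
        }
    }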
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 7f9088382..bfa2dc4ba 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -173,7 +173,7 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) {
                     assert_eq!(
                         node.parent(),
                         pair.parent(),
-                        "\nunpaired curleys:\n{}\n{:#?}\n",
+                        "\nunpaired curlys:\n{}\n{:#?}\n",
                         root.text(),
                         root,
                     );
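For context, this assertion sits inside a stack-based brace check: each `{` token is pushed, each `}` pops its partner, and the two must share the same parent node or the tree is malformed. Below is a self-contained sketch of the same idea over raw text rather than syntax nodes; it is illustrative only and does not use the crate's real API.

    // Illustrative stack-based pairing check over plain text; the real
    // validator walks syntax tokens and compares the parents of paired braces.
    fn check_curly_pairing(text: &str) -> Result<(), String> {
        let mut stack = Vec::new();
        for (i, c) in text.char_indices() {
            match c {
                '{' => stack.push(i),
                '}' => {
                    if stack.pop().is_none() {
                        return Err(format!("unpaired curlys: `}}` at byte {}", i));
                    }
                }
                _ => {}
            }
        }
        match stack.pop() {
            Some(i) => Err(format!("unpaired curlys: `{{` at byte {}", i)),
            None => Ok(()),
        }
    }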