author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>	2021-06-03 13:09:10 +0100
committer GitHub <[email protected]>	2021-06-03 13:09:10 +0100
commit    48ea50bf04a1bd12999bd9e27558ac31988c7228 (patch)
tree      64b223398a3b62b8cc4e86239495a5b3e446441a
parent    c7eb19ebf9d42cada8cb532c6b3cb0e11a531b93 (diff)
parent    a1e650082b068919f9515330e6d0977c09af190f (diff)
Merge #9124
9124: Apply a few clippy suggestions r=lnicola a=clemenswasser
Co-authored-by: Clemens Wasser <[email protected]>
-rw-r--r--  crates/mbe/src/benchmark.rs                      2
-rw-r--r--  crates/mbe/src/expander/matcher.rs              21
-rw-r--r--  crates/mbe/src/lib.rs                           10
-rw-r--r--  crates/mbe/src/parser.rs                         2
-rw-r--r--  crates/mbe/src/syntax_bridge.rs                  5
-rw-r--r--  crates/mbe/src/tt_iter.rs                        2
-rw-r--r--  crates/parser/src/grammar/expressions/atom.rs    8
-rw-r--r--  crates/syntax/src/parsing/reparsing.rs           8
-rw-r--r--  crates/syntax/src/tests.rs                       2
-rw-r--r--  crates/test_utils/src/lib.rs                    11
-rw-r--r--  xtask/src/codegen/gen_syntax.rs                 20
-rw-r--r--  xtask/src/release/changelog.rs                   4
-rw-r--r--  xtask/src/tidy.rs                                8

13 files changed, 43 insertions, 60 deletions
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index 38707ffa5..18eb97f0d 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -187,7 +187,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt
         let a = 1664525;
         let c = 1013904223;
         *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
-        return *seed;
+        *seed
     }
     fn make_ident(ident: &str) -> tt::TokenTree {
         tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) })
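The hunk above looks like clippy's needless_return suggestion: the trailing return becomes a tail expression. A minimal stand-alone sketch of the same rewrite, assuming a throwaway function name (this is illustrative, not code from the crate):

    // Illustrative only: same LCG step as above, with a tail expression instead of `return *seed;`.
    fn next_seed(seed: &mut usize) -> usize {
        let a: usize = 1664525;
        let c: usize = 1013904223;
        *seed = seed.wrapping_mul(a).wrapping_add(c);
        *seed
    }

    fn main() {
        let mut seed = 42;
        println!("{}", next_seed(&mut seed));
    }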
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index 84ca3ff87..c982eb58f 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -219,7 +219,7 @@ impl BindingsBuilder {
         bindings
     }
 
-    fn build_inner(&self, bindings: &mut Bindings, link_nodes: &Vec<LinkNode<Rc<BindingKind>>>) {
+    fn build_inner(&self, bindings: &mut Bindings, link_nodes: &[LinkNode<Rc<BindingKind>>]) {
         let mut nodes = Vec::new();
         self.collect_nodes(&link_nodes, &mut nodes);
 
@@ -301,7 +301,7 @@ impl BindingsBuilder {
 
     fn collect_nodes<'a>(
         &'a self,
-        link_nodes: &'a Vec<LinkNode<Rc<BindingKind>>>,
+        link_nodes: &'a [LinkNode<Rc<BindingKind>>],
         nodes: &mut Vec<&'a Rc<BindingKind>>,
     ) {
         link_nodes.iter().for_each(|it| match it {
@@ -494,15 +494,8 @@ fn match_loop_inner<'t>(
                 }
                 Some(err) => {
                     res.add_err(err);
-                    match match_res.value {
-                        Some(fragment) => {
-                            bindings_builder.push_fragment(
-                                &mut item.bindings,
-                                &name,
-                                fragment,
-                            );
-                        }
-                        _ => {}
-                    }
+                    if let Some(fragment) = match_res.value {
+                        bindings_builder.push_fragment(&mut item.bindings, &name, fragment);
+                    }
                     item.is_error = true;
                     error_items.push(item);
@@ -578,9 +571,9 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
         );
         stdx::always!(cur_items.is_empty());
 
-        if error_items.len() > 0 {
+        if !error_items.is_empty() {
             error_recover_item = error_items.pop().map(|it| it.bindings);
-        } else if eof_items.len() > 0 {
+        } else if !eof_items.is_empty() {
             error_recover_item = Some(eof_items[0].bindings.clone());
         }
 
@@ -793,7 +786,7 @@ impl<'a> TtIter<'a> {
             _ => (),
         }
 
-        let tt = self.next().ok_or_else(|| ())?.clone();
+        let tt = self.next().ok_or(())?.clone();
         let punct = match tt {
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => {
                 punct
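The parameter changes above (&Vec<T> to &[T]) and the emptiness checks (len() > 0 to !is_empty()) correspond to clippy's ptr_arg and len_zero suggestions, as far as the diff shows. A small sketch of both patterns, using made-up names rather than anything from the crate:

    // Illustrative only: take a slice instead of `&Vec<i32>`, and use `is_empty()`.
    fn sum_all(values: &[i32]) -> i32 {
        values.iter().sum()
    }

    fn main() {
        let items = vec![1, 2, 3];
        if !items.is_empty() {
            println!("sum = {}", sum_all(&items));
        }
    }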
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index b95374b76..380a50744 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -295,8 +295,8 @@ fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
                 // Checks that no repetition which could match an empty token
                 // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558
 
-                if separator.is_none() {
-                    if subtree.iter().all(|child_op| {
+                if separator.is_none()
+                    && subtree.iter().all(|child_op| {
                         match child_op {
                             Op::Var { kind, .. } => {
                                 // vis is optional
@@ -314,9 +314,9 @@ fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
                             Op::Subtree { .. } => {}
                         }
                         false
-                    }) {
-                        return Err(ParseError::RepetitionEmptyTokenTree);
-                    }
+                    })
+                {
+                    return Err(ParseError::RepetitionEmptyTokenTree);
                 }
                 validate(subtree)?
             }
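The nested if collapsed into a single condition joined with && matches clippy's collapsible_if suggestion. A self-contained sketch of the shape of the rewrite, with placeholder values instead of the macro-validation logic:

    // Illustrative only: one `if` with `&&` instead of two nested `if`s.
    fn main() {
        let separator: Option<char> = None;
        let children = [1, 2, 3];

        if separator.is_none() && children.iter().all(|&n| n > 0) {
            println!("repetition could match an empty token tree");
        }
    }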
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs
index 61b2a4955..04c0d3e75 100644
--- a/crates/mbe/src/parser.rs
+++ b/crates/mbe/src/parser.rs
@@ -213,7 +213,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul
     Ok(res)
 }
 
-fn eat_fragment_kind<'a>(src: &mut TtIter<'a>, mode: Mode) -> Result<Option<SmolStr>, ParseError> {
+fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result<Option<SmolStr>, ParseError> {
     if let Mode::Pattern = mode {
         src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?;
         let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?;
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index b11172caf..978c75747 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -243,8 +243,7 @@ trait TokenConvertor {
     type Token: SrcToken;
 
     fn go(&mut self) -> tt::Subtree {
-        let mut subtree = tt::Subtree::default();
-        subtree.delimiter = None;
+        let mut subtree = tt::Subtree { delimiter: None, ..Default::default() };
         while self.peek().is_some() {
             self.collect_leaf(&mut subtree.token_trees);
         }
@@ -506,7 +505,7 @@ impl TokenConvertor for Convertor {
 
     fn peek(&self) -> Option<Self::Token> {
         if let Some((punct, mut offset)) = self.punct_offset.clone() {
-            offset = offset + TextSize::of('.');
+            offset += TextSize::of('.');
             if usize::from(offset) < punct.text().len() {
                 return Some(SynToken::Punch(punct, offset));
             }
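Two small rewrites here: struct functional-update syntax replaces a default-then-reassign sequence, and += replaces offset = offset + ... . A sketch of both on a hypothetical struct, not the real tt::Subtree:

    // Illustrative only.
    #[derive(Default, Debug)]
    struct Subtree {
        delimiter: Option<char>,
        token_trees: Vec<String>,
    }

    fn main() {
        // Instead of `let mut subtree = Subtree::default(); subtree.delimiter = None;`
        let subtree = Subtree { delimiter: None, ..Default::default() };

        // Instead of `offset = offset + 1`
        let mut offset = 3usize;
        offset += 1;

        println!("{:?}, offset = {}", subtree, offset);
    }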
diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs
index 99a8d250b..bd54f2442 100644
--- a/crates/mbe/src/tt_iter.rs
+++ b/crates/mbe/src/tt_iter.rs
@@ -138,7 +138,7 @@ impl<'a> TtIter<'a> {
             }
         }
         self.inner = self.inner.as_slice()[res.len()..].iter();
-        if res.len() == 0 && err.is_none() {
+        if res.is_empty() && err.is_none() {
             err = Some(err!("no tokens consumed"));
         }
         let res = match res.len() {
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index 269f223e6..abdfca1fe 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -252,12 +252,10 @@ fn closure_expr(p: &mut Parser) -> CompletedMarker {
         // test lambda_ret_block
         // fn main() { || -> i32 { 92 }(); }
         block_expr(p);
+    } else if p.at_ts(EXPR_FIRST) {
+        expr(p);
     } else {
-        if p.at_ts(EXPR_FIRST) {
-            expr(p);
-        } else {
-            p.error("expected expression");
-        }
+        p.error("expected expression");
     }
     m.complete(p, CLOSURE_EXPR)
 }
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 4ad50ab72..304f47b3d 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -36,8 +36,8 @@ pub(crate) fn incremental_reparse(
     None
 }
 
-fn reparse_token<'node>(
-    root: &'node SyntaxNode,
+fn reparse_token(
+    root: &SyntaxNode,
     edit: &Indel,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
     let prev_token = root.covering_element(edit.delete).as_token()?.clone();
@@ -84,8 +84,8 @@ fn reparse_token<'node>(
     }
 }
 
-fn reparse_block<'node>(
-    root: &'node SyntaxNode,
+fn reparse_block(
+    root: &SyntaxNode,
     edit: &Indel,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
     let (node, reparser) = find_reparsable_node(root, edit.delete)?;
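Dropping the explicit 'node lifetimes matches clippy's needless_lifetimes suggestion: the compiler infers these signatures without a named lifetime. A tiny sketch with an illustrative function, not the crate's own:

    // Illustrative only: the named lifetime below can simply be elided.
    // Before: fn first_word<'a>(s: &'a str) -> Option<&'a str>
    fn first_word(s: &str) -> Option<&str> {
        s.split_whitespace().next()
    }

    fn main() {
        println!("{:?}", first_word("incremental reparse"));
    }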
diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs
index 45f3c800f..9f2426171 100644
--- a/crates/syntax/src/tests.rs
+++ b/crates/syntax/src/tests.rs
@@ -236,7 +236,7 @@ where
         }
     });
     dir_tests(&test_data_dir(), err_paths, "rast", |text, path| {
-        if let Ok(_) = f(text) {
+        if f(text).is_ok() {
             panic!("'{:?}' successfully parsed when it should have errored", path);
         } else {
             "ERROR\n".to_owned()
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs
index bd017567c..ac5a9509d 100644
--- a/crates/test_utils/src/lib.rs
+++ b/crates/test_utils/src/lib.rs
@@ -238,14 +238,9 @@ fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> {
     let mut res = Vec::new();
     let mut offset: TextSize = 0.into();
     let marker: fn(char) -> bool = if line.contains('^') { |c| c == '^' } else { |c| c == '|' };
-    loop {
-        match line.find(marker) {
-            Some(idx) => {
-                offset += TextSize::try_from(idx).unwrap();
-                line = &line[idx..];
-            }
-            None => break,
-        };
+    while let Some(idx) = line.find(marker) {
+        offset += TextSize::try_from(idx).unwrap();
+        line = &line[idx..];
 
         let mut len = line.chars().take_while(|&it| it == '^').count();
         let mut continuation = false;
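The loop { match ... { Some(..) => .., None => break } } shape becomes a while let, as clippy's while_let_loop suggests. A self-contained sketch of the same loop over a plain string, without the TextSize type:

    // Illustrative only: advance past each marker with `while let` instead of loop + match.
    fn main() {
        let mut line = "a ^ b ^ c";
        let mut offset = 0usize;

        while let Some(idx) = line.find('^') {
            offset += idx;
            line = &line[idx + 1..];
            println!("marker found, running offset {}", offset);
        }
    }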
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs
index ba4b24848..b0b9e30db 100644
--- a/xtask/src/codegen/gen_syntax.rs
+++ b/xtask/src/codegen/gen_syntax.rs
@@ -94,18 +94,16 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
                     support::children(&self.syntax)
                 }
             }
-        } else {
-            if let Some(token_kind) = field.token_kind() {
-                quote! {
-                    pub fn #method_name(&self) -> Option<#ty> {
-                        support::token(&self.syntax, #token_kind)
-                    }
-                }
-            } else {
-                quote! {
-                    pub fn #method_name(&self) -> Option<#ty> {
-                        support::child(&self.syntax)
-                    }
-                }
-            }
+        } else if let Some(token_kind) = field.token_kind() {
+            quote! {
+                pub fn #method_name(&self) -> Option<#ty> {
+                    support::token(&self.syntax, #token_kind)
+                }
+            }
+        } else {
+            quote! {
+                pub fn #method_name(&self) -> Option<#ty> {
+                    support::child(&self.syntax)
+                }
+            }
         }
         }
         }
diff --git a/xtask/src/release/changelog.rs b/xtask/src/release/changelog.rs
index ffcae2cf7..2384a746f 100644
--- a/xtask/src/release/changelog.rs
+++ b/xtask/src/release/changelog.rs
@@ -132,7 +132,7 @@ fn parse_changelog_line(s: &str) -> Option<PrInfo> {
             return Some(PrInfo { kind, message: Some(message) });
         }
     };
-    let res = PrInfo { kind, message };
+    let res = PrInfo { message, kind };
     Some(res)
 }
 
@@ -152,7 +152,7 @@ fn parse_title_line(s: &str) -> PrInfo {
                 PrKind::Skip => None,
                 _ => Some(s[prefix.len()..].to_string()),
             };
-            return PrInfo { kind, message };
+            return PrInfo { message, kind };
         }
     }
     PrInfo { kind: PrKind::Other, message: Some(s.to_string()) }
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index 6f687a788..618cf12fb 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -33,7 +33,7 @@ fn check_code_formatting() {
     let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
     crate::ensure_rustfmt().unwrap();
     let res = cmd!("cargo fmt -- --check").run();
-    if !res.is_ok() {
+    if res.is_err() {
         let _ = cmd!("cargo fmt").run();
     }
     res.unwrap()
@@ -244,19 +244,19 @@ Zlib OR Apache-2.0 OR MIT
         .map(|it| it.trim())
        .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
        .collect::<Vec<_>>();
-    licenses.sort();
+    licenses.sort_unstable();
     licenses.dedup();
     if licenses != expected {
         let mut diff = String::new();
 
-        diff += &format!("New Licenses:\n");
+        diff.push_str("New Licenses:\n");
         for &l in licenses.iter() {
             if !expected.contains(&l) {
                 diff += &format!(" {}\n", l)
             }
         }
 
-        diff += &format!("\nMissing Licenses:\n");
+        diff.push_str("\nMissing Licenses:\n");
         for &l in expected.iter() {
             if !licenses.contains(&l) {
                 diff += &format!(" {}\n", l)
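diff += &format!("...") allocates a String just to append a constant, so push_str is the simpler call, and sort_unstable is preferred when the order of equal elements does not matter. A short sketch over made-up license data, not the real tidy check:

    // Illustrative only.
    fn main() {
        let mut licenses = vec!["MIT", "Apache-2.0", "MIT"];
        licenses.sort_unstable();
        licenses.dedup();

        let mut diff = String::new();
        diff.push_str("New Licenses:\n");
        for l in &licenses {
            diff += &format!("  {}\n", l);
        }
        print!("{}", diff);
    }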