author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>    2021-03-21 15:49:31 +0000
committer GitHub <[email protected]>    2021-03-21 15:49:31 +0000
commit    d51cf133f68eec63eee27a8666c7590d2e8b4ef8 (patch)
tree      cd74909ed13e4a697459bf66afffcb06d89820cd
parent    5bb65bb4962cfad613e9744a340ae23b533f479e (diff)
parent    202b51bc7b6999900e06ec2cfb8d72fe9aa4af29 (diff)
Merge #8135
8135: more clippy::{perf, complexity, style} fixes r=Veykril a=matthiaskrgr
Co-authored-by: Matthias Krüger <[email protected]>
39 files changed, 130 insertions, 168 deletions
diff --git a/crates/base_db/src/input.rs b/crates/base_db/src/input.rs index d0def2181..e9e8dfc2e 100644 --- a/crates/base_db/src/input.rs +++ b/crates/base_db/src/input.rs | |||
@@ -410,7 +410,7 @@ impl CrateId { | |||
410 | 410 | ||
411 | impl CrateData { | 411 | impl CrateData { |
412 | fn add_dep(&mut self, name: CrateName, crate_id: CrateId) { | 412 | fn add_dep(&mut self, name: CrateName, crate_id: CrateId) { |
413 | self.dependencies.push(Dependency { name, crate_id }) | 413 | self.dependencies.push(Dependency { crate_id, name }) |
414 | } | 414 | } |
415 | } | 415 | } |
416 | 416 | ||
diff --git a/crates/cfg/src/dnf.rs b/crates/cfg/src/dnf.rs index 30f4bcdf7..75ded9aa1 100644 --- a/crates/cfg/src/dnf.rs +++ b/crates/cfg/src/dnf.rs | |||
@@ -255,9 +255,9 @@ impl Builder { | |||
255 | fn make_dnf(expr: CfgExpr) -> CfgExpr { | 255 | fn make_dnf(expr: CfgExpr) -> CfgExpr { |
256 | match expr { | 256 | match expr { |
257 | CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => expr, | 257 | CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => expr, |
258 | CfgExpr::Any(e) => CfgExpr::Any(e.into_iter().map(|expr| make_dnf(expr)).collect()), | 258 | CfgExpr::Any(e) => CfgExpr::Any(e.into_iter().map(make_dnf).collect()), |
259 | CfgExpr::All(e) => { | 259 | CfgExpr::All(e) => { |
260 | let e = e.into_iter().map(|expr| make_nnf(expr)).collect::<Vec<_>>(); | 260 | let e = e.into_iter().map(make_nnf).collect::<Vec<_>>(); |
261 | 261 | ||
262 | CfgExpr::Any(distribute_conj(&e)) | 262 | CfgExpr::Any(distribute_conj(&e)) |
263 | } | 263 | } |
@@ -300,8 +300,8 @@ fn distribute_conj(conj: &[CfgExpr]) -> Vec<CfgExpr> { | |||
300 | fn make_nnf(expr: CfgExpr) -> CfgExpr { | 300 | fn make_nnf(expr: CfgExpr) -> CfgExpr { |
301 | match expr { | 301 | match expr { |
302 | CfgExpr::Invalid | CfgExpr::Atom(_) => expr, | 302 | CfgExpr::Invalid | CfgExpr::Atom(_) => expr, |
303 | CfgExpr::Any(expr) => CfgExpr::Any(expr.into_iter().map(|expr| make_nnf(expr)).collect()), | 303 | CfgExpr::Any(expr) => CfgExpr::Any(expr.into_iter().map(make_nnf).collect()), |
304 | CfgExpr::All(expr) => CfgExpr::All(expr.into_iter().map(|expr| make_nnf(expr)).collect()), | 304 | CfgExpr::All(expr) => CfgExpr::All(expr.into_iter().map(make_nnf).collect()), |
305 | CfgExpr::Not(operand) => match *operand { | 305 | CfgExpr::Not(operand) => match *operand { |
306 | CfgExpr::Invalid | CfgExpr::Atom(_) => CfgExpr::Not(operand.clone()), // Original negated expr | 306 | CfgExpr::Invalid | CfgExpr::Atom(_) => CfgExpr::Not(operand.clone()), // Original negated expr |
307 | CfgExpr::Not(expr) => { | 307 | CfgExpr::Not(expr) => { |
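The `dnf.rs` hunk replaces `.map(|expr| make_dnf(expr))` with `.map(make_dnf)` — the pattern clippy reports as `redundant_closure`: a closure that only forwards its argument can be replaced by the function itself. A minimal standalone sketch of the same rewrite, using an illustrative `double` function and values that are not from the patch:

```rust
fn double(x: i32) -> i32 {
    x * 2
}

fn main() {
    let values = vec![1, 2, 3];
    // Before: a closure that only forwards its argument to `double`.
    let a: Vec<i32> = values.iter().copied().map(|v| double(v)).collect();
    // After: pass the function directly, as the patch does with `make_dnf`/`make_nnf`.
    let b: Vec<i32> = values.iter().copied().map(double).collect();
    assert_eq!(a, b);
}
```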
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index e34be7e42..44eaeffb9 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs | |||
@@ -213,7 +213,7 @@ impl Crate { | |||
213 | Some(TokenTree::Leaf(Leaf::Literal(Literal{ref text, ..}))) => Some(text), | 213 | Some(TokenTree::Leaf(Leaf::Literal(Literal{ref text, ..}))) => Some(text), |
214 | _ => None | 214 | _ => None |
215 | } | 215 | } |
216 | }).flat_map(|t| t).next(); | 216 | }).flatten().next(); |
217 | 217 | ||
218 | doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/") | 218 | doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/") |
219 | } | 219 | } |
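Here `.flat_map(|t| t)` over an iterator of `Option`s becomes `.flatten()`, since flat-mapping with an identity closure is just flattening. A hedged sketch with illustrative data, not the actual doc-URL lookup above:

```rust
fn main() {
    let items = vec![Some(1), None, Some(3)];
    // Before: flat_map with an identity closure.
    let a: Vec<i32> = items.clone().into_iter().flat_map(|t| t).collect();
    // After: flatten() expresses the same thing directly.
    let b: Vec<i32> = items.into_iter().flatten().collect();
    assert_eq!(a, b); // both yield [1, 3]
}
```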
diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs index 2c10f46d8..52a2bce9b 100644 --- a/crates/hir_def/src/attr.rs +++ b/crates/hir_def/src/attr.rs | |||
@@ -638,7 +638,7 @@ fn collect_attrs( | |||
638 | owner: &dyn ast::AttrsOwner, | 638 | owner: &dyn ast::AttrsOwner, |
639 | ) -> impl Iterator<Item = Either<ast::Attr, ast::Comment>> { | 639 | ) -> impl Iterator<Item = Either<ast::Attr, ast::Comment>> { |
640 | let (inner_attrs, inner_docs) = inner_attributes(owner.syntax()) | 640 | let (inner_attrs, inner_docs) = inner_attributes(owner.syntax()) |
641 | .map_or((None, None), |(attrs, docs)| ((Some(attrs), Some(docs)))); | 641 | .map_or((None, None), |(attrs, docs)| (Some(attrs), Some(docs))); |
642 | 642 | ||
643 | let outer_attrs = owner.attrs().filter(|attr| attr.excl_token().is_none()); | 643 | let outer_attrs = owner.attrs().filter(|attr| attr.excl_token().is_none()); |
644 | let attrs = outer_attrs | 644 | let attrs = outer_attrs |
diff --git a/crates/hir_def/src/resolver.rs b/crates/hir_def/src/resolver.rs index 4a2d1c087..04ea9c5d7 100644 --- a/crates/hir_def/src/resolver.rs +++ b/crates/hir_def/src/resolver.rs | |||
@@ -472,7 +472,7 @@ impl Scope { | |||
472 | } | 472 | } |
473 | Scope::ExprScope(scope) => { | 473 | Scope::ExprScope(scope) => { |
474 | if let Some((label, name)) = scope.expr_scopes.label(scope.scope_id) { | 474 | if let Some((label, name)) = scope.expr_scopes.label(scope.scope_id) { |
475 | f(name.clone(), ScopeDef::Label(label)) | 475 | f(name, ScopeDef::Label(label)) |
476 | } | 476 | } |
477 | scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| { | 477 | scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| { |
478 | f(e.name().clone(), ScopeDef::Local(e.pat())); | 478 | f(e.name().clone(), ScopeDef::Local(e.pat())); |
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index 2748e25cf..fc73e435b 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs | |||
@@ -173,7 +173,7 @@ fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> { | |||
173 | }; | 173 | }; |
174 | let loc = db.lookup_intern_macro(id); | 174 | let loc = db.lookup_intern_macro(id); |
175 | let arg = loc.kind.arg(db)?; | 175 | let arg = loc.kind.arg(db)?; |
176 | Some(arg.green().to_owned()) | 176 | Some(arg.green()) |
177 | } | 177 | } |
178 | 178 | ||
179 | fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { | 179 | fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { |
diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs index 43de9edd6..0aeea48d5 100644 --- a/crates/hir_expand/src/name.rs +++ b/crates/hir_expand/src/name.rs | |||
@@ -48,9 +48,8 @@ impl Name { | |||
48 | 48 | ||
49 | /// Resolve a name from the text of token. | 49 | /// Resolve a name from the text of token. |
50 | fn resolve(raw_text: &str) -> Name { | 50 | fn resolve(raw_text: &str) -> Name { |
51 | let raw_start = "r#"; | 51 | if let Some(text) = raw_text.strip_prefix("r#") { |
52 | if raw_text.starts_with(raw_start) { | 52 | Name::new_text(SmolStr::new(text)) |
53 | Name::new_text(SmolStr::new(&raw_text[raw_start.len()..])) | ||
54 | } else { | 53 | } else { |
55 | Name::new_text(raw_text.into()) | 54 | Name::new_text(raw_text.into()) |
56 | } | 55 | } |
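The `Name::resolve` change swaps a `starts_with` check plus manual slicing for `str::strip_prefix`, which returns the remainder as an `Option` and avoids the hand-written index arithmetic (clippy's `manual_strip` pattern). A small standalone sketch of the same idea, with an illustrative helper name:

```rust
fn strip_raw(raw_text: &str) -> &str {
    // Before-style: raw_text.starts_with("r#") followed by &raw_text["r#".len()..].
    // After-style: strip_prefix returns Some(rest) only when the prefix matches.
    raw_text.strip_prefix("r#").unwrap_or(raw_text)
}

fn main() {
    assert_eq!(strip_raw("r#match"), "match");
    assert_eq!(strip_raw("foo"), "foo");
}
```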
diff --git a/crates/hir_ty/src/infer/pat.rs b/crates/hir_ty/src/infer/pat.rs index befa0d69b..ec491648f 100644 --- a/crates/hir_ty/src/infer/pat.rs +++ b/crates/hir_ty/src/infer/pat.rs | |||
@@ -38,7 +38,7 @@ impl<'a> InferenceContext<'a> { | |||
38 | let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default(); | 38 | let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default(); |
39 | let (pre, post) = match ellipsis { | 39 | let (pre, post) = match ellipsis { |
40 | Some(idx) => subpats.split_at(idx), | 40 | Some(idx) => subpats.split_at(idx), |
41 | None => (&subpats[..], &[][..]), | 41 | None => (subpats, &[][..]), |
42 | }; | 42 | }; |
43 | let post_idx_offset = field_tys.iter().count() - post.len(); | 43 | let post_idx_offset = field_tys.iter().count() - post.len(); |
44 | 44 | ||
diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs index cbbb535e5..c914a3b8e 100644 --- a/crates/hir_ty/src/lower.rs +++ b/crates/hir_ty/src/lower.rs | |||
@@ -946,8 +946,7 @@ pub(crate) fn trait_environment_query( | |||
946 | let substs = Substitution::type_params(db, trait_id); | 946 | let substs = Substitution::type_params(db, trait_id); |
947 | let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution: substs }; | 947 | let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution: substs }; |
948 | let pred = WhereClause::Implemented(trait_ref); | 948 | let pred = WhereClause::Implemented(trait_ref); |
949 | let program_clause: chalk_ir::ProgramClause<Interner> = | 949 | let program_clause: chalk_ir::ProgramClause<Interner> = pred.to_chalk(db).cast(&Interner); |
950 | pred.clone().to_chalk(db).cast(&Interner); | ||
951 | clauses.push(program_clause.into_from_env_clause(&Interner)); | 950 | clauses.push(program_clause.into_from_env_clause(&Interner)); |
952 | } | 951 | } |
953 | 952 | ||
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs index e187243cb..5201ce587 100644 --- a/crates/ide/src/extend_selection.rs +++ b/crates/ide/src/extend_selection.rs | |||
@@ -263,11 +263,10 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> { | |||
263 | ) -> Option<SyntaxToken> { | 263 | ) -> Option<SyntaxToken> { |
264 | node.siblings_with_tokens(dir) | 264 | node.siblings_with_tokens(dir) |
265 | .skip(1) | 265 | .skip(1) |
266 | .skip_while(|node| match node { | 266 | .find(|node| match node { |
267 | NodeOrToken::Node(_) => false, | 267 | NodeOrToken::Node(_) => true, |
268 | NodeOrToken::Token(it) => is_single_line_ws(it), | 268 | NodeOrToken::Token(it) => !is_single_line_ws(it), |
269 | }) | 269 | }) |
270 | .next() | ||
271 | .and_then(|it| it.into_token()) | 270 | .and_then(|it| it.into_token()) |
272 | .filter(|node| node.kind() == delimiter_kind) | 271 | .filter(|node| node.kind() == delimiter_kind) |
273 | } | 272 | } |
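The `extend_selection.rs` hunk rewrites `skip_while(p).next()` as `find(|x| !p(x))`: the predicate is inverted and the explicit `.next()` disappears. A standalone sketch with plain integers rather than the syntax-tree types used above:

```rust
fn main() {
    let xs = [1, 3, 5, 6, 7, 8];
    // Before: skip leading odd numbers, then take the next element.
    let a = xs.iter().skip_while(|&&x| x % 2 != 0).next();
    // After: find the first element that is NOT odd (predicate inverted).
    let b = xs.iter().find(|&&x| x % 2 == 0);
    assert_eq!(a, b); // both are Some(&6)
}
```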
diff --git a/crates/ide_assists/src/handlers/convert_comment_block.rs b/crates/ide_assists/src/handlers/convert_comment_block.rs index cdc45fc42..9dc3ee28f 100644 --- a/crates/ide_assists/src/handlers/convert_comment_block.rs +++ b/crates/ide_assists/src/handlers/convert_comment_block.rs | |||
@@ -1,5 +1,4 @@ | |||
1 | use itertools::Itertools; | 1 | use itertools::Itertools; |
2 | use std::convert::identity; | ||
3 | use syntax::{ | 2 | use syntax::{ |
4 | ast::{ | 3 | ast::{ |
5 | self, | 4 | self, |
@@ -140,7 +139,7 @@ fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> { | |||
140 | .filter(|s| !skippable(s)) | 139 | .filter(|s| !skippable(s)) |
141 | .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix)) | 140 | .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix)) |
142 | .take_while(|opt_com| opt_com.is_some()) | 141 | .take_while(|opt_com| opt_com.is_some()) |
143 | .filter_map(identity) | 142 | .flatten() |
144 | .skip(1); // skip the first element so we don't duplicate it in next_comments | 143 | .skip(1); // skip the first element so we don't duplicate it in next_comments |
145 | 144 | ||
146 | let next_comments = comment | 145 | let next_comments = comment |
@@ -149,7 +148,7 @@ fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> { | |||
149 | .filter(|s| !skippable(s)) | 148 | .filter(|s| !skippable(s)) |
150 | .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix)) | 149 | .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix)) |
151 | .take_while(|opt_com| opt_com.is_some()) | 150 | .take_while(|opt_com| opt_com.is_some()) |
152 | .filter_map(identity); | 151 | .flatten(); |
153 | 152 | ||
154 | let mut comments: Vec<_> = prev_comments.collect(); | 153 | let mut comments: Vec<_> = prev_comments.collect(); |
155 | comments.reverse(); | 154 | comments.reverse(); |
diff --git a/crates/ide_assists/src/handlers/expand_glob_import.rs b/crates/ide_assists/src/handlers/expand_glob_import.rs index 83aa11d52..98389e4f7 100644 --- a/crates/ide_assists/src/handlers/expand_glob_import.rs +++ b/crates/ide_assists/src/handlers/expand_glob_import.rs | |||
@@ -136,18 +136,13 @@ impl Refs { | |||
136 | .into_iter() | 136 | .into_iter() |
137 | .filter(|r| { | 137 | .filter(|r| { |
138 | if let Def::ModuleDef(ModuleDef::Trait(tr)) = r.def { | 138 | if let Def::ModuleDef(ModuleDef::Trait(tr)) = r.def { |
139 | if tr | 139 | if tr.items(ctx.db()).into_iter().any(|ai| { |
140 | .items(ctx.db()) | 140 | if let AssocItem::Function(f) = ai { |
141 | .into_iter() | 141 | Def::ModuleDef(ModuleDef::Function(f)).is_referenced_in(ctx) |
142 | .find(|ai| { | 142 | } else { |
143 | if let AssocItem::Function(f) = *ai { | 143 | false |
144 | Def::ModuleDef(ModuleDef::Function(f)).is_referenced_in(ctx) | 144 | } |
145 | } else { | 145 | }) { |
146 | false | ||
147 | } | ||
148 | }) | ||
149 | .is_some() | ||
150 | { | ||
151 | return true; | 146 | return true; |
152 | } | 147 | } |
153 | } | 148 | } |
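This hunk (and the later `patterns.rs` and `tests/expand.rs` hunks) replaces `iter.find(p).is_some()` with `iter.any(p)` — clippy's `search_is_some` pattern — so the code reads as a plain boolean test and drops the intermediate `Option`. A minimal sketch with illustrative data:

```rust
fn main() {
    let names = ["new", "len", "is_empty"];
    // Before: find + is_some to answer a yes/no question.
    let a = names.iter().find(|n| n.starts_with("is_")).is_some();
    // After: any() states the intent directly.
    let b = names.iter().any(|n| n.starts_with("is_"));
    assert_eq!(a, b);
}
```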
diff --git a/crates/ide_assists/src/handlers/reorder_impl.rs b/crates/ide_assists/src/handlers/reorder_impl.rs index edf4b0bfe..f976e73ad 100644 --- a/crates/ide_assists/src/handlers/reorder_impl.rs +++ b/crates/ide_assists/src/handlers/reorder_impl.rs | |||
@@ -95,7 +95,7 @@ fn compute_method_ranks(path: &ast::Path, ctx: &AssistContext) -> Option<FxHashM | |||
95 | _ => None, | 95 | _ => None, |
96 | }) | 96 | }) |
97 | .enumerate() | 97 | .enumerate() |
98 | .map(|(idx, func)| ((func.name(ctx.db()).to_string(), idx))) | 98 | .map(|(idx, func)| (func.name(ctx.db()).to_string(), idx)) |
99 | .collect(), | 99 | .collect(), |
100 | ) | 100 | ) |
101 | } | 101 | } |
diff --git a/crates/ide_completion/src/patterns.rs b/crates/ide_completion/src/patterns.rs index cf5ef07b7..d82564381 100644 --- a/crates/ide_completion/src/patterns.rs +++ b/crates/ide_completion/src/patterns.rs | |||
@@ -71,7 +71,7 @@ fn test_has_block_expr_parent() { | |||
71 | } | 71 | } |
72 | 72 | ||
73 | pub(crate) fn has_bind_pat_parent(element: SyntaxElement) -> bool { | 73 | pub(crate) fn has_bind_pat_parent(element: SyntaxElement) -> bool { |
74 | element.ancestors().find(|it| it.kind() == IDENT_PAT).is_some() | 74 | element.ancestors().any(|it| it.kind() == IDENT_PAT) |
75 | } | 75 | } |
76 | #[test] | 76 | #[test] |
77 | fn test_has_bind_pat_parent() { | 77 | fn test_has_bind_pat_parent() { |
diff --git a/crates/ide_ssr/src/parsing.rs b/crates/ide_ssr/src/parsing.rs index 5ff25cb6d..5e757e701 100644 --- a/crates/ide_ssr/src/parsing.rs +++ b/crates/ide_ssr/src/parsing.rs | |||
@@ -67,7 +67,7 @@ impl ParsedRule { | |||
67 | ) -> Result<Vec<ParsedRule>, SsrError> { | 67 | ) -> Result<Vec<ParsedRule>, SsrError> { |
68 | let raw_pattern = pattern.as_rust_code(); | 68 | let raw_pattern = pattern.as_rust_code(); |
69 | let raw_template = template.map(|t| t.as_rust_code()); | 69 | let raw_template = template.map(|t| t.as_rust_code()); |
70 | let raw_template = raw_template.as_ref().map(|s| s.as_str()); | 70 | let raw_template = raw_template.as_deref(); |
71 | let mut builder = RuleBuilder { | 71 | let mut builder = RuleBuilder { |
72 | placeholders_by_stand_in: pattern.placeholders_by_stand_in(), | 72 | placeholders_by_stand_in: pattern.placeholders_by_stand_in(), |
73 | rules: Vec::new(), | 73 | rules: Vec::new(), |
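`Option<String>::as_ref().map(|s| s.as_str())` is exactly what `Option::as_deref` does: it borrows the owned value and yields `Option<&str>`. A hedged sketch with an illustrative template string:

```rust
fn main() {
    let raw_template: Option<String> = Some("replace with $x".to_string());
    // Before: as_ref + map to go from Option<String> to Option<&str>.
    let a: Option<&str> = raw_template.as_ref().map(|s| s.as_str());
    // After: as_deref does the same conversion in one call.
    let b: Option<&str> = raw_template.as_deref();
    assert_eq!(a, b);
}
```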
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 1682b21b0..75d2f2eed 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs | |||
@@ -304,7 +304,7 @@ impl BindingsBuilder { | |||
304 | link_nodes: &'a Vec<LinkNode<Rc<BindingKind>>>, | 304 | link_nodes: &'a Vec<LinkNode<Rc<BindingKind>>>, |
305 | nodes: &mut Vec<&'a Rc<BindingKind>>, | 305 | nodes: &mut Vec<&'a Rc<BindingKind>>, |
306 | ) { | 306 | ) { |
307 | link_nodes.into_iter().for_each(|it| match it { | 307 | link_nodes.iter().for_each(|it| match it { |
308 | LinkNode::Node(it) => nodes.push(it), | 308 | LinkNode::Node(it) => nodes.push(it), |
309 | LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes), | 309 | LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes), |
310 | }); | 310 | }); |
@@ -713,10 +713,9 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragmen | |||
713 | .map(|ident| Some(tt::Leaf::from(ident.clone()).into())) | 713 | .map(|ident| Some(tt::Leaf::from(ident.clone()).into())) |
714 | .map_err(|()| err!("expected ident")), | 714 | .map_err(|()| err!("expected ident")), |
715 | "tt" => input.expect_tt().map(Some).map_err(|()| err!()), | 715 | "tt" => input.expect_tt().map(Some).map_err(|()| err!()), |
716 | "lifetime" => input | 716 | "lifetime" => { |
717 | .expect_lifetime() | 717 | input.expect_lifetime().map(Some).map_err(|()| err!("expected lifetime")) |
718 | .map(|tt| Some(tt)) | 718 | } |
719 | .map_err(|()| err!("expected lifetime")), | ||
720 | "literal" => { | 719 | "literal" => { |
721 | let neg = input.eat_char('-'); | 720 | let neg = input.eat_char('-'); |
722 | input | 721 | input |
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 33b85e23d..e74f8cf3f 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs | |||
@@ -356,6 +356,6 @@ impl<T> ExpandResult<T> { | |||
356 | 356 | ||
357 | impl<T: Default> From<Result<T, ExpandError>> for ExpandResult<T> { | 357 | impl<T: Default> From<Result<T, ExpandError>> for ExpandResult<T> { |
358 | fn from(result: Result<T, ExpandError>) -> Self { | 358 | fn from(result: Result<T, ExpandError>) -> Self { |
359 | result.map_or_else(|e| Self::only_err(e), |it| Self::ok(it)) | 359 | result.map_or_else(Self::only_err, Self::ok) |
360 | } | 360 | } |
361 | } | 361 | } |
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index c88387653..61b2a4955 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs | |||
@@ -57,7 +57,7 @@ impl<'a> Iterator for OpDelimitedIter<'a> { | |||
57 | 57 | ||
58 | fn size_hint(&self) -> (usize, Option<usize>) { | 58 | fn size_hint(&self) -> (usize, Option<usize>) { |
59 | let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 }; | 59 | let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 }; |
60 | let remain = len.checked_sub(self.idx).unwrap_or(0); | 60 | let remain = len.saturating_sub(self.idx); |
61 | (remain, Some(remain)) | 61 | (remain, Some(remain)) |
62 | } | 62 | } |
63 | } | 63 | } |
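In the `size_hint` computation, `len.checked_sub(self.idx).unwrap_or(0)` clamps an underflowing subtraction to zero, which is precisely `usize::saturating_sub`. A sketch of the equivalence with made-up numbers:

```rust
fn remaining(len: usize, idx: usize) -> usize {
    // Before: checked_sub returns None on underflow, then unwrap_or(0).
    // After: saturating_sub clamps at zero in one call.
    len.saturating_sub(idx)
}

fn main() {
    assert_eq!(remaining(5, 2), 3);
    assert_eq!(remaining(2, 5), 0); // would underflow; clamped to 0
}
```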
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 8bba3d3d5..9d433b3b0 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs | |||
@@ -362,7 +362,7 @@ trait TokenConvertor { | |||
362 | if let Some((kind, closed)) = delim { | 362 | if let Some((kind, closed)) = delim { |
363 | let mut subtree = tt::Subtree::default(); | 363 | let mut subtree = tt::Subtree::default(); |
364 | let (id, idx) = self.id_alloc().open_delim(range); | 364 | let (id, idx) = self.id_alloc().open_delim(range); |
365 | subtree.delimiter = Some(tt::Delimiter { kind, id }); | 365 | subtree.delimiter = Some(tt::Delimiter { id, kind }); |
366 | 366 | ||
367 | while self.peek().map(|it| it.kind() != closed).unwrap_or(false) { | 367 | while self.peek().map(|it| it.kind() != closed).unwrap_or(false) { |
368 | self.collect_leaf(&mut subtree.token_trees); | 368 | self.collect_leaf(&mut subtree.token_trees); |
diff --git a/crates/mbe/src/tests/expand.rs b/crates/mbe/src/tests/expand.rs index 2cce62781..8951f3813 100644 --- a/crates/mbe/src/tests/expand.rs +++ b/crates/mbe/src/tests/expand.rs | |||
@@ -1225,8 +1225,7 @@ macro_rules! m { | |||
1225 | ) | 1225 | ) |
1226 | .expand_statements(r#"m!(C("0"))"#) | 1226 | .expand_statements(r#"m!(C("0"))"#) |
1227 | .descendants() | 1227 | .descendants() |
1228 | .find(|token| token.kind() == ERROR) | 1228 | .any(|token| token.kind() == ERROR)); |
1229 | .is_some()); | ||
1230 | } | 1229 | } |
1231 | 1230 | ||
1232 | #[test] | 1231 | #[test] |
diff --git a/crates/project_model/src/build_data.rs b/crates/project_model/src/build_data.rs index 728a258ea..f7050be4e 100644 --- a/crates/project_model/src/build_data.rs +++ b/crates/project_model/src/build_data.rs | |||
@@ -137,60 +137,53 @@ fn collect_from_workspace( | |||
137 | let stdout = BufReader::new(child_stdout); | 137 | let stdout = BufReader::new(child_stdout); |
138 | 138 | ||
139 | let mut res = BuildDataMap::default(); | 139 | let mut res = BuildDataMap::default(); |
140 | for message in cargo_metadata::Message::parse_stream(stdout) { | 140 | for message in cargo_metadata::Message::parse_stream(stdout).flatten() { |
141 | if let Ok(message) = message { | 141 | match message { |
142 | match message { | 142 | Message::BuildScriptExecuted(BuildScript { |
143 | Message::BuildScriptExecuted(BuildScript { | 143 | package_id, out_dir, cfgs, env, .. |
144 | package_id, | 144 | }) => { |
145 | out_dir, | 145 | let cfgs = { |
146 | cfgs, | 146 | let mut acc = Vec::new(); |
147 | env, | 147 | for cfg in cfgs { |
148 | .. | 148 | match cfg.parse::<CfgFlag>() { |
149 | }) => { | 149 | Ok(it) => acc.push(it), |
150 | let cfgs = { | 150 | Err(err) => { |
151 | let mut acc = Vec::new(); | 151 | anyhow::bail!("invalid cfg from cargo-metadata: {}", err) |
152 | for cfg in cfgs { | 152 | } |
153 | match cfg.parse::<CfgFlag>() { | 153 | }; |
154 | Ok(it) => acc.push(it), | ||
155 | Err(err) => { | ||
156 | anyhow::bail!("invalid cfg from cargo-metadata: {}", err) | ||
157 | } | ||
158 | }; | ||
159 | } | ||
160 | acc | ||
161 | }; | ||
162 | let res = res.entry(package_id.repr.clone()).or_default(); | ||
163 | // cargo_metadata crate returns default (empty) path for | ||
164 | // older cargos, which is not absolute, so work around that. | ||
165 | if !out_dir.as_str().is_empty() { | ||
166 | let out_dir = AbsPathBuf::assert(PathBuf::from(out_dir.into_os_string())); | ||
167 | res.out_dir = Some(out_dir); | ||
168 | res.cfgs = cfgs; | ||
169 | } | 154 | } |
170 | 155 | acc | |
171 | res.envs = env; | 156 | }; |
157 | let res = res.entry(package_id.repr.clone()).or_default(); | ||
158 | // cargo_metadata crate returns default (empty) path for | ||
159 | // older cargos, which is not absolute, so work around that. | ||
160 | if !out_dir.as_str().is_empty() { | ||
161 | let out_dir = AbsPathBuf::assert(PathBuf::from(out_dir.into_os_string())); | ||
162 | res.out_dir = Some(out_dir); | ||
163 | res.cfgs = cfgs; | ||
172 | } | 164 | } |
173 | Message::CompilerArtifact(message) => { | 165 | |
174 | progress(format!("metadata {}", message.target.name)); | 166 | res.envs = env; |
175 | 167 | } | |
176 | if message.target.kind.contains(&"proc-macro".to_string()) { | 168 | Message::CompilerArtifact(message) => { |
177 | let package_id = message.package_id; | 169 | progress(format!("metadata {}", message.target.name)); |
178 | // Skip rmeta file | 170 | |
179 | if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name)) | 171 | if message.target.kind.contains(&"proc-macro".to_string()) { |
180 | { | 172 | let package_id = message.package_id; |
181 | let filename = AbsPathBuf::assert(PathBuf::from(&filename)); | 173 | // Skip rmeta file |
182 | let res = res.entry(package_id.repr.clone()).or_default(); | 174 | if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name)) { |
183 | res.proc_macro_dylib_path = Some(filename); | 175 | let filename = AbsPathBuf::assert(PathBuf::from(&filename)); |
184 | } | 176 | let res = res.entry(package_id.repr.clone()).or_default(); |
177 | res.proc_macro_dylib_path = Some(filename); | ||
185 | } | 178 | } |
186 | } | 179 | } |
187 | Message::CompilerMessage(message) => { | ||
188 | progress(message.target.name.clone()); | ||
189 | } | ||
190 | Message::BuildFinished(_) => {} | ||
191 | Message::TextLine(_) => {} | ||
192 | _ => {} | ||
193 | } | 180 | } |
181 | Message::CompilerMessage(message) => { | ||
182 | progress(message.target.name.clone()); | ||
183 | } | ||
184 | Message::BuildFinished(_) => {} | ||
185 | Message::TextLine(_) => {} | ||
186 | _ => {} | ||
194 | } | 187 | } |
195 | } | 188 | } |
196 | 189 | ||
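The build-script collection loop loses one nesting level by iterating `parse_stream(stdout).flatten()` instead of wrapping the body in `if let Ok(message) = message` (clippy's `manual_flatten` pattern): flattening an iterator of `Result`s yields only the `Ok` values. A self-contained sketch with plain `Result`s standing in for parsed `cargo_metadata` messages — the data is illustrative only:

```rust
fn main() {
    let messages: Vec<Result<&str, &str>> =
        vec![Ok("build-script-executed"), Err("parse error"), Ok("compiler-artifact")];

    // Before: an explicit `if let Ok(..)` inside the loop body.
    let mut before = Vec::new();
    for message in messages.clone() {
        if let Ok(message) = message {
            before.push(message);
        }
    }

    // After: `.flatten()` skips the Err values, removing one level of nesting.
    let after: Vec<&str> = messages.into_iter().flatten().collect();

    assert_eq!(before, after); // both keep only the Ok payloads
}
```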
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 984790d35..c63a0eaea 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -242,11 +242,8 @@ impl GlobalState { | |||
242 | } | 242 | } |
243 | BuildDataProgress::End(collector) => { | 243 | BuildDataProgress::End(collector) => { |
244 | self.fetch_build_data_completed(); | 244 | self.fetch_build_data_completed(); |
245 | let workspaces = (*self.workspaces) | 245 | let workspaces = |
246 | .clone() | 246 | (*self.workspaces).clone().into_iter().map(Ok).collect(); |
247 | .into_iter() | ||
248 | .map(|it| Ok(it)) | ||
249 | .collect(); | ||
250 | self.switch_workspaces(workspaces, Some(collector)); | 247 | self.switch_workspaces(workspaces, Some(collector)); |
251 | (Some(Progress::End), None) | 248 | (Some(Progress::End), None) |
252 | } | 249 | } |
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index aa8504c3d..76fdbcddd 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs | |||
@@ -237,7 +237,7 @@ impl GlobalState { | |||
237 | None => None, | 237 | None => None, |
238 | }; | 238 | }; |
239 | 239 | ||
240 | if &*self.workspaces == &workspaces && self.workspace_build_data == workspace_build_data { | 240 | if *self.workspaces == workspaces && self.workspace_build_data == workspace_build_data { |
241 | return; | 241 | return; |
242 | } | 242 | } |
243 | 243 | ||
diff --git a/crates/rust-analyzer/tests/rust-analyzer/support.rs b/crates/rust-analyzer/tests/rust-analyzer/support.rs index cd0c91481..95bf26f01 100644 --- a/crates/rust-analyzer/tests/rust-analyzer/support.rs +++ b/crates/rust-analyzer/tests/rust-analyzer/support.rs | |||
@@ -54,7 +54,7 @@ impl<'a> Project<'a> { | |||
54 | } | 54 | } |
55 | 55 | ||
56 | pub(crate) fn server(self) -> Server { | 56 | pub(crate) fn server(self) -> Server { |
57 | let tmp_dir = self.tmp_dir.unwrap_or_else(|| TestDir::new()); | 57 | let tmp_dir = self.tmp_dir.unwrap_or_else(TestDir::new); |
58 | static INIT: Once = Once::new(); | 58 | static INIT: Once = Once::new(); |
59 | INIT.call_once(|| { | 59 | INIT.call_once(|| { |
60 | env_logger::builder().is_test(true).parse_env("RA_LOG").try_init().unwrap(); | 60 | env_logger::builder().is_test(true).parse_env("RA_LOG").try_init().unwrap(); |
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs index 82ebf9037..a153a9e1c 100644 --- a/crates/syntax/src/algo.rs +++ b/crates/syntax/src/algo.rs | |||
@@ -567,7 +567,7 @@ impl<'a> SyntaxRewriter<'a> { | |||
567 | 567 | ||
568 | fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> { | 568 | fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> { |
569 | match element { | 569 | match element { |
570 | NodeOrToken::Node(it) => NodeOrToken::Node(it.green().to_owned()), | 570 | NodeOrToken::Node(it) => NodeOrToken::Node(it.green()), |
571 | NodeOrToken::Token(it) => NodeOrToken::Token(it.green().to_owned()), | 571 | NodeOrToken::Token(it) => NodeOrToken::Token(it.green().to_owned()), |
572 | } | 572 | } |
573 | } | 573 | } |
@@ -625,7 +625,7 @@ fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize { | |||
625 | 625 | ||
626 | fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> { | 626 | fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> { |
627 | match element { | 627 | match element { |
628 | NodeOrToken::Node(it) => it.green().to_owned().into(), | 628 | NodeOrToken::Node(it) => it.green().into(), |
629 | NodeOrToken::Token(it) => it.green().to_owned().into(), | 629 | NodeOrToken::Token(it) => it.green().to_owned().into(), |
630 | } | 630 | } |
631 | } | 631 | } |
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs index 64fac13a7..347862b8a 100644 --- a/crates/syntax/src/ast/edit.rs +++ b/crates/syntax/src/ast/edit.rs | |||
@@ -333,8 +333,7 @@ impl ast::Use { | |||
333 | .and_then(ast::Whitespace::cast); | 333 | .and_then(ast::Whitespace::cast); |
334 | if let Some(next_ws) = next_ws { | 334 | if let Some(next_ws) = next_ws { |
335 | let ws_text = next_ws.syntax().text(); | 335 | let ws_text = next_ws.syntax().text(); |
336 | if ws_text.starts_with('\n') { | 336 | if let Some(rest) = ws_text.strip_prefix('\n') { |
337 | let rest = &ws_text[1..]; | ||
338 | if rest.is_empty() { | 337 | if rest.is_empty() { |
339 | res.delete(next_ws.syntax()) | 338 | res.delete(next_ws.syntax()) |
340 | } else { | 339 | } else { |
@@ -462,8 +461,7 @@ impl ast::MatchArmList { | |||
462 | let end = if let Some(comma) = start | 461 | let end = if let Some(comma) = start |
463 | .siblings_with_tokens(Direction::Next) | 462 | .siblings_with_tokens(Direction::Next) |
464 | .skip(1) | 463 | .skip(1) |
465 | .skip_while(|it| it.kind().is_trivia()) | 464 | .find(|it| !it.kind().is_trivia()) |
466 | .next() | ||
467 | .filter(|it| it.kind() == T![,]) | 465 | .filter(|it| it.kind() == T![,]) |
468 | { | 466 | { |
469 | comma | 467 | comma |
@@ -597,7 +595,7 @@ impl IndentLevel { | |||
597 | pub fn from_node(node: &SyntaxNode) -> IndentLevel { | 595 | pub fn from_node(node: &SyntaxNode) -> IndentLevel { |
598 | match node.first_token() { | 596 | match node.first_token() { |
599 | Some(it) => Self::from_token(&it), | 597 | Some(it) => Self::from_token(&it), |
600 | None => return IndentLevel(0), | 598 | None => IndentLevel(0), |
601 | } | 599 | } |
602 | } | 600 | } |
603 | 601 | ||
diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs index 636ce166d..6317d84ba 100644 --- a/crates/syntax/src/ast/expr_ext.rs +++ b/crates/syntax/src/ast/expr_ext.rs | |||
@@ -11,16 +11,16 @@ impl ast::AttrsOwner for ast::Expr {} | |||
11 | 11 | ||
12 | impl ast::Expr { | 12 | impl ast::Expr { |
13 | pub fn is_block_like(&self) -> bool { | 13 | pub fn is_block_like(&self) -> bool { |
14 | match self { | 14 | matches!( |
15 | self, | ||
15 | ast::Expr::IfExpr(_) | 16 | ast::Expr::IfExpr(_) |
16 | | ast::Expr::LoopExpr(_) | 17 | | ast::Expr::LoopExpr(_) |
17 | | ast::Expr::ForExpr(_) | 18 | | ast::Expr::ForExpr(_) |
18 | | ast::Expr::WhileExpr(_) | 19 | | ast::Expr::WhileExpr(_) |
19 | | ast::Expr::BlockExpr(_) | 20 | | ast::Expr::BlockExpr(_) |
20 | | ast::Expr::MatchExpr(_) | 21 | | ast::Expr::MatchExpr(_) |
21 | | ast::Expr::EffectExpr(_) => true, | 22 | | ast::Expr::EffectExpr(_) |
22 | _ => false, | 23 | ) |
23 | } | ||
24 | } | 24 | } |
25 | 25 | ||
26 | pub fn name_ref(&self) -> Option<ast::NameRef> { | 26 | pub fn name_ref(&self) -> Option<ast::NameRef> { |
@@ -151,20 +151,20 @@ pub enum BinOp { | |||
151 | 151 | ||
152 | impl BinOp { | 152 | impl BinOp { |
153 | pub fn is_assignment(self) -> bool { | 153 | pub fn is_assignment(self) -> bool { |
154 | match self { | 154 | matches!( |
155 | self, | ||
155 | BinOp::Assignment | 156 | BinOp::Assignment |
156 | | BinOp::AddAssign | 157 | | BinOp::AddAssign |
157 | | BinOp::DivAssign | 158 | | BinOp::DivAssign |
158 | | BinOp::MulAssign | 159 | | BinOp::MulAssign |
159 | | BinOp::RemAssign | 160 | | BinOp::RemAssign |
160 | | BinOp::ShrAssign | 161 | | BinOp::ShrAssign |
161 | | BinOp::ShlAssign | 162 | | BinOp::ShlAssign |
162 | | BinOp::SubAssign | 163 | | BinOp::SubAssign |
163 | | BinOp::BitOrAssign | 164 | | BinOp::BitOrAssign |
164 | | BinOp::BitAndAssign | 165 | | BinOp::BitAndAssign |
165 | | BinOp::BitXorAssign => true, | 166 | | BinOp::BitXorAssign |
166 | _ => false, | 167 | ) |
167 | } | ||
168 | } | 168 | } |
169 | } | 169 | } |
170 | 170 | ||
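The `is_block_like` and `is_assignment` rewrites use the `matches!` macro, which collapses a `match` whose arms only return `true`/`false` into a single expression. A hedged sketch with a small illustrative enum, not the real `ast::Expr`/`BinOp` types:

```rust
enum Token {
    Plus,
    Minus,
    Ident(String),
}

// Before-style: a match whose arms only produce true or false.
fn is_operator_match(t: &Token) -> bool {
    match t {
        Token::Plus | Token::Minus => true,
        _ => false,
    }
}

// After-style: matches! expresses the same check as one expression.
fn is_operator_matches(t: &Token) -> bool {
    matches!(t, Token::Plus | Token::Minus)
}

fn main() {
    let t = Token::Ident("x".to_string());
    assert_eq!(is_operator_match(&t), is_operator_matches(&t));
    assert!(is_operator_matches(&Token::Plus));
}
```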
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 810c8d4c8..7049affd9 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs | |||
@@ -532,7 +532,7 @@ fn ast_from_text<N: AstNode>(text: &str) -> N { | |||
532 | } | 532 | } |
533 | 533 | ||
534 | fn unroot(n: SyntaxNode) -> SyntaxNode { | 534 | fn unroot(n: SyntaxNode) -> SyntaxNode { |
535 | SyntaxNode::new_root(n.green().to_owned()) | 535 | SyntaxNode::new_root(n.green()) |
536 | } | 536 | } |
537 | 537 | ||
538 | pub mod tokens { | 538 | pub mod tokens { |
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index 42a7b9c2a..bdf907a21 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs | |||
@@ -58,10 +58,7 @@ impl From<ast::MacroDef> for Macro { | |||
58 | 58 | ||
59 | impl AstNode for Macro { | 59 | impl AstNode for Macro { |
60 | fn can_cast(kind: SyntaxKind) -> bool { | 60 | fn can_cast(kind: SyntaxKind) -> bool { |
61 | match kind { | 61 | matches!(kind, SyntaxKind::MACRO_RULES | SyntaxKind::MACRO_DEF) |
62 | SyntaxKind::MACRO_RULES | SyntaxKind::MACRO_DEF => true, | ||
63 | _ => false, | ||
64 | } | ||
65 | } | 62 | } |
66 | fn cast(syntax: SyntaxNode) -> Option<Self> { | 63 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
67 | let res = match syntax.kind() { | 64 | let res = match syntax.kind() { |
@@ -462,10 +459,8 @@ impl ast::FieldExpr { | |||
462 | pub fn field_access(&self) -> Option<FieldKind> { | 459 | pub fn field_access(&self) -> Option<FieldKind> { |
463 | if let Some(nr) = self.name_ref() { | 460 | if let Some(nr) = self.name_ref() { |
464 | Some(FieldKind::Name(nr)) | 461 | Some(FieldKind::Name(nr)) |
465 | } else if let Some(tok) = self.index_token() { | ||
466 | Some(FieldKind::Index(tok)) | ||
467 | } else { | 462 | } else { |
468 | None | 463 | self.index_token().map(FieldKind::Index) |
469 | } | 464 | } |
470 | } | 465 | } |
471 | } | 466 | } |
@@ -482,16 +477,10 @@ impl ast::SlicePat { | |||
482 | let prefix = args | 477 | let prefix = args |
483 | .peeking_take_while(|p| match p { | 478 | .peeking_take_while(|p| match p { |
484 | ast::Pat::RestPat(_) => false, | 479 | ast::Pat::RestPat(_) => false, |
485 | ast::Pat::IdentPat(bp) => match bp.pat() { | 480 | ast::Pat::IdentPat(bp) => !matches!(bp.pat(), Some(ast::Pat::RestPat(_))), |
486 | Some(ast::Pat::RestPat(_)) => false, | ||
487 | _ => true, | ||
488 | }, | ||
489 | ast::Pat::RefPat(rp) => match rp.pat() { | 481 | ast::Pat::RefPat(rp) => match rp.pat() { |
490 | Some(ast::Pat::RestPat(_)) => false, | 482 | Some(ast::Pat::RestPat(_)) => false, |
491 | Some(ast::Pat::IdentPat(bp)) => match bp.pat() { | 483 | Some(ast::Pat::IdentPat(bp)) => !matches!(bp.pat(), Some(ast::Pat::RestPat(_))), |
492 | Some(ast::Pat::RestPat(_)) => false, | ||
493 | _ => true, | ||
494 | }, | ||
495 | _ => true, | 484 | _ => true, |
496 | }, | 485 | }, |
497 | _ => true, | 486 | _ => true, |
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index 6c242d126..090282d28 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs | |||
@@ -494,9 +494,8 @@ pub trait HasFormatSpecifier: AstToken { | |||
494 | } | 494 | } |
495 | _ => { | 495 | _ => { |
496 | while let Some((_, Ok(next_char))) = chars.peek() { | 496 | while let Some((_, Ok(next_char))) = chars.peek() { |
497 | match next_char { | 497 | if next_char == &'{' { |
498 | '{' => break, | 498 | break; |
499 | _ => {} | ||
500 | } | 499 | } |
501 | chars.next(); | 500 | chars.next(); |
502 | } | 501 | } |
diff --git a/crates/syntax/src/fuzz.rs b/crates/syntax/src/fuzz.rs index fbb97aa27..aa84239d2 100644 --- a/crates/syntax/src/fuzz.rs +++ b/crates/syntax/src/fuzz.rs | |||
@@ -43,7 +43,7 @@ impl CheckReparse { | |||
43 | TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap()); | 43 | TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap()); |
44 | let edited_text = | 44 | let edited_text = |
45 | format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]); | 45 | format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]); |
46 | let edit = Indel { delete, insert }; | 46 | let edit = Indel { insert, delete }; |
47 | Some(CheckReparse { text, edit, edited_text }) | 47 | Some(CheckReparse { text, edit, edited_text }) |
48 | } | 48 | } |
49 | 49 | ||
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs index 3e216fb70..bbe802174 100644 --- a/crates/syntax/src/validation.rs +++ b/crates/syntax/src/validation.rs | |||
@@ -297,7 +297,7 @@ fn validate_path_keywords(segment: ast::PathSegment, errors: &mut Vec<SyntaxErro | |||
297 | } | 297 | } |
298 | }; | 298 | }; |
299 | } | 299 | } |
300 | return None; | 300 | None |
301 | } | 301 | } |
302 | 302 | ||
303 | fn all_supers(path: &ast::Path) -> bool { | 303 | fn all_supers(path: &ast::Path) -> bool { |
@@ -314,7 +314,7 @@ fn validate_path_keywords(segment: ast::PathSegment, errors: &mut Vec<SyntaxErro | |||
314 | return all_supers(subpath); | 314 | return all_supers(subpath); |
315 | } | 315 | } |
316 | 316 | ||
317 | return true; | 317 | true |
318 | } | 318 | } |
319 | } | 319 | } |
320 | 320 | ||
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 9d9a01e30..bed44d600 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs | |||
@@ -239,9 +239,8 @@ impl Subtree { | |||
239 | 239 | ||
240 | let mut res = String::new(); | 240 | let mut res = String::new(); |
241 | res.push_str(delim.0); | 241 | res.push_str(delim.0); |
242 | let mut iter = self.token_trees.iter(); | ||
243 | let mut last = None; | 242 | let mut last = None; |
244 | while let Some(child) = iter.next() { | 243 | for child in &self.token_trees { |
245 | let s = match child { | 244 | let s = match child { |
246 | TokenTree::Leaf(it) => { | 245 | TokenTree::Leaf(it) => { |
247 | let s = match it { | 246 | let s = match it { |
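Driving an iterator with `while let Some(child) = iter.next()` is the manual form of a `for` loop (clippy's `while_let_on_iterator` pattern); the patch iterates `&self.token_trees` directly and drops the explicit iterator binding. A sketch of the same rewrite over an illustrative token list:

```rust
fn main() {
    let token_trees = vec!["fn", "main", "(", ")"];

    // Before: manually pulling items from the iterator.
    let mut joined_before = String::new();
    let mut iter = token_trees.iter();
    while let Some(child) = iter.next() {
        joined_before.push_str(child);
    }

    // After: a plain for loop over the borrowed collection.
    let mut joined_after = String::new();
    for child in &token_trees {
        joined_after.push_str(child);
    }

    assert_eq!(joined_before, joined_after);
}
```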
diff --git a/xtask/src/codegen/gen_assists_docs.rs b/xtask/src/codegen/gen_assists_docs.rs index 158680993..c91716409 100644 --- a/xtask/src/codegen/gen_assists_docs.rs +++ b/xtask/src/codegen/gen_assists_docs.rs | |||
@@ -154,8 +154,8 @@ fn hide_hash_comments(text: &str) -> String { | |||
154 | fn reveal_hash_comments(text: &str) -> String { | 154 | fn reveal_hash_comments(text: &str) -> String { |
155 | text.split('\n') // want final newline | 155 | text.split('\n') // want final newline |
156 | .map(|it| { | 156 | .map(|it| { |
157 | if it.starts_with("# ") { | 157 | if let Some(stripped) = it.strip_prefix("# ") { |
158 | &it[2..] | 158 | stripped |
159 | } else if it == "#" { | 159 | } else if it == "#" { |
160 | "" | 160 | "" |
161 | } else { | 161 | } else { |
diff --git a/xtask/src/codegen/gen_parser_tests.rs b/xtask/src/codegen/gen_parser_tests.rs index 096590653..2fecb9b5b 100644 --- a/xtask/src/codegen/gen_parser_tests.rs +++ b/xtask/src/codegen/gen_parser_tests.rs | |||
@@ -60,12 +60,10 @@ fn collect_tests(s: &str) -> Vec<Test> { | |||
60 | let mut res = Vec::new(); | 60 | let mut res = Vec::new(); |
61 | for comment_block in extract_comment_blocks(s) { | 61 | for comment_block in extract_comment_blocks(s) { |
62 | let first_line = &comment_block[0]; | 62 | let first_line = &comment_block[0]; |
63 | let (name, ok) = if first_line.starts_with("test ") { | 63 | let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") { |
64 | let name = first_line["test ".len()..].to_string(); | 64 | (name.to_string(), true) |
65 | (name, true) | 65 | } else if let Some(name) = first_line.strip_prefix("test_err ") { |
66 | } else if first_line.starts_with("test_err ") { | 66 | (name.to_string(), false) |
67 | let name = first_line["test_err ".len()..].to_string(); | ||
68 | (name, false) | ||
69 | } else { | 67 | } else { |
70 | continue; | 68 | continue; |
71 | }; | 69 | }; |
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs index 80f26e8f5..ba4b24848 100644 --- a/xtask/src/codegen/gen_syntax.rs +++ b/xtask/src/codegen/gen_syntax.rs | |||
@@ -707,7 +707,7 @@ fn extract_struct_trait(node: &mut AstNodeSrc, trait_name: &str, methods: &[&str | |||
707 | let mut to_remove = Vec::new(); | 707 | let mut to_remove = Vec::new(); |
708 | for (i, field) in node.fields.iter().enumerate() { | 708 | for (i, field) in node.fields.iter().enumerate() { |
709 | let method_name = field.method_name().to_string(); | 709 | let method_name = field.method_name().to_string(); |
710 | if methods.iter().any(|&it| it == &method_name) { | 710 | if methods.iter().any(|&it| it == method_name) { |
711 | to_remove.push(i); | 711 | to_remove.push(i); |
712 | } | 712 | } |
713 | } | 713 | } |
diff --git a/xtask/src/main.rs b/xtask/src/main.rs index 915aae71a..960927fc0 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs | |||
@@ -37,7 +37,7 @@ fn main() -> Result<()> { | |||
37 | match flags.subcommand { | 37 | match flags.subcommand { |
38 | flags::XtaskCmd::Help(_) => { | 38 | flags::XtaskCmd::Help(_) => { |
39 | println!("{}", flags::Xtask::HELP); | 39 | println!("{}", flags::Xtask::HELP); |
40 | return Ok(()); | 40 | Ok(()) |
41 | } | 41 | } |
42 | flags::XtaskCmd::Install(cmd) => cmd.run(), | 42 | flags::XtaskCmd::Install(cmd) => cmd.run(), |
43 | flags::XtaskCmd::FuzzTests(_) => run_fuzzer(), | 43 | flags::XtaskCmd::FuzzTests(_) => run_fuzzer(), |
diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index 97395738b..b0b76b8aa 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs | |||
@@ -167,7 +167,7 @@ impl Host { | |||
167 | 167 | ||
168 | return Ok(Host { os, cpu, mem }); | 168 | return Ok(Host { os, cpu, mem }); |
169 | 169 | ||
170 | fn read_field<'a>(path: &str, field: &str) -> Result<String> { | 170 | fn read_field(path: &str, field: &str) -> Result<String> { |
171 | let text = read_file(path)?; | 171 | let text = read_file(path)?; |
172 | 172 | ||
173 | let line = text | 173 | let line = text |
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs index 1352d1218..50d9efccd 100644 --- a/xtask/src/tidy.rs +++ b/xtask/src/tidy.rs | |||
@@ -193,7 +193,7 @@ https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after- | |||
193 | } | 193 | } |
194 | } | 194 | } |
195 | 195 | ||
196 | fn deny_clippy(path: &PathBuf, text: &String) { | 196 | fn deny_clippy(path: &Path, text: &str) { |
197 | let ignore = &[ | 197 | let ignore = &[ |
198 | // The documentation in string literals may contain anything for its own purposes | 198 | // The documentation in string literals may contain anything for its own purposes |
199 | "ide_completion/src/generated_lint_completions.rs", | 199 | "ide_completion/src/generated_lint_completions.rs", |
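The `deny_clippy` signature change follows clippy's `ptr_arg` advice: accept `&Path` and `&str` instead of `&PathBuf` and `&String`, so callers can pass any borrowed path or string and owned values still work through deref coercion. A minimal sketch of the signature change only — the function body and return type here are hypothetical, not the real tidy check:

```rust
use std::path::{Path, PathBuf};

// Before-style signature: fn deny_clippy(path: &PathBuf, text: &String)
// After-style signature: borrow the unsized views instead.
fn deny_clippy(path: &Path, text: &str) -> bool {
    // Hypothetical body for illustration: flag files that lack a clippy allow.
    !text.contains("#![allow(clippy") && path.extension().is_some()
}

fn main() {
    let owned_path = PathBuf::from("crates/foo/src/lib.rs");
    let owned_text = String::from("fn main() {}");
    // Owned PathBuf/String still work at the call site via deref coercion.
    assert!(deny_clippy(&owned_path, &owned_text));
}
```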