114 files changed, 285 insertions(+), 285 deletions(-)
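Every hunk below makes the same mechanical change: a redundant & is dropped from an argument that is already a reference of (or freely coerces to) the type the callee expects. A minimal sketch of the pattern, assuming the cleanup is driven by something like clippy's needless_borrow lint (the diff itself does not name the lint); the function and values here are hypothetical, not taken from the patched crates:

    fn resolve(path: &str) -> usize {
        path.len()
    }

    fn main() {
        let path: &str = "crates/hir/src/lib.rs";
        let before = resolve(&path); // needless borrow: &&str only coerces back to &str
        let after = resolve(path);   // the form this commit rewrites to
        assert_eq!(before, after);
    }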
diff --git a/crates/base_db/src/fixture.rs b/crates/base_db/src/fixture.rs
index 69ceba735..da4afb5eb 100644
--- a/crates/base_db/src/fixture.rs
+++ b/crates/base_db/src/fixture.rs
@@ -190,7 +190,7 @@ impl From<Fixture> for FileMeta {
 edition: f
 .edition
 .as_ref()
-.map_or(Edition::Edition2018, |v| Edition::from_str(&v).unwrap()),
+.map_or(Edition::Edition2018, |v| Edition::from_str(v).unwrap()),
 env: f.env.into_iter().collect(),
 introduce_new_source_root: f.introduce_new_source_root,
 }
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 2468c0dc6..f6eb23262 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -1112,7 +1112,7 @@ impl Function {
 .collect();
 sink.push(MissingFields {
 file: source_ptr.file_id,
-field_list_parent: AstPtr::new(&record_expr),
+field_list_parent: AstPtr::new(record_expr),
 field_list_parent_path: record_expr
 .path()
 .map(|path| AstPtr::new(&path)),
@@ -2531,13 +2531,13 @@ impl Type {
 match ty.kind(&Interner) {
 TyKind::Adt(_, substs) => {
 cb(type_.derived(ty.clone()));
-walk_substs(db, type_, &substs, cb);
+walk_substs(db, type_, substs, cb);
 }
 TyKind::AssociatedType(_, substs) => {
 if let Some(_) = ty.associated_type_parent_trait(db) {
 cb(type_.derived(ty.clone()));
 }
-walk_substs(db, type_, &substs, cb);
+walk_substs(db, type_, substs, cb);
 }
 TyKind::OpaqueType(_, subst) => {
 if let Some(bounds) = ty.impl_trait_bounds(db) {
@@ -2577,7 +2577,7 @@ impl Type {
 TyKind::FnDef(_, substs)
 | TyKind::Tuple(_, substs)
 | TyKind::Closure(.., substs) => {
-walk_substs(db, type_, &substs, cb);
+walk_substs(db, type_, substs, cb);
 }
 TyKind::Function(hir_ty::FnPointer { substitution, .. }) => {
 walk_substs(db, type_, &substitution.0, cb);
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index d522d5245..613266e07 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -192,7 +192,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
 node: &SyntaxNode,
 offset: TextSize,
 ) -> Option<N> {
-if let Some(it) = find_node_at_offset(&node, offset) {
+if let Some(it) = find_node_at_offset(node, offset) {
 return Some(it);
 }
 
@@ -744,7 +744,7 @@ impl<'db> SemanticsImpl<'db> {
 return None;
 }
 
-let func = self.resolve_method_call(&method_call_expr).map(Function::from)?;
+let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
 let res = match func.self_param(self.db)?.access(self.db) {
 Access::Shared | Access::Exclusive => true,
 Access::Owned => false,
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 37a050415..c9744d81d 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -222,7 +222,7 @@ impl SourceAnalyzer {
 Pat::Path(path) => path,
 _ => return None,
 };
-let res = resolve_hir_path(db, &self.resolver, &path)?;
+let res = resolve_hir_path(db, &self.resolver, path)?;
 match res {
 PathResolution::Def(def) => Some(def),
 _ => None,
@@ -329,7 +329,7 @@ impl SourceAnalyzer {
 
 let (variant, missing_fields, _exhaustive) =
 record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
-let res = self.missing_fields(db, krate, &substs, variant, missing_fields);
+let res = self.missing_fields(db, krate, substs, variant, missing_fields);
 Some(res)
 }
 
@@ -347,7 +347,7 @@ impl SourceAnalyzer {
 
 let (variant, missing_fields, _exhaustive) =
 record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
-let res = self.missing_fields(db, krate, &substs, variant, missing_fields);
+let res = self.missing_fields(db, krate, substs, variant, missing_fields);
 Some(res)
 }
 
diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs
index da1fdac33..a8bd36a0a 100644
--- a/crates/hir_def/src/body/lower.rs
+++ b/crates/hir_def/src/body/lower.rs
@@ -1002,16 +1002,16 @@ impl From<ast::LiteralKind> for Literal {
 if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) {
 return Literal::Float(Default::default(), builtin);
 } else if let builtin @ Some(_) =
-lit.suffix().and_then(|it| BuiltinInt::from_suffix(&it))
+lit.suffix().and_then(|it| BuiltinInt::from_suffix(it))
 {
 Literal::Int(lit.value().unwrap_or(0) as i128, builtin)
 } else {
-let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(&it));
+let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(it));
 Literal::Uint(lit.value().unwrap_or(0), builtin)
 }
 }
 LiteralKind::FloatNumber(lit) => {
-let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(&it));
+let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(it));
 Literal::Float(Default::default(), ty)
 }
 LiteralKind::ByteString(bs) => {
diff --git a/crates/hir_def/src/body/scope.rs b/crates/hir_def/src/body/scope.rs
index 6764de3a7..58a1fc81c 100644
--- a/crates/hir_def/src/body/scope.rs
+++ b/crates/hir_def/src/body/scope.rs
@@ -198,7 +198,7 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
 }
 Expr::Lambda { args, body: body_expr, .. } => {
 let scope = scopes.new_scope(scope);
-scopes.add_params_bindings(body, scope, &args);
+scopes.add_params_bindings(body, scope, args);
 compute_expr_scopes(*body_expr, body, scopes, scope);
 }
 Expr::Match { expr, arms } => {
diff --git a/crates/hir_def/src/generics.rs b/crates/hir_def/src/generics.rs
index 44d22b918..6933f6e3c 100644
--- a/crates/hir_def/src/generics.rs
+++ b/crates/hir_def/src/generics.rs
@@ -280,7 +280,7 @@ impl GenericParams {
 sm.type_params.insert(param_id, Either::Right(type_param.clone()));
 
 let type_ref = TypeRef::Path(name.into());
-self.fill_bounds(&lower_ctx, &type_param, Either::Left(type_ref));
+self.fill_bounds(lower_ctx, &type_param, Either::Left(type_ref));
 }
 for lifetime_param in params.lifetime_params() {
 let name =
@@ -289,7 +289,7 @@ impl GenericParams {
 let param_id = self.lifetimes.alloc(param);
 sm.lifetime_params.insert(param_id, lifetime_param.clone());
 let lifetime_ref = LifetimeRef::new_name(name);
-self.fill_bounds(&lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
+self.fill_bounds(lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
 }
 for const_param in params.const_params() {
 let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
diff --git a/crates/hir_def/src/item_tree/lower.rs b/crates/hir_def/src/item_tree/lower.rs
index cfda7cb32..3f90bda74 100644
--- a/crates/hir_def/src/item_tree/lower.rs
+++ b/crates/hir_def/src/item_tree/lower.rs
@@ -823,7 +823,7 @@ fn is_intrinsic_fn_unsafe(name: &Name) -> bool {
 known::type_name,
 known::variant_count,
 ]
-.contains(&name)
+.contains(name)
 }
 
 fn lower_abi(abi: ast::Abi) -> Interned<str> {
@@ -855,7 +855,7 @@ impl UseTreeLowering<'_> {
 // E.g. `use something::{inner}` (prefix is `None`, path is `something`)
 // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
 Some(path) => {
-match ModPath::from_src(self.db, path, &self.hygiene) {
+match ModPath::from_src(self.db, path, self.hygiene) {
 Some(it) => Some(it),
 None => return None, // FIXME: report errors somewhere
 }
@@ -874,7 +874,7 @@ impl UseTreeLowering<'_> {
 } else {
 let is_glob = tree.star_token().is_some();
 let path = match tree.path() {
-Some(path) => Some(ModPath::from_src(self.db, path, &self.hygiene)?),
+Some(path) => Some(ModPath::from_src(self.db, path, self.hygiene)?),
 None => None,
 };
 let alias = tree.rename().map(|a| {
diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs
index 93f30f23d..6fab58f15 100644
--- a/crates/hir_def/src/nameres/collector.rs
+++ b/crates/hir_def/src/nameres/collector.rs
@@ -500,7 +500,7 @@ impl DefCollector<'_> {
 let (per_ns, _) = self.def_map.resolve_path(
 self.db,
 self.def_map.root,
-&path,
+path,
 BuiltinShadowMode::Other,
 );
 
@@ -722,7 +722,7 @@ impl DefCollector<'_> {
 if import.is_extern_crate {
 let res = self.def_map.resolve_name_in_extern_prelude(
 self.db,
-&import
+import
 .path
 .as_ident()
 .expect("extern crate should have been desugared to one-element path"),
@@ -1351,7 +1351,7 @@ impl ModCollector<'_, '_> {
 let imports = Import::from_use(
 self.def_collector.db,
 krate,
-&self.item_tree,
+self.item_tree,
 ItemTreeId::new(self.file_id, import_id),
 );
 self.def_collector.unresolved_imports.extend(imports.into_iter().map(
@@ -1368,7 +1368,7 @@ impl ModCollector<'_, '_> {
 import: Import::from_extern_crate(
 self.def_collector.db,
 krate,
-&self.item_tree,
+self.item_tree,
 ItemTreeId::new(self.file_id, import_id),
 ),
 status: PartialResolvedImport::Unresolved,
@@ -1889,7 +1889,7 @@ impl ModCollector<'_, '_> {
 self.def_collector.def_map.with_ancestor_maps(
 self.def_collector.db,
 self.module_id,
-&mut |map, module| map[module].scope.get_legacy_macro(&name),
+&mut |map, module| map[module].scope.get_legacy_macro(name),
 )
 })
 },
@@ -1993,7 +1993,7 @@ mod tests {
 }
 
 fn do_resolve(code: &str) -> DefMap {
-let (db, _file_id) = TestDB::with_single_file(&code);
+let (db, _file_id) = TestDB::with_single_file(code);
 let krate = db.test_crate();
 
 let edition = db.crate_graph()[krate].edition;
diff --git a/crates/hir_def/src/nameres/path_resolution.rs b/crates/hir_def/src/nameres/path_resolution.rs
index c984148c3..629bc7952 100644
--- a/crates/hir_def/src/nameres/path_resolution.rs
+++ b/crates/hir_def/src/nameres/path_resolution.rs
@@ -93,7 +93,7 @@ impl DefMap {
 let mut vis = match visibility {
 RawVisibility::Module(path) => {
 let (result, remaining) =
-self.resolve_path(db, original_module, &path, BuiltinShadowMode::Module);
+self.resolve_path(db, original_module, path, BuiltinShadowMode::Module);
 if remaining.is_some() {
 return None;
 }
@@ -205,7 +205,7 @@ impl DefMap {
 None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
 };
 log::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
-self.resolve_name_in_crate_root_or_extern_prelude(db, &segment)
+self.resolve_name_in_crate_root_or_extern_prelude(db, segment)
 }
 PathKind::Plain => {
 let (_, segment) = match segments.next() {
@@ -222,7 +222,7 @@ impl DefMap {
 if path.segments().len() == 1 { shadow } else { BuiltinShadowMode::Module };
 
 log::debug!("resolving {:?} in module", segment);
-self.resolve_name_in_module(db, original_module, &segment, prefer_module)
+self.resolve_name_in_module(db, original_module, segment, prefer_module)
 }
 PathKind::Super(lvl) => {
 let mut module = original_module;
@@ -269,7 +269,7 @@ impl DefMap {
 Some((_, segment)) => segment,
 None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
 };
-if let Some(def) = self.extern_prelude.get(&segment) {
+if let Some(def) = self.extern_prelude.get(segment) {
 log::debug!("absolute path {:?} resolved to crate {:?}", path, def);
 PerNs::types(*def, Visibility::Public)
 } else {
@@ -319,13 +319,13 @@ impl DefMap {
 };
 
 // Since it is a qualified path here, it should not contains legacy macros
-module_data.scope.get(&segment)
+module_data.scope.get(segment)
 }
 ModuleDefId::AdtId(AdtId::EnumId(e)) => {
 // enum variant
 cov_mark::hit!(can_import_enum_variant);
 let enum_data = db.enum_data(e);
-match enum_data.variant(&segment) {
+match enum_data.variant(segment) {
 Some(local_id) => {
 let variant = EnumVariantId { parent: e, local_id };
 match &*enum_data.variants[local_id].variant_data {
diff --git a/crates/hir_def/src/path/lower.rs b/crates/hir_def/src/path/lower.rs
index f6220aa92..27345d07c 100644
--- a/crates/hir_def/src/path/lower.rs
+++ b/crates/hir_def/src/path/lower.rs
@@ -208,13 +208,13 @@ fn lower_generic_args_from_fn_path(
 let params = params?;
 let mut param_types = Vec::new();
 for param in params.params() {
-let type_ref = TypeRef::from_ast_opt(&ctx, param.ty());
+let type_ref = TypeRef::from_ast_opt(ctx, param.ty());
 param_types.push(type_ref);
 }
 let arg = GenericArg::Type(TypeRef::Tuple(param_types));
 args.push(arg);
 if let Some(ret_type) = ret_type {
-let type_ref = TypeRef::from_ast_opt(&ctx, ret_type.ty());
+let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
 bindings.push(AssociatedTypeBinding {
 name: name![Output],
 type_ref: Some(type_ref),
diff --git a/crates/hir_def/src/resolver.rs b/crates/hir_def/src/resolver.rs
index fb8a6f260..d4681fa3e 100644
--- a/crates/hir_def/src/resolver.rs
+++ b/crates/hir_def/src/resolver.rs
@@ -133,7 +133,7 @@ impl Resolver {
 Some(it) => it,
 None => return PerNs::none(),
 };
-let (module_res, segment_index) = item_map.resolve_path(db, module, &path, shadow);
+let (module_res, segment_index) = item_map.resolve_path(db, module, path, shadow);
 if segment_index.is_some() {
 return PerNs::none();
 }
@@ -150,7 +150,7 @@ impl Resolver {
 path: &ModPath,
 ) -> Option<TraitId> {
 let (item_map, module) = self.module_scope()?;
-let (module_res, ..) = item_map.resolve_path(db, module, &path, BuiltinShadowMode::Module);
+let (module_res, ..) = item_map.resolve_path(db, module, path, BuiltinShadowMode::Module);
 match module_res.take_types()? {
 ModuleDefId::TraitId(it) => Some(it),
 _ => None,
@@ -325,7 +325,7 @@ impl Resolver {
 path: &ModPath,
 ) -> Option<MacroDefId> {
 let (item_map, module) = self.module_scope()?;
-item_map.resolve_path(db, module, &path, BuiltinShadowMode::Other).0.take_macros()
+item_map.resolve_path(db, module, path, BuiltinShadowMode::Other).0.take_macros()
 }
 
 pub fn process_all_names(&self, db: &dyn DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
@@ -561,7 +561,7 @@ impl ModuleItemMap {
 path: &ModPath,
 ) -> Option<ResolveValueResult> {
 let (module_def, idx) =
-self.def_map.resolve_path_locally(db, self.module_id, &path, BuiltinShadowMode::Other);
+self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
 match idx {
 None => {
 let value = to_value_ns(module_def)?;
@@ -591,7 +591,7 @@ impl ModuleItemMap {
 path: &ModPath,
 ) -> Option<(TypeNs, Option<usize>)> {
 let (module_def, idx) =
-self.def_map.resolve_path_locally(db, self.module_id, &path, BuiltinShadowMode::Other);
+self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
 let res = to_type_ns(module_def)?;
 Some((res, idx))
 }
diff --git a/crates/hir_def/src/type_ref.rs b/crates/hir_def/src/type_ref.rs
index cbde6b940..ffe499973 100644
--- a/crates/hir_def/src/type_ref.rs
+++ b/crates/hir_def/src/type_ref.rs
@@ -128,7 +128,7 @@ impl TypeRef {
 /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
 pub fn from_ast(ctx: &LowerCtx, node: ast::Type) -> Self {
 match node {
-ast::Type::ParenType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()),
+ast::Type::ParenType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
 ast::Type::TupleType(inner) => {
 TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect())
 }
@@ -142,7 +142,7 @@ impl TypeRef {
 .unwrap_or(TypeRef::Error)
 }
 ast::Type::PtrType(inner) => {
-let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty());
+let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
 let mutability = Mutability::from_mutable(inner.mut_token().is_some());
 TypeRef::RawPtr(Box::new(inner_ty), mutability)
 }
@@ -156,13 +156,13 @@ impl TypeRef {
 .map(ConstScalar::usize_from_literal_expr)
 .unwrap_or(ConstScalar::Unknown);
 
-TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())), len)
+TypeRef::Array(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())), len)
 }
 ast::Type::SliceType(inner) => {
-TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())))
+TypeRef::Slice(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())))
 }
 ast::Type::RefType(inner) => {
-let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty());
+let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
 let lifetime = inner.lifetime().map(|lt| LifetimeRef::new(&lt));
 let mutability = Mutability::from_mutable(inner.mut_token().is_some());
 TypeRef::Reference(Box::new(inner_ty), lifetime, mutability)
@@ -180,7 +180,7 @@ impl TypeRef {
 is_varargs = param.dotdotdot_token().is_some();
 }
 
-pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(&ctx, it)).collect()
+pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(ctx, it)).collect()
 } else {
 Vec::new()
 };
@@ -188,7 +188,7 @@ impl TypeRef {
 TypeRef::Fn(params, is_varargs)
 }
 // for types are close enough for our purposes to the inner type for now...
-ast::Type::ForType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()),
+ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
 ast::Type::ImplTraitType(inner) => {
 TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
 }
@@ -229,7 +229,7 @@ impl TypeRef {
 TypeRef::RawPtr(type_ref, _)
 | TypeRef::Reference(type_ref, ..)
 | TypeRef::Array(type_ref, _)
-| TypeRef::Slice(type_ref) => go(&type_ref, f),
+| TypeRef::Slice(type_ref) => go(type_ref, f),
 TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => {
 for bound in bounds {
 match bound.as_ref() {
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs
index 0b310ba2f..51572226e 100644
--- a/crates/hir_expand/src/builtin_macro.rs
+++ b/crates/hir_expand/src/builtin_macro.rs
@@ -354,7 +354,7 @@ fn concat_expand(
 // concat works with string and char literals, so remove any quotes.
 // It also works with integer, float and boolean literals, so just use the rest
 // as-is.
-let component = unquote_str(&it).unwrap_or_else(|| it.text.to_string());
+let component = unquote_str(it).unwrap_or_else(|| it.text.to_string());
 text.push_str(&component);
 }
 // handle boolean literals
@@ -417,7 +417,7 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, mbe::ExpandError> {
 tt.token_trees
 .get(0)
 .and_then(|tt| match tt {
-tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(&it),
+tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(it),
 _ => None,
 })
 .ok_or_else(|| mbe::ExpandError::ConversionError)
@@ -561,7 +561,7 @@ mod tests {
 use syntax::ast::NameOwner;
 
 fn expand_builtin_macro(ra_fixture: &str) -> String {
-let (db, file_id) = TestDB::with_single_file(&ra_fixture);
+let (db, file_id) = TestDB::with_single_file(ra_fixture);
 let parsed = db.parse(file_id);
 let mut macro_rules: Vec<_> =
 parsed.syntax_node().descendants().filter_map(ast::MacroRules::cast).collect();
diff --git a/crates/hir_expand/src/input.rs b/crates/hir_expand/src/input.rs
index 82dc7f326..bc3ecc593 100644
--- a/crates/hir_expand/src/input.rs
+++ b/crates/hir_expand/src/input.rs
@@ -78,7 +78,7 @@ mod tests {
 use super::*;
 
 fn test_remove_derives_up_to(attr: usize, ra_fixture: &str, expect: Expect) {
-let (db, file_id) = TestDB::with_single_file(&ra_fixture);
+let (db, file_id) = TestDB::with_single_file(ra_fixture);
 let parsed = db.parse(file_id);
 
 let mut items: Vec<_> =
diff --git a/crates/hir_expand/src/proc_macro.rs b/crates/hir_expand/src/proc_macro.rs
index dbe1b446e..3ad2d3bf7 100644
--- a/crates/hir_expand/src/proc_macro.rs
+++ b/crates/hir_expand/src/proc_macro.rs
@@ -51,7 +51,7 @@ impl ProcMacroExpander {
 // Proc macros have access to the environment variables of the invoking crate.
 let env = &krate_graph[calling_crate].env;
 
-proc_macro.expander.expand(&tt, attr_arg, &env).map_err(mbe::ExpandError::from)
+proc_macro.expander.expand(tt, attr_arg, env).map_err(mbe::ExpandError::from)
 }
 None => Err(mbe::ExpandError::UnresolvedProcMacro),
 }
diff --git a/crates/hir_ty/src/diagnostics/match_check/deconstruct_pat.rs b/crates/hir_ty/src/diagnostics/match_check/deconstruct_pat.rs
index 222141bd6..088d2791e 100644
--- a/crates/hir_ty/src/diagnostics/match_check/deconstruct_pat.rs
+++ b/crates/hir_ty/src/diagnostics/match_check/deconstruct_pat.rs
@@ -528,7 +528,7 @@ impl SplitWildcard {
 smallvec![NonExhaustive]
 }
 TyKind::Never => SmallVec::new(),
-_ if cx.is_uninhabited(&pcx.ty) => SmallVec::new(),
+_ if cx.is_uninhabited(pcx.ty) => SmallVec::new(),
 TyKind::Adt(..) | TyKind::Tuple(..) | TyKind::Ref(..) => smallvec![Single],
 // This type is one for which we cannot list constructors, like `str` or `f64`.
 _ => smallvec![NonExhaustive],
diff --git a/crates/hir_ty/src/diagnostics/match_check/usefulness.rs b/crates/hir_ty/src/diagnostics/match_check/usefulness.rs
index bd76a606c..f5ac71444 100644
--- a/crates/hir_ty/src/diagnostics/match_check/usefulness.rs
+++ b/crates/hir_ty/src/diagnostics/match_check/usefulness.rs
@@ -645,7 +645,7 @@ impl SubPatSet {
 (Seq { subpats: s_set }, Seq { subpats: mut o_set }) => {
 s_set.retain(|i, s_sub_set| {
 // Missing entries count as full.
-let o_sub_set = o_set.remove(&i).unwrap_or(Full);
+let o_sub_set = o_set.remove(i).unwrap_or(Full);
 s_sub_set.union(o_sub_set);
 // We drop full entries.
 !s_sub_set.is_full()
@@ -656,7 +656,7 @@ impl SubPatSet {
 (Alt { subpats: s_set, .. }, Alt { subpats: mut o_set, .. }) => {
 s_set.retain(|i, s_sub_set| {
 // Missing entries count as empty.
-let o_sub_set = o_set.remove(&i).unwrap_or(Empty);
+let o_sub_set = o_set.remove(i).unwrap_or(Empty);
 s_sub_set.union(o_sub_set);
 // We drop empty entries.
 !s_sub_set.is_empty()
@@ -898,7 +898,7 @@ impl Usefulness {
 } else {
 witnesses
 .into_iter()
-.map(|witness| witness.apply_constructor(pcx, &ctor, ctor_wild_subpatterns))
+.map(|witness| witness.apply_constructor(pcx, ctor, ctor_wild_subpatterns))
 .collect()
 };
 WithWitnesses(new_witnesses)
diff --git a/crates/hir_ty/src/infer.rs b/crates/hir_ty/src/infer.rs
index f023c1fb7..9590c2e47 100644
--- a/crates/hir_ty/src/infer.rs
+++ b/crates/hir_ty/src/infer.rs
@@ -782,7 +782,7 @@ impl Expectation {
 fn adjust_for_branches(&self, table: &mut unify::InferenceTable) -> Expectation {
 match self {
 Expectation::HasType(ety) => {
-let ety = table.resolve_ty_shallow(&ety);
+let ety = table.resolve_ty_shallow(ety);
 if !ety.is_ty_var() {
 Expectation::HasType(ety)
 } else {
diff --git a/crates/hir_ty/src/infer/coerce.rs b/crates/hir_ty/src/infer/coerce.rs
index 03b97e7db..8647d7437 100644
--- a/crates/hir_ty/src/infer/coerce.rs
+++ b/crates/hir_ty/src/infer/coerce.rs
@@ -109,7 +109,7 @@ impl<'a> InferenceContext<'a> {
 }
 
 // Consider coercing the subtype to a DST
-if let Ok(ret) = self.try_coerce_unsized(&from_ty, &to_ty) {
+if let Ok(ret) = self.try_coerce_unsized(&from_ty, to_ty) {
 return Ok(ret);
 }
 
diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs
index e34f194ff..4805c0a00 100644
--- a/crates/hir_ty/src/infer/expr.rs
+++ b/crates/hir_ty/src/infer/expr.rs
@@ -54,7 +54,7 @@ impl<'a> InferenceContext<'a> {
 /// Infer type of expression with possibly implicit coerce to the expected type.
 /// Return the type after possible coercion.
 pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
-let ty = self.infer_expr_inner(expr, &expected);
+let ty = self.infer_expr_inner(expr, expected);
 let ty = if let Some(target) = expected.only_has_type(&mut self.table) {
 if !self.coerce(&ty, &target) {
 self.result
@@ -135,11 +135,11 @@ impl<'a> InferenceContext<'a> {
 let mut both_arms_diverge = Diverges::Always;
 
 let mut result_ty = self.table.new_type_var();
-let then_ty = self.infer_expr_inner(*then_branch, &expected);
+let then_ty = self.infer_expr_inner(*then_branch, expected);
 both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
 result_ty = self.coerce_merge_branch(Some(*then_branch), &result_ty, &then_ty);
 let else_ty = match else_branch {
-Some(else_branch) => self.infer_expr_inner(*else_branch, &expected),
+Some(else_branch) => self.infer_expr_inner(*else_branch, expected),
 None => TyBuilder::unit(),
 };
 both_arms_diverge &= self.diverges;
@@ -330,8 +330,8 @@ impl<'a> InferenceContext<'a> {
 .infer_method_call(
 tgt_expr,
 *receiver,
-&args,
-&method_name,
+args,
+method_name,
 generic_args.as_deref(),
 ),
 Expr::Match { expr, arms } => {
@@ -993,7 +993,7 @@ impl<'a> InferenceContext<'a> {
 }
 
 fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
-let callable_ty = self.resolve_ty_shallow(&callable_ty);
+let callable_ty = self.resolve_ty_shallow(callable_ty);
 if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(&Interner) {
 let def: CallableDefId = from_chalk(self.db, *fn_def);
 let generic_predicates = self.db.generic_predicates(def.into());
diff --git a/crates/hir_ty/src/infer/pat.rs b/crates/hir_ty/src/infer/pat.rs
index 25dff7e49..8f5db1f40 100644
--- a/crates/hir_ty/src/infer/pat.rs
+++ b/crates/hir_ty/src/infer/pat.rs
@@ -192,7 +192,7 @@ impl<'a> InferenceContext<'a> {
 Pat::Path(path) => {
 // FIXME use correct resolver for the surrounding expression
 let resolver = self.resolver.clone();
-self.infer_path(&resolver, &path, pat.into()).unwrap_or(self.err_ty())
+self.infer_path(&resolver, path, pat.into()).unwrap_or(self.err_ty())
 }
 Pat::Bind { mode, name: _, subpat } => {
 let mode = if mode == &BindingAnnotation::Unannotated {
diff --git a/crates/hir_ty/src/infer/path.rs b/crates/hir_ty/src/infer/path.rs
index 14c99eafd..056cdb5d5 100644
--- a/crates/hir_ty/src/infer/path.rs
+++ b/crates/hir_ty/src/infer/path.rs
@@ -43,11 +43,11 @@ impl<'a> InferenceContext<'a> {
 }
 let ty = self.make_ty(type_ref);
 let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
-let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver);
+let ctx = crate::lower::TyLoweringContext::new(self.db, resolver);
 let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
 self.resolve_ty_assoc_item(
 ty,
-&path.segments().last().expect("path had at least one segment").name,
+path.segments().last().expect("path had at least one segment").name,
 id,
 )?
 } else {
@@ -154,7 +154,7 @@ impl<'a> InferenceContext<'a> {
 let segment =
 remaining_segments.last().expect("there should be at least one segment here");
 
-self.resolve_ty_assoc_item(ty, &segment.name, id)
+self.resolve_ty_assoc_item(ty, segment.name, id)
 }
 }
 }
diff --git a/crates/hir_ty/src/interner.rs b/crates/hir_ty/src/interner.rs
index 29ffdd9b7..5fef878e8 100644
--- a/crates/hir_ty/src/interner.rs
+++ b/crates/hir_ty/src/interner.rs
@@ -331,7 +331,7 @@ impl chalk_ir::interner::Interner for Interner {
 &self,
 clauses: &'a Self::InternedProgramClauses,
 ) -> &'a [chalk_ir::ProgramClause<Self>] {
-&clauses
+clauses
 }
 
 fn intern_quantified_where_clauses<E>(
@@ -373,7 +373,7 @@ impl chalk_ir::interner::Interner for Interner {
 &self,
 canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
 ) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
-&canonical_var_kinds
+canonical_var_kinds
 }
 
 fn intern_constraints<E>(
@@ -413,7 +413,7 @@ impl chalk_ir::interner::Interner for Interner {
 &self,
 variances: &'a Self::InternedVariances,
 ) -> &'a [chalk_ir::Variance] {
-&variances
+variances
 }
 }
 
diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs
index c83933c73..0b8f21e5d 100644
--- a/crates/hir_ty/src/lower.rs
+++ b/crates/hir_ty/src/lower.rs
@@ -238,7 +238,7 @@ impl<'a> TyLoweringContext<'a> {
 // away instead of two.
 let actual_opaque_type_data = self
 .with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
-ctx.lower_impl_trait(&bounds)
+ctx.lower_impl_trait(bounds)
 });
 self.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;
 
@@ -421,7 +421,7 @@ impl<'a> TyLoweringContext<'a> {
 let found = self
 .db
 .trait_data(trait_ref.hir_trait_id())
-.associated_type_by_name(&segment.name);
+.associated_type_by_name(segment.name);
 match found {
 Some(associated_ty) => {
 // FIXME handle type parameters on the segment
@@ -505,7 +505,7 @@ impl<'a> TyLoweringContext<'a> {
 pub(crate) fn lower_path(&self, path: &Path) -> (Ty, Option<TypeNs>) {
 // Resolve the path (in type namespace)
 if let Some(type_ref) = path.type_anchor() {
-let (ty, res) = self.lower_ty_ext(&type_ref);
+let (ty, res) = self.lower_ty_ext(type_ref);
 return self.lower_ty_relative_path(ty, res, path.segments());
 }
 let (resolution, remaining_index) =
diff --git a/crates/hir_ty/src/method_resolution.rs b/crates/hir_ty/src/method_resolution.rs
index a23527f7d..8c00a6369 100644
--- a/crates/hir_ty/src/method_resolution.rs
+++ b/crates/hir_ty/src/method_resolution.rs
@@ -372,7 +372,7 @@ pub(crate) fn lookup_method(
 db,
 env,
 krate,
-&traits_in_scope,
+traits_in_scope,
 visible_from_module,
 Some(name),
 LookupMode::MethodCall,
@@ -484,7 +484,7 @@ fn iterate_method_candidates_impl(
 LookupMode::Path => {
 // No autoderef for path lookups
 iterate_method_candidates_for_self_ty(
-&ty,
+ty,
 db,
 env,
 krate,
@@ -513,7 +513,7 @@ fn iterate_method_candidates_with_autoref(
 db,
 env.clone(),
 krate,
-&traits_in_scope,
+traits_in_scope,
 visible_from_module,
 name,
 &mut callback,
@@ -531,7 +531,7 @@ fn iterate_method_candidates_with_autoref(
 db,
 env.clone(),
 krate,
-&traits_in_scope,
+traits_in_scope,
 visible_from_module,
 name,
 &mut callback,
@@ -549,7 +549,7 @@ fn iterate_method_candidates_with_autoref(
 db,
 env,
 krate,
-&traits_in_scope,
+traits_in_scope,
 visible_from_module,
 name,
 &mut callback,
@@ -593,7 +593,7 @@ fn iterate_method_candidates_by_receiver(
 db,
 env.clone(),
 krate,
-&traits_in_scope,
+traits_in_scope,
 name,
 Some(receiver_ty),
 &mut callback,
@@ -870,7 +870,7 @@ fn transform_receiver_ty(
 .fill_with_unknown()
 .build(),
 AssocContainerId::ImplId(impl_id) => {
-let impl_substs = inherent_impl_substs(db, env, impl_id, &self_ty)?;
+let impl_substs = inherent_impl_substs(db, env, impl_id, self_ty)?;
 TyBuilder::subst_for_def(db, function_id)
 .use_parent_substs(&impl_substs)
 .fill_with_unknown()
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs
index 4193aabf5..31d5cfedc 100644
--- a/crates/ide/src/diagnostics.rs
+++ b/crates/ide/src/diagnostics.rs
@@ -208,7 +208,7 @@ pub(crate) fn diagnostics(
 match sema.to_module_def(file_id) {
 Some(m) => m.diagnostics(db, &mut sink, internal_diagnostics),
 None => {
-sink.push(UnlinkedFile { file_id, node: SyntaxNodePtr::new(&parse.tree().syntax()) });
+sink.push(UnlinkedFile { file_id, node: SyntaxNodePtr::new(parse.tree().syntax()) });
 }
 }
 
@@ -222,7 +222,7 @@ fn diagnostic_with_fix<D: DiagnosticWithFixes>(
 resolve: &AssistResolveStrategy,
 ) -> Diagnostic {
 Diagnostic::error(sema.diagnostics_display_range(d.display_source()).range, d.message())
-.with_fixes(d.fixes(&sema, resolve))
+.with_fixes(d.fixes(sema, resolve))
 .with_code(Some(d.code()))
 }
 
@@ -232,7 +232,7 @@ fn warning_with_fix<D: DiagnosticWithFixes>(
 resolve: &AssistResolveStrategy,
 ) -> Diagnostic {
 Diagnostic::hint(sema.diagnostics_display_range(d.display_source()).range, d.message())
-.with_fixes(d.fixes(&sema, resolve))
+.with_fixes(d.fixes(sema, resolve))
 .with_code(Some(d.code()))
 }
 
diff --git a/crates/ide/src/diagnostics/fixes/create_field.rs b/crates/ide/src/diagnostics/fixes/create_field.rs
index a5f457dce..f6e45967a 100644
--- a/crates/ide/src/diagnostics/fixes/create_field.rs
+++ b/crates/ide/src/diagnostics/fixes/create_field.rs
@@ -18,7 +18,7 @@ impl DiagnosticWithFixes for NoSuchField {
 ) -> Option<Vec<Assist>> {
 let root = sema.db.parse_or_expand(self.file)?;
 missing_record_expr_field_fixes(
-&sema,
+sema,
 self.file.original_file(sema.db),
 &self.field.to_node(&root),
 )
diff --git a/crates/ide/src/diagnostics/fixes/fill_missing_fields.rs b/crates/ide/src/diagnostics/fixes/fill_missing_fields.rs
index b5dd64c08..c76f6008a 100644
--- a/crates/ide/src/diagnostics/fixes/fill_missing_fields.rs
+++ b/crates/ide/src/diagnostics/fixes/fill_missing_fields.rs
@@ -37,7 +37,7 @@ impl DiagnosticWithFixes for MissingFields {
 
 let edit = {
 let mut builder = TextEdit::builder();
-algo::diff(&old_field_list.syntax(), &new_field_list.syntax())
+algo::diff(old_field_list.syntax(), new_field_list.syntax())
 .into_text_edit(&mut builder);
 builder.finish()
 };
@@ -45,7 +45,7 @@ impl DiagnosticWithFixes for MissingFields {
 "fill_missing_fields",
 "Fill struct fields",
 SourceChange::from_text_edit(self.file.original_file(sema.db), edit),
-sema.original_range(&field_list_parent.syntax()).range,
+sema.original_range(field_list_parent.syntax()).range,
 )])
 }
 }
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index ec3828ab2..774952d96 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -151,18 +151,18 @@ pub(crate) fn resolve_doc_path_for_def(
 ) -> Option<hir::ModuleDef> {
 match def {
 Definition::ModuleDef(def) => match def {
-hir::ModuleDef::Module(it) => it.resolve_doc_path(db, &link, ns),
-hir::ModuleDef::Function(it) => it.resolve_doc_path(db, &link, ns),
-hir::ModuleDef::Adt(it) => it.resolve_doc_path(db, &link, ns),
-hir::ModuleDef::Variant(it) => it.resolve_doc_path(db, &link, ns),
-hir::ModuleDef::Const(it) => it.resolve_doc_path(db, &link, ns),
-hir::ModuleDef::Static(it) => it.resolve_doc_path(db, &link, ns),
-hir::ModuleDef::Trait(it) => it.resolve_doc_path(db, &link, ns),
-hir::ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, &link, ns),
+hir::ModuleDef::Module(it) => it.resolve_doc_path(db, link, ns),
+hir::ModuleDef::Function(it) => it.resolve_doc_path(db, link, ns),
+hir::ModuleDef::Adt(it) => it.resolve_doc_path(db, link, ns),
+hir::ModuleDef::Variant(it) => it.resolve_doc_path(db, link, ns),
+hir::ModuleDef::Const(it) => it.resolve_doc_path(db, link, ns),
+hir::ModuleDef::Static(it) => it.resolve_doc_path(db, link, ns),
+hir::ModuleDef::Trait(it) => it.resolve_doc_path(db, link, ns),
+hir::ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
 hir::ModuleDef::BuiltinType(_) => None,
 },
-Definition::Macro(it) => it.resolve_doc_path(db, &link, ns),
-Definition::Field(it) => it.resolve_doc_path(db, &link, ns),
+Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
+Definition::Field(it) => it.resolve_doc_path(db, link, ns),
 Definition::SelfType(_)
 | Definition::Local(_)
 | Definition::GenericParam(_)
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs
index 7032889ac..c7ec87edf 100644
--- a/crates/ide/src/extend_selection.rs
+++ b/crates/ide/src/extend_selection.rs
@@ -328,7 +328,7 @@ mod tests {
 use super::*;
 
 fn do_check(before: &str, afters: &[&str]) {
-let (analysis, position) = fixture::position(&before);
+let (analysis, position) = fixture::position(before);
 let before = analysis.file_text(position.file_id).unwrap();
 let range = TextRange::empty(position.offset);
 let mut frange = FileRange { file_id: position.file_id, range };
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 2d36c34e9..27a292d83 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs | |||
@@ -57,7 +57,7 @@ pub(crate) fn goto_definition( | |||
57 | }, | 57 | }, |
58 | ast::Name(name) => { | 58 | ast::Name(name) => { |
59 | let def = NameClass::classify(&sema, &name)?.referenced_or_defined(sema.db); | 59 | let def = NameClass::classify(&sema, &name)?.referenced_or_defined(sema.db); |
60 | try_find_trait_item_definition(&sema.db, &def) | 60 | try_find_trait_item_definition(sema.db, &def) |
61 | .or_else(|| def.try_to_nav(sema.db)) | 61 | .or_else(|| def.try_to_nav(sema.db)) |
62 | }, | 62 | }, |
63 | ast::Lifetime(lt) => if let Some(name_class) = NameClass::classify_lifetime(&sema, &lt) { | 63 | ast::Lifetime(lt) => if let Some(name_class) = NameClass::classify_lifetime(&sema, &lt) { |
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 1c6d36939..b4b3b45b5 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs | |||
@@ -288,7 +288,7 @@ fn runnable_action( | |||
288 | ) -> Option<HoverAction> { | 288 | ) -> Option<HoverAction> { |
289 | match def { | 289 | match def { |
290 | Definition::ModuleDef(it) => match it { | 290 | Definition::ModuleDef(it) => match it { |
291 | ModuleDef::Module(it) => runnable_mod(&sema, it).map(|it| HoverAction::Runnable(it)), | 291 | ModuleDef::Module(it) => runnable_mod(sema, it).map(|it| HoverAction::Runnable(it)), |
292 | ModuleDef::Function(func) => { | 292 | ModuleDef::Function(func) => { |
293 | let src = func.source(sema.db)?; | 293 | let src = func.source(sema.db)?; |
294 | if src.file_id != file_id.into() { | 294 | if src.file_id != file_id.into() { |
@@ -297,7 +297,7 @@ fn runnable_action( | |||
297 | return None; | 297 | return None; |
298 | } | 298 | } |
299 | 299 | ||
300 | runnable_fn(&sema, func).map(HoverAction::Runnable) | 300 | runnable_fn(sema, func).map(HoverAction::Runnable) |
301 | } | 301 | } |
302 | _ => None, | 302 | _ => None, |
303 | }, | 303 | }, |
@@ -432,7 +432,7 @@ fn hover_for_definition( | |||
432 | return match def { | 432 | return match def { |
433 | Definition::Macro(it) => match &it.source(db)?.value { | 433 | Definition::Macro(it) => match &it.source(db)?.value { |
434 | Either::Left(mac) => { | 434 | Either::Left(mac) => { |
435 | let label = macro_label(&mac); | 435 | let label = macro_label(mac); |
436 | from_def_source_labeled(db, it, Some(label), mod_path) | 436 | from_def_source_labeled(db, it, Some(label), mod_path) |
437 | } | 437 | } |
438 | Either::Right(_) => { | 438 | Either::Right(_) => { |
@@ -516,7 +516,7 @@ fn hover_for_keyword( | |||
516 | if !token.kind().is_keyword() { | 516 | if !token.kind().is_keyword() { |
517 | return None; | 517 | return None; |
518 | } | 518 | } |
519 | let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()?).krate()); | 519 | let famous_defs = FamousDefs(sema, sema.scope(&token.parent()?).krate()); |
520 | // std exposes {}_keyword modules with docstrings on the root to document keywords | 520 | // std exposes {}_keyword modules with docstrings on the root to document keywords |
521 | let keyword_mod = format!("{}_keyword", token.text()); | 521 | let keyword_mod = format!("{}_keyword", token.text()); |
522 | let doc_owner = find_std_module(&famous_defs, &keyword_mod)?; | 522 | let doc_owner = find_std_module(&famous_defs, &keyword_mod)?; |
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 821c61403..9cd33d0e4 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs | |||
@@ -96,7 +96,7 @@ fn get_chaining_hints( | |||
96 | } | 96 | } |
97 | 97 | ||
98 | let krate = sema.scope(expr.syntax()).module().map(|it| it.krate()); | 98 | let krate = sema.scope(expr.syntax()).module().map(|it| it.krate()); |
99 | let famous_defs = FamousDefs(&sema, krate); | 99 | let famous_defs = FamousDefs(sema, krate); |
100 | 100 | ||
101 | let mut tokens = expr | 101 | let mut tokens = expr |
102 | .syntax() | 102 | .syntax() |
@@ -165,7 +165,7 @@ fn get_param_name_hints( | |||
165 | }; | 165 | }; |
166 | Some((param_name, arg)) | 166 | Some((param_name, arg)) |
167 | }) | 167 | }) |
168 | .filter(|(param_name, arg)| !should_hide_param_name_hint(sema, &callable, param_name, &arg)) | 168 | .filter(|(param_name, arg)| !should_hide_param_name_hint(sema, &callable, param_name, arg)) |
169 | .map(|(param_name, arg)| InlayHint { | 169 | .map(|(param_name, arg)| InlayHint { |
170 | range: arg.syntax().text_range(), | 170 | range: arg.syntax().text_range(), |
171 | kind: InlayKind::ParameterHint, | 171 | kind: InlayKind::ParameterHint, |
@@ -187,7 +187,7 @@ fn get_bind_pat_hints( | |||
187 | } | 187 | } |
188 | 188 | ||
189 | let krate = sema.scope(pat.syntax()).module().map(|it| it.krate()); | 189 | let krate = sema.scope(pat.syntax()).module().map(|it| it.krate()); |
190 | let famous_defs = FamousDefs(&sema, krate); | 190 | let famous_defs = FamousDefs(sema, krate); |
191 | 191 | ||
192 | let ty = sema.type_of_pat(&pat.clone().into())?; | 192 | let ty = sema.type_of_pat(&pat.clone().into())?; |
193 | 193 | ||
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs index c67ccd1a9..93d3760bf 100644 --- a/crates/ide/src/join_lines.rs +++ b/crates/ide/src/join_lines.rs | |||
@@ -60,7 +60,7 @@ fn remove_newlines(edit: &mut TextEditBuilder, token: &SyntaxToken, range: TextR | |||
60 | let pos: TextSize = (pos as u32).into(); | 60 | let pos: TextSize = (pos as u32).into(); |
61 | let offset = token.text_range().start() + range.start() + pos; | 61 | let offset = token.text_range().start() + range.start() + pos; |
62 | if !edit.invalidates_offset(offset) { | 62 | if !edit.invalidates_offset(offset) { |
63 | remove_newline(edit, &token, offset); | 63 | remove_newline(edit, token, offset); |
64 | } | 64 | } |
65 | } | 65 | } |
66 | } | 66 | } |
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 97c9e5d2b..0511efae3 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs | |||
@@ -282,20 +282,20 @@ impl Analysis { | |||
282 | file_id: FileId, | 282 | file_id: FileId, |
283 | text_range: Option<TextRange>, | 283 | text_range: Option<TextRange>, |
284 | ) -> Cancellable<String> { | 284 | ) -> Cancellable<String> { |
285 | self.with_db(|db| syntax_tree::syntax_tree(&db, file_id, text_range)) | 285 | self.with_db(|db| syntax_tree::syntax_tree(db, file_id, text_range)) |
286 | } | 286 | } |
287 | 287 | ||
288 | pub fn view_hir(&self, position: FilePosition) -> Cancellable<String> { | 288 | pub fn view_hir(&self, position: FilePosition) -> Cancellable<String> { |
289 | self.with_db(|db| view_hir::view_hir(&db, position)) | 289 | self.with_db(|db| view_hir::view_hir(db, position)) |
290 | } | 290 | } |
291 | 291 | ||
292 | pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> { | 292 | pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> { |
293 | self.with_db(|db| view_item_tree::view_item_tree(&db, file_id)) | 293 | self.with_db(|db| view_item_tree::view_item_tree(db, file_id)) |
294 | } | 294 | } |
295 | 295 | ||
296 | /// Renders the crate graph to GraphViz "dot" syntax. | 296 | /// Renders the crate graph to GraphViz "dot" syntax. |
297 | pub fn view_crate_graph(&self) -> Cancellable<Result<String, String>> { | 297 | pub fn view_crate_graph(&self) -> Cancellable<Result<String, String>> { |
298 | self.with_db(|db| view_crate_graph::view_crate_graph(&db)) | 298 | self.with_db(|db| view_crate_graph::view_crate_graph(db)) |
299 | } | 299 | } |
300 | 300 | ||
301 | pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<ExpandedMacro>> { | 301 | pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<ExpandedMacro>> { |
@@ -315,7 +315,7 @@ impl Analysis { | |||
315 | /// up minor stuff like continuing the comment. | 315 | /// up minor stuff like continuing the comment. |
316 | /// The edit will be a snippet (with `$0`). | 316 | /// The edit will be a snippet (with `$0`). |
317 | pub fn on_enter(&self, position: FilePosition) -> Cancellable<Option<TextEdit>> { | 317 | pub fn on_enter(&self, position: FilePosition) -> Cancellable<Option<TextEdit>> { |
318 | self.with_db(|db| typing::on_enter(&db, position)) | 318 | self.with_db(|db| typing::on_enter(db, position)) |
319 | } | 319 | } |
320 | 320 | ||
321 | /// Returns an edit which should be applied after a character was typed. | 321 | /// Returns an edit which should be applied after a character was typed. |
@@ -331,7 +331,7 @@ impl Analysis { | |||
331 | if !typing::TRIGGER_CHARS.contains(char_typed) { | 331 | if !typing::TRIGGER_CHARS.contains(char_typed) { |
332 | return Ok(None); | 332 | return Ok(None); |
333 | } | 333 | } |
334 | self.with_db(|db| typing::on_char_typed(&db, position, char_typed)) | 334 | self.with_db(|db| typing::on_char_typed(db, position, char_typed)) |
335 | } | 335 | } |
336 | 336 | ||
337 | /// Returns a tree representation of symbols in the file. Useful to draw a | 337 | /// Returns a tree representation of symbols in the file. Useful to draw a |
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index f8b64a669..a0fdead2c 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs | |||
@@ -62,7 +62,7 @@ pub(crate) fn find_all_refs( | |||
62 | if let Some(name) = get_name_of_item_declaration(&syntax, position) { | 62 | if let Some(name) = get_name_of_item_declaration(&syntax, position) { |
63 | (NameClass::classify(sema, &name)?.referenced_or_defined(sema.db), true) | 63 | (NameClass::classify(sema, &name)?.referenced_or_defined(sema.db), true) |
64 | } else { | 64 | } else { |
65 | (find_def(&sema, &syntax, position)?, false) | 65 | (find_def(sema, &syntax, position)?, false) |
66 | }; | 66 | }; |
67 | 67 | ||
68 | let mut usages = def.usages(sema).set_scope(search_scope).include_self_refs().all(); | 68 | let mut usages = def.usages(sema).set_scope(search_scope).include_self_refs().all(); |
diff --git a/crates/ide/src/references/rename.rs b/crates/ide/src/references/rename.rs index 7dfc5043e..02b171bda 100644 --- a/crates/ide/src/references/rename.rs +++ b/crates/ide/src/references/rename.rs | |||
@@ -64,7 +64,7 @@ pub(crate) fn prepare_rename( | |||
64 | } | 64 | } |
65 | }; | 65 | }; |
66 | let name_like = sema | 66 | let name_like = sema |
67 | .find_node_at_offset_with_descend(&syntax, position.offset) | 67 | .find_node_at_offset_with_descend(syntax, position.offset) |
68 | .ok_or_else(|| format_err!("No references found at position"))?; | 68 | .ok_or_else(|| format_err!("No references found at position"))?; |
69 | let node = match &name_like { | 69 | let node = match &name_like { |
70 | ast::NameLike::Name(it) => it.syntax(), | 70 | ast::NameLike::Name(it) => it.syntax(), |
@@ -104,7 +104,7 @@ pub(crate) fn rename_with_semantics( | |||
104 | 104 | ||
105 | let def = find_definition(sema, syntax, position)?; | 105 | let def = find_definition(sema, syntax, position)?; |
106 | match def { | 106 | match def { |
107 | Definition::ModuleDef(ModuleDef::Module(module)) => rename_mod(&sema, module, new_name), | 107 | Definition::ModuleDef(ModuleDef::Module(module)) => rename_mod(sema, module, new_name), |
108 | Definition::SelfType(_) => bail!("Cannot rename `Self`"), | 108 | Definition::SelfType(_) => bail!("Cannot rename `Self`"), |
109 | Definition::ModuleDef(ModuleDef::BuiltinType(_)) => bail!("Cannot rename builtin type"), | 109 | Definition::ModuleDef(ModuleDef::BuiltinType(_)) => bail!("Cannot rename builtin type"), |
110 | def => rename_reference(sema, def, new_name), | 110 | def => rename_reference(sema, def, new_name), |
@@ -323,7 +323,7 @@ fn rename_reference( | |||
323 | } | 323 | } |
324 | let mut source_change = SourceChange::default(); | 324 | let mut source_change = SourceChange::default(); |
325 | source_change.extend(usages.iter().map(|(&file_id, references)| { | 325 | source_change.extend(usages.iter().map(|(&file_id, references)| { |
326 | (file_id, source_edit_from_references(&references, def, new_name)) | 326 | (file_id, source_edit_from_references(references, def, new_name)) |
327 | })); | 327 | })); |
328 | 328 | ||
329 | let (file_id, edit) = source_edit_from_def(sema, def, new_name)?; | 329 | let (file_id, edit) = source_edit_from_def(sema, def, new_name)?; |
@@ -413,7 +413,7 @@ fn rename_self_to_param( | |||
413 | let mut source_change = SourceChange::default(); | 413 | let mut source_change = SourceChange::default(); |
414 | source_change.insert_source_edit(file_id.original_file(sema.db), edit); | 414 | source_change.insert_source_edit(file_id.original_file(sema.db), edit); |
415 | source_change.extend(usages.iter().map(|(&file_id, references)| { | 415 | source_change.extend(usages.iter().map(|(&file_id, references)| { |
416 | (file_id, source_edit_from_references(&references, def, new_name)) | 416 | (file_id, source_edit_from_references(references, def, new_name)) |
417 | })); | 417 | })); |
418 | Ok(source_change) | 418 | Ok(source_change) |
419 | } | 419 | } |
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 552054951..03faabadc 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs | |||
@@ -158,7 +158,7 @@ fn find_related_tests( | |||
158 | search_scope: Option<SearchScope>, | 158 | search_scope: Option<SearchScope>, |
159 | tests: &mut FxHashSet<Runnable>, | 159 | tests: &mut FxHashSet<Runnable>, |
160 | ) { | 160 | ) { |
161 | if let Some(refs) = references::find_all_refs(&sema, position, search_scope) { | 161 | if let Some(refs) = references::find_all_refs(sema, position, search_scope) { |
162 | for (file_id, refs) in refs.references { | 162 | for (file_id, refs) in refs.references { |
163 | let file = sema.parse(file_id); | 163 | let file = sema.parse(file_id); |
164 | let file = file.syntax(); | 164 | let file = file.syntax(); |
@@ -169,10 +169,10 @@ fn find_related_tests( | |||
169 | }); | 169 | }); |
170 | 170 | ||
171 | for fn_def in functions { | 171 | for fn_def in functions { |
172 | if let Some(runnable) = as_test_runnable(&sema, &fn_def) { | 172 | if let Some(runnable) = as_test_runnable(sema, &fn_def) { |
173 | // direct test | 173 | // direct test |
174 | tests.insert(runnable); | 174 | tests.insert(runnable); |
175 | } else if let Some(module) = parent_test_module(&sema, &fn_def) { | 175 | } else if let Some(module) = parent_test_module(sema, &fn_def) { |
176 | // indirect test | 176 | // indirect test |
177 | find_related_tests_in_module(sema, &fn_def, &module, tests); | 177 | find_related_tests_in_module(sema, &fn_def, &module, tests); |
178 | } | 178 | } |
@@ -203,7 +203,7 @@ fn find_related_tests_in_module( | |||
203 | } | 203 | } |
204 | 204 | ||
205 | fn as_test_runnable(sema: &Semantics<RootDatabase>, fn_def: &ast::Fn) -> Option<Runnable> { | 205 | fn as_test_runnable(sema: &Semantics<RootDatabase>, fn_def: &ast::Fn) -> Option<Runnable> { |
206 | if test_related_attribute(&fn_def).is_some() { | 206 | if test_related_attribute(fn_def).is_some() { |
207 | let function = sema.to_def(fn_def)?; | 207 | let function = sema.to_def(fn_def)?; |
208 | runnable_fn(sema, function) | 208 | runnable_fn(sema, function) |
209 | } else { | 209 | } else { |
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index b03f1c71f..e186b82b7 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs | |||
@@ -323,7 +323,7 @@ fn traverse( | |||
323 | if let Some(token) = element.as_token().cloned().and_then(ast::String::cast) { | 323 | if let Some(token) = element.as_token().cloned().and_then(ast::String::cast) { |
324 | if token.is_raw() { | 324 | if token.is_raw() { |
325 | let expanded = element_to_highlight.as_token().unwrap().clone(); | 325 | let expanded = element_to_highlight.as_token().unwrap().clone(); |
326 | if inject::ra_fixture(hl, &sema, token, expanded).is_some() { | 326 | if inject::ra_fixture(hl, sema, token, expanded).is_some() { |
327 | continue; | 327 | continue; |
328 | } | 328 | } |
329 | } | 329 | } |
@@ -334,7 +334,7 @@ fn traverse( | |||
334 | } | 334 | } |
335 | 335 | ||
336 | if let Some((mut highlight, binding_hash)) = highlight::element( | 336 | if let Some((mut highlight, binding_hash)) = highlight::element( |
337 | &sema, | 337 | sema, |
338 | krate, | 338 | krate, |
339 | &mut bindings_shadow_count, | 339 | &mut bindings_shadow_count, |
340 | syntactic_name_ref_highlighting, | 340 | syntactic_name_ref_highlighting, |
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index 84012227d..8c0e553c0 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs | |||
@@ -449,12 +449,12 @@ fn highlight_method_call( | |||
449 | krate: Option<hir::Crate>, | 449 | krate: Option<hir::Crate>, |
450 | method_call: &ast::MethodCallExpr, | 450 | method_call: &ast::MethodCallExpr, |
451 | ) -> Option<Highlight> { | 451 | ) -> Option<Highlight> { |
452 | let func = sema.resolve_method_call(&method_call)?; | 452 | let func = sema.resolve_method_call(method_call)?; |
453 | 453 | ||
454 | let mut h = SymbolKind::Function.into(); | 454 | let mut h = SymbolKind::Function.into(); |
455 | h |= HlMod::Associated; | 455 | h |= HlMod::Associated; |
456 | 456 | ||
457 | if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(&method_call) { | 457 | if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(method_call) { |
458 | h |= HlMod::Unsafe; | 458 | h |= HlMod::Unsafe; |
459 | } | 459 | } |
460 | if func.is_async(sema.db) { | 460 | if func.is_async(sema.db) { |
diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs index 5327af845..478facfee 100644 --- a/crates/ide/src/syntax_highlighting/html.rs +++ b/crates/ide/src/syntax_highlighting/html.rs | |||
@@ -23,7 +23,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo | |||
23 | let hl_ranges = highlight(db, file_id, None, false); | 23 | let hl_ranges = highlight(db, file_id, None, false); |
24 | let text = parse.tree().syntax().to_string(); | 24 | let text = parse.tree().syntax().to_string(); |
25 | let mut buf = String::new(); | 25 | let mut buf = String::new(); |
26 | buf.push_str(&STYLE); | 26 | buf.push_str(STYLE); |
27 | buf.push_str("<pre><code>"); | 27 | buf.push_str("<pre><code>"); |
28 | for r in &hl_ranges { | 28 | for r in &hl_ranges { |
29 | let chunk = html_escape(&text[r.range]); | 29 | let chunk = html_escape(&text[r.range]); |
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs index 4269d339e..883252c0e 100644 --- a/crates/ide/src/syntax_highlighting/inject.rs +++ b/crates/ide/src/syntax_highlighting/inject.rs | |||
@@ -23,7 +23,7 @@ pub(super) fn ra_fixture( | |||
23 | literal: ast::String, | 23 | literal: ast::String, |
24 | expanded: SyntaxToken, | 24 | expanded: SyntaxToken, |
25 | ) -> Option<()> { | 25 | ) -> Option<()> { |
26 | let active_parameter = ActiveParameter::at_token(&sema, expanded)?; | 26 | let active_parameter = ActiveParameter::at_token(sema, expanded)?; |
27 | if !active_parameter.ident().map_or(false, |name| name.text().starts_with("ra_fixture")) { | 27 | if !active_parameter.ident().map_or(false, |name| name.text().starts_with("ra_fixture")) { |
28 | return None; | 28 | return None; |
29 | } | 29 | } |
@@ -124,7 +124,7 @@ pub(super) fn doc_comment( | |||
124 | } | 124 | } |
125 | 125 | ||
126 | for attr in attributes.by_key("doc").attrs() { | 126 | for attr in attributes.by_key("doc").attrs() { |
127 | let InFile { file_id, value: src } = attrs_source_map.source_of(&attr); | 127 | let InFile { file_id, value: src } = attrs_source_map.source_of(attr); |
128 | if file_id != node.file_id { | 128 | if file_id != node.file_id { |
129 | continue; | 129 | continue; |
130 | } | 130 | } |
diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs index 81c4d95b1..5cba9d11d 100644 --- a/crates/ide/src/typing/on_enter.rs +++ b/crates/ide/src/typing/on_enter.rs | |||
@@ -88,12 +88,12 @@ fn on_enter_in_comment( | |||
88 | if comment.text().ends_with(' ') { | 88 | if comment.text().ends_with(' ') { |
89 | cov_mark::hit!(continues_end_of_line_comment_with_space); | 89 | cov_mark::hit!(continues_end_of_line_comment_with_space); |
90 | remove_trailing_whitespace = true; | 90 | remove_trailing_whitespace = true; |
91 | } else if !followed_by_comment(&comment) { | 91 | } else if !followed_by_comment(comment) { |
92 | return None; | 92 | return None; |
93 | } | 93 | } |
94 | } | 94 | } |
95 | 95 | ||
96 | let indent = node_indent(&file, comment.syntax())?; | 96 | let indent = node_indent(file, comment.syntax())?; |
97 | let inserted = format!("\n{}{} $0", indent, prefix); | 97 | let inserted = format!("\n{}{} $0", indent, prefix); |
98 | let delete = if remove_trailing_whitespace { | 98 | let delete = if remove_trailing_whitespace { |
99 | let trimmed_len = comment.text().trim_end().len() as u32; | 99 | let trimmed_len = comment.text().trim_end().len() as u32; |
@@ -188,7 +188,7 @@ mod tests { | |||
188 | use crate::fixture; | 188 | use crate::fixture; |
189 | 189 | ||
190 | fn apply_on_enter(before: &str) -> Option<String> { | 190 | fn apply_on_enter(before: &str) -> Option<String> { |
191 | let (analysis, position) = fixture::position(&before); | 191 | let (analysis, position) = fixture::position(before); |
192 | let result = analysis.on_enter(position).unwrap()?; | 192 | let result = analysis.on_enter(position).unwrap()?; |
193 | 193 | ||
194 | let mut actual = analysis.file_text(position.file_id).unwrap().to_string(); | 194 | let mut actual = analysis.file_text(position.file_id).unwrap().to_string(); |
diff --git a/crates/ide_assists/src/handlers/convert_comment_block.rs b/crates/ide_assists/src/handlers/convert_comment_block.rs index d202a85f9..749e8685b 100644 --- a/crates/ide_assists/src/handlers/convert_comment_block.rs +++ b/crates/ide_assists/src/handlers/convert_comment_block.rs | |||
@@ -88,7 +88,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> { | |||
88 | // We pick a single indentation level for the whole block comment based on the | 88 | // We pick a single indentation level for the whole block comment based on the |
89 | // comment where the assist was invoked. This will be prepended to the | 89 | // comment where the assist was invoked. This will be prepended to the |
90 | // contents of each line comment when they're put into the block comment. | 90 | // contents of each line comment when they're put into the block comment. |
91 | let indentation = IndentLevel::from_token(&comment.syntax()); | 91 | let indentation = IndentLevel::from_token(comment.syntax()); |
92 | 92 | ||
93 | let block_comment_body = | 93 | let block_comment_body = |
94 | comments.into_iter().map(|c| line_comment_text(indentation, c)).join("\n"); | 94 | comments.into_iter().map(|c| line_comment_text(indentation, c)).join("\n"); |
@@ -167,7 +167,7 @@ fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String { | |||
167 | if contents.is_empty() { | 167 | if contents.is_empty() { |
168 | contents.to_owned() | 168 | contents.to_owned() |
169 | } else { | 169 | } else { |
170 | indentation.to_string() + &contents | 170 | indentation.to_string() + contents |
171 | } | 171 | } |
172 | } | 172 | } |
173 | 173 | ||
diff --git a/crates/ide_assists/src/handlers/early_return.rs b/crates/ide_assists/src/handlers/early_return.rs index 5eb6a57f0..ef4a7cb50 100644 --- a/crates/ide_assists/src/handlers/early_return.rs +++ b/crates/ide_assists/src/handlers/early_return.rs | |||
@@ -108,7 +108,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext) | |||
108 | "Convert to guarded return", | 108 | "Convert to guarded return", |
109 | target, | 109 | target, |
110 | |edit| { | 110 | |edit| { |
111 | let if_indent_level = IndentLevel::from_node(&if_expr.syntax()); | 111 | let if_indent_level = IndentLevel::from_node(if_expr.syntax()); |
112 | let new_block = match if_let_pat { | 112 | let new_block = match if_let_pat { |
113 | None => { | 113 | None => { |
114 | // If. | 114 | // If. |
@@ -174,7 +174,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext) | |||
174 | .take_while(|i| *i != end_of_then), | 174 | .take_while(|i| *i != end_of_then), |
175 | ); | 175 | ); |
176 | replace_children( | 176 | replace_children( |
177 | &parent_block.syntax(), | 177 | parent_block.syntax(), |
178 | RangeInclusive::new( | 178 | RangeInclusive::new( |
179 | if_expr.clone().syntax().clone().into(), | 179 | if_expr.clone().syntax().clone().into(), |
180 | if_expr.syntax().clone().into(), | 180 | if_expr.syntax().clone().into(), |
diff --git a/crates/ide_assists/src/handlers/extract_function.rs b/crates/ide_assists/src/handlers/extract_function.rs index a2dba915c..63d28480a 100644 --- a/crates/ide_assists/src/handlers/extract_function.rs +++ b/crates/ide_assists/src/handlers/extract_function.rs | |||
@@ -76,7 +76,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext) -> Option | |||
76 | let module = ctx.sema.scope(&insert_after).module()?; | 76 | let module = ctx.sema.scope(&insert_after).module()?; |
77 | 77 | ||
78 | let vars_defined_in_body_and_outlive = | 78 | let vars_defined_in_body_and_outlive = |
79 | vars_defined_in_body_and_outlive(ctx, &body, &node.parent().as_ref().unwrap_or(&node)); | 79 | vars_defined_in_body_and_outlive(ctx, &body, node.parent().as_ref().unwrap_or(&node)); |
80 | let ret_ty = body_return_ty(ctx, &body)?; | 80 | let ret_ty = body_return_ty(ctx, &body)?; |
81 | 81 | ||
82 | // FIXME: we compute variables that outlive here just to check `never!` condition | 82 | // FIXME: we compute variables that outlive here just to check `never!` condition |
@@ -808,7 +808,7 @@ trait HasTokenAtOffset { | |||
808 | 808 | ||
809 | impl HasTokenAtOffset for SyntaxNode { | 809 | impl HasTokenAtOffset for SyntaxNode { |
810 | fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> { | 810 | fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> { |
811 | SyntaxNode::token_at_offset(&self, offset) | 811 | SyntaxNode::token_at_offset(self, offset) |
812 | } | 812 | } |
813 | } | 813 | } |
814 | 814 | ||
@@ -854,7 +854,7 @@ fn vars_defined_in_body_and_outlive( | |||
854 | body: &FunctionBody, | 854 | body: &FunctionBody, |
855 | parent: &SyntaxNode, | 855 | parent: &SyntaxNode, |
856 | ) -> Vec<OutlivedLocal> { | 856 | ) -> Vec<OutlivedLocal> { |
857 | let vars_defined_in_body = vars_defined_in_body(&body, ctx); | 857 | let vars_defined_in_body = vars_defined_in_body(body, ctx); |
858 | vars_defined_in_body | 858 | vars_defined_in_body |
859 | .into_iter() | 859 | .into_iter() |
860 | .filter_map(|var| var_outlives_body(ctx, body, var, parent)) | 860 | .filter_map(|var| var_outlives_body(ctx, body, var, parent)) |
@@ -868,7 +868,7 @@ fn is_defined_before( | |||
868 | src: &hir::InFile<Either<ast::IdentPat, ast::SelfParam>>, | 868 | src: &hir::InFile<Either<ast::IdentPat, ast::SelfParam>>, |
869 | ) -> bool { | 869 | ) -> bool { |
870 | src.file_id.original_file(ctx.db()) == ctx.frange.file_id | 870 | src.file_id.original_file(ctx.db()) == ctx.frange.file_id |
871 | && !body.contains_node(&either_syntax(&src.value)) | 871 | && !body.contains_node(either_syntax(&src.value)) |
872 | } | 872 | } |
873 | 873 | ||
874 | fn either_syntax(value: &Either<ast::IdentPat, ast::SelfParam>) -> &SyntaxNode { | 874 | fn either_syntax(value: &Either<ast::IdentPat, ast::SelfParam>) -> &SyntaxNode { |
diff --git a/crates/ide_assists/src/handlers/fill_match_arms.rs b/crates/ide_assists/src/handlers/fill_match_arms.rs index 3d2cd739a..c8bc923f5 100644 --- a/crates/ide_assists/src/handlers/fill_match_arms.rs +++ b/crates/ide_assists/src/handlers/fill_match_arms.rs | |||
@@ -212,7 +212,7 @@ impl ExtendedEnum { | |||
212 | } | 212 | } |
213 | 213 | ||
214 | fn resolve_enum_def(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> { | 214 | fn resolve_enum_def(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> { |
215 | sema.type_of_expr(&expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() { | 215 | sema.type_of_expr(expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() { |
216 | Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)), | 216 | Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)), |
217 | _ => { | 217 | _ => { |
218 | if ty.is_bool() { | 218 | if ty.is_bool() { |
@@ -228,7 +228,7 @@ fn resolve_tuple_of_enum_def( | |||
228 | sema: &Semantics<RootDatabase>, | 228 | sema: &Semantics<RootDatabase>, |
229 | expr: &ast::Expr, | 229 | expr: &ast::Expr, |
230 | ) -> Option<Vec<ExtendedEnum>> { | 230 | ) -> Option<Vec<ExtendedEnum>> { |
231 | sema.type_of_expr(&expr)? | 231 | sema.type_of_expr(expr)? |
232 | .tuple_fields(sema.db) | 232 | .tuple_fields(sema.db) |
233 | .iter() | 233 | .iter() |
234 | .map(|ty| { | 234 | .map(|ty| { |
diff --git a/crates/ide_assists/src/handlers/fix_visibility.rs b/crates/ide_assists/src/handlers/fix_visibility.rs index 89f7b2c2c..9b432e92f 100644 --- a/crates/ide_assists/src/handlers/fix_visibility.rs +++ b/crates/ide_assists/src/handlers/fix_visibility.rs | |||
@@ -43,7 +43,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext) -> O | |||
43 | _ => return None, | 43 | _ => return None, |
44 | }; | 44 | }; |
45 | 45 | ||
46 | let current_module = ctx.sema.scope(&path.syntax()).module()?; | 46 | let current_module = ctx.sema.scope(path.syntax()).module()?; |
47 | let target_module = def.module(ctx.db())?; | 47 | let target_module = def.module(ctx.db())?; |
48 | 48 | ||
49 | let vis = target_module.visibility_of(ctx.db(), &def)?; | 49 | let vis = target_module.visibility_of(ctx.db(), &def)?; |
diff --git a/crates/ide_assists/src/handlers/generate_enum_is_method.rs b/crates/ide_assists/src/handlers/generate_enum_is_method.rs index a9f71a703..24939f262 100644 --- a/crates/ide_assists/src/handlers/generate_enum_is_method.rs +++ b/crates/ide_assists/src/handlers/generate_enum_is_method.rs | |||
@@ -47,7 +47,7 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext) -> | |||
47 | let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text())); | 47 | let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text())); |
48 | 48 | ||
49 | // Return early if we've found an existing new fn | 49 | // Return early if we've found an existing new fn |
50 | let impl_def = find_struct_impl(&ctx, &parent_enum, &fn_name)?; | 50 | let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?; |
51 | 51 | ||
52 | let target = variant.syntax().text_range(); | 52 | let target = variant.syntax().text_range(); |
53 | acc.add( | 53 | acc.add( |
diff --git a/crates/ide_assists/src/handlers/generate_enum_projection_method.rs b/crates/ide_assists/src/handlers/generate_enum_projection_method.rs index e2f572ba3..986fb2315 100644 --- a/crates/ide_assists/src/handlers/generate_enum_projection_method.rs +++ b/crates/ide_assists/src/handlers/generate_enum_projection_method.rs | |||
@@ -136,7 +136,7 @@ fn generate_enum_projection_method( | |||
136 | format!("{}_{}", props.fn_name_prefix, &to_lower_snake_case(&variant_name.text())); | 136 | format!("{}_{}", props.fn_name_prefix, &to_lower_snake_case(&variant_name.text())); |
137 | 137 | ||
138 | // Return early if we've found an existing new fn | 138 | // Return early if we've found an existing new fn |
139 | let impl_def = find_struct_impl(&ctx, &parent_enum, &fn_name)?; | 139 | let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?; |
140 | 140 | ||
141 | let target = variant.syntax().text_range(); | 141 | let target = variant.syntax().text_range(); |
142 | acc.add(AssistId(assist_id, AssistKind::Generate), assist_description, target, |builder| { | 142 | acc.add(AssistId(assist_id, AssistKind::Generate), assist_description, target, |builder| { |
diff --git a/crates/ide_assists/src/handlers/generate_function.rs b/crates/ide_assists/src/handlers/generate_function.rs index bc9fc524b..706c995ac 100644 --- a/crates/ide_assists/src/handlers/generate_function.rs +++ b/crates/ide_assists/src/handlers/generate_function.rs | |||
@@ -59,7 +59,7 @@ pub(crate) fn generate_function(acc: &mut Assists, ctx: &AssistContext) -> Optio | |||
59 | None => None, | 59 | None => None, |
60 | }; | 60 | }; |
61 | 61 | ||
62 | let function_builder = FunctionBuilder::from_call(&ctx, &call, &path, target_module)?; | 62 | let function_builder = FunctionBuilder::from_call(ctx, &call, &path, target_module)?; |
63 | 63 | ||
64 | let target = call.syntax().text_range(); | 64 | let target = call.syntax().text_range(); |
65 | acc.add( | 65 | acc.add( |
@@ -128,12 +128,12 @@ impl FunctionBuilder { | |||
128 | file = in_file; | 128 | file = in_file; |
129 | target | 129 | target |
130 | } | 130 | } |
131 | None => next_space_for_fn_after_call_site(&call)?, | 131 | None => next_space_for_fn_after_call_site(call)?, |
132 | }; | 132 | }; |
133 | let needs_pub = target_module.is_some(); | 133 | let needs_pub = target_module.is_some(); |
134 | let target_module = target_module.or_else(|| ctx.sema.scope(target.syntax()).module())?; | 134 | let target_module = target_module.or_else(|| ctx.sema.scope(target.syntax()).module())?; |
135 | let fn_name = fn_name(&path)?; | 135 | let fn_name = fn_name(path)?; |
136 | let (type_params, params) = fn_args(ctx, target_module, &call)?; | 136 | let (type_params, params) = fn_args(ctx, target_module, call)?; |
137 | 137 | ||
138 | // should_render_snippet intends to express a rough level of confidence about | 138 | // should_render_snippet intends to express a rough level of confidence about |
139 | // the correctness of the return type. | 139 | // the correctness of the return type. |
diff --git a/crates/ide_assists/src/handlers/generate_getter.rs b/crates/ide_assists/src/handlers/generate_getter.rs index 09971226e..cc020c92c 100644 --- a/crates/ide_assists/src/handlers/generate_getter.rs +++ b/crates/ide_assists/src/handlers/generate_getter.rs | |||
@@ -75,7 +75,7 @@ pub(crate) fn generate_getter_impl( | |||
75 | if mutable { | 75 | if mutable { |
76 | format_to!(fn_name, "_mut"); | 76 | format_to!(fn_name, "_mut"); |
77 | } | 77 | } |
78 | let impl_def = find_struct_impl(&ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?; | 78 | let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?; |
79 | 79 | ||
80 | let (id, label) = if mutable { | 80 | let (id, label) = if mutable { |
81 | ("generate_getter_mut", "Generate a mut getter method") | 81 | ("generate_getter_mut", "Generate a mut getter method") |
diff --git a/crates/ide_assists/src/handlers/generate_new.rs b/crates/ide_assists/src/handlers/generate_new.rs index 959a1f86c..b65e8387b 100644 --- a/crates/ide_assists/src/handlers/generate_new.rs +++ b/crates/ide_assists/src/handlers/generate_new.rs | |||
@@ -36,7 +36,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()> | |||
36 | }; | 36 | }; |
37 | 37 | ||
38 | // Return early if we've found an existing new fn | 38 | // Return early if we've found an existing new fn |
39 | let impl_def = find_struct_impl(&ctx, &ast::Adt::Struct(strukt.clone()), "new")?; | 39 | let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), "new")?; |
40 | 40 | ||
41 | let target = strukt.syntax().text_range(); | 41 | let target = strukt.syntax().text_range(); |
42 | acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| { | 42 | acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| { |
diff --git a/crates/ide_assists/src/handlers/generate_setter.rs b/crates/ide_assists/src/handlers/generate_setter.rs index 288cf745d..5bdf6b3f4 100644 --- a/crates/ide_assists/src/handlers/generate_setter.rs +++ b/crates/ide_assists/src/handlers/generate_setter.rs | |||
@@ -39,7 +39,7 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext) -> Option< | |||
39 | // Return early if we've found an existing fn | 39 | // Return early if we've found an existing fn |
40 | let fn_name = to_lower_snake_case(&field_name.to_string()); | 40 | let fn_name = to_lower_snake_case(&field_name.to_string()); |
41 | let impl_def = find_struct_impl( | 41 | let impl_def = find_struct_impl( |
42 | &ctx, | 42 | ctx, |
43 | &ast::Adt::Struct(strukt.clone()), | 43 | &ast::Adt::Struct(strukt.clone()), |
44 | format!("set_{}", fn_name).as_str(), | 44 | format!("set_{}", fn_name).as_str(), |
45 | )?; | 45 | )?; |
diff --git a/crates/ide_assists/src/handlers/remove_dbg.rs b/crates/ide_assists/src/handlers/remove_dbg.rs index c8226550f..b20fe992d 100644 --- a/crates/ide_assists/src/handlers/remove_dbg.rs +++ b/crates/ide_assists/src/handlers/remove_dbg.rs | |||
@@ -85,7 +85,7 @@ fn whitespace_start(it: SyntaxElement) -> Option<TextSize> { | |||
85 | } | 85 | } |
86 | 86 | ||
87 | fn adjusted_macro_contents(macro_call: &ast::MacroCall) -> Option<String> { | 87 | fn adjusted_macro_contents(macro_call: &ast::MacroCall) -> Option<String> { |
88 | let contents = get_valid_macrocall_contents(&macro_call, "dbg")?; | 88 | let contents = get_valid_macrocall_contents(macro_call, "dbg")?; |
89 | let macro_text_with_brackets = macro_call.token_tree()?.syntax().text(); | 89 | let macro_text_with_brackets = macro_call.token_tree()?.syntax().text(); |
90 | let macro_text_in_brackets = macro_text_with_brackets.slice(TextRange::new( | 90 | let macro_text_in_brackets = macro_text_with_brackets.slice(TextRange::new( |
91 | TextSize::of('('), | 91 | TextSize::of('('), |
diff --git a/crates/ide_assists/src/handlers/reorder_fields.rs b/crates/ide_assists/src/handlers/reorder_fields.rs index 933acead1..f6a926042 100644 --- a/crates/ide_assists/src/handlers/reorder_fields.rs +++ b/crates/ide_assists/src/handlers/reorder_fields.rs | |||
@@ -28,7 +28,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext) -> Option<( | |||
28 | .or_else(|| ctx.find_node_at_offset::<ast::RecordPat>().map(Either::Right))?; | 28 | .or_else(|| ctx.find_node_at_offset::<ast::RecordPat>().map(Either::Right))?; |
29 | 29 | ||
30 | let path = record.as_ref().either(|it| it.path(), |it| it.path())?; | 30 | let path = record.as_ref().either(|it| it.path(), |it| it.path())?; |
31 | let ranks = compute_fields_ranks(&path, &ctx)?; | 31 | let ranks = compute_fields_ranks(&path, ctx)?; |
32 | let get_rank_of_field = | 32 | let get_rank_of_field = |
33 | |of: Option<_>| *ranks.get(&of.unwrap_or_default()).unwrap_or(&usize::MAX); | 33 | |of: Option<_>| *ranks.get(&of.unwrap_or_default()).unwrap_or(&usize::MAX); |
34 | 34 | ||
diff --git a/crates/ide_assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide_assists/src/handlers/replace_derive_with_manual_impl.rs index 10d9cec31..f9474c9f5 100644 --- a/crates/ide_assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide_assists/src/handlers/replace_derive_with_manual_impl.rs | |||
@@ -112,7 +112,7 @@ fn add_assist( | |||
112 | let insert_pos = adt.syntax().text_range().end(); | 112 | let insert_pos = adt.syntax().text_range().end(); |
113 | let impl_def_with_items = | 113 | let impl_def_with_items = |
114 | impl_def_from_trait(&ctx.sema, &annotated_name, trait_, trait_path); | 114 | impl_def_from_trait(&ctx.sema, &annotated_name, trait_, trait_path); |
115 | update_attribute(builder, &input, &trait_name, &attr); | 115 | update_attribute(builder, input, &trait_name, attr); |
116 | let trait_path = format!("{}", trait_path); | 116 | let trait_path = format!("{}", trait_path); |
117 | match (ctx.config.snippet_cap, impl_def_with_items) { | 117 | match (ctx.config.snippet_cap, impl_def_with_items) { |
118 | (None, _) => { | 118 | (None, _) => { |
diff --git a/crates/ide_assists/src/handlers/replace_if_let_with_match.rs b/crates/ide_assists/src/handlers/replace_if_let_with_match.rs index aee880625..9404aa26d 100644 --- a/crates/ide_assists/src/handlers/replace_if_let_with_match.rs +++ b/crates/ide_assists/src/handlers/replace_if_let_with_match.rs | |||
@@ -169,7 +169,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext) | |||
169 | } | 169 | } |
170 | 170 | ||
171 | fn is_pat_wildcard_or_sad(sema: &hir::Semantics<RootDatabase>, pat: &ast::Pat) -> bool { | 171 | fn is_pat_wildcard_or_sad(sema: &hir::Semantics<RootDatabase>, pat: &ast::Pat) -> bool { |
172 | sema.type_of_pat(&pat) | 172 | sema.type_of_pat(pat) |
173 | .and_then(|ty| TryEnum::from_ty(sema, &ty)) | 173 | .and_then(|ty| TryEnum::from_ty(sema, &ty)) |
174 | .map(|it| it.sad_pattern().syntax().text() == pat.syntax().text()) | 174 | .map(|it| it.sad_pattern().syntax().text() == pat.syntax().text()) |
175 | .unwrap_or_else(|| matches!(pat, ast::Pat::WildcardPat(_))) | 175 | .unwrap_or_else(|| matches!(pat, ast::Pat::WildcardPat(_))) |
diff --git a/crates/ide_assists/src/handlers/wrap_return_type_in_result.rs b/crates/ide_assists/src/handlers/wrap_return_type_in_result.rs index 2f1da82c7..140e27356 100644 --- a/crates/ide_assists/src/handlers/wrap_return_type_in_result.rs +++ b/crates/ide_assists/src/handlers/wrap_return_type_in_result.rs | |||
@@ -123,7 +123,7 @@ impl TailReturnCollector { | |||
123 | fn handle_exprs(&mut self, expr: &Expr, collect_break: bool) { | 123 | fn handle_exprs(&mut self, expr: &Expr, collect_break: bool) { |
124 | match expr { | 124 | match expr { |
125 | Expr::BlockExpr(block_expr) => { | 125 | Expr::BlockExpr(block_expr) => { |
126 | self.collect_jump_exprs(&block_expr, collect_break); | 126 | self.collect_jump_exprs(block_expr, collect_break); |
127 | } | 127 | } |
128 | Expr::ReturnExpr(ret_expr) => { | 128 | Expr::ReturnExpr(ret_expr) => { |
129 | if let Some(ret_expr_arg) = &ret_expr.expr() { | 129 | if let Some(ret_expr_arg) = &ret_expr.expr() { |
diff --git a/crates/ide_assists/src/tests.rs b/crates/ide_assists/src/tests.rs index 2b7c2d581..bdf9cb71c 100644 --- a/crates/ide_assists/src/tests.rs +++ b/crates/ide_assists/src/tests.rs | |||
@@ -74,7 +74,7 @@ pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) { | |||
74 | #[track_caller] | 74 | #[track_caller] |
75 | fn check_doc_test(assist_id: &str, before: &str, after: &str) { | 75 | fn check_doc_test(assist_id: &str, before: &str, after: &str) { |
76 | let after = trim_indent(after); | 76 | let after = trim_indent(after); |
77 | let (db, file_id, selection) = RootDatabase::with_range_or_offset(&before); | 77 | let (db, file_id, selection) = RootDatabase::with_range_or_offset(before); |
78 | let before = db.file_text(file_id).to_string(); | 78 | let before = db.file_text(file_id).to_string(); |
79 | let frange = FileRange { file_id, range: selection.into() }; | 79 | let frange = FileRange { file_id, range: selection.into() }; |
80 | 80 | ||
diff --git a/crates/ide_assists/src/utils.rs b/crates/ide_assists/src/utils.rs index 30128a24a..068df005b 100644 --- a/crates/ide_assists/src/utils.rs +++ b/crates/ide_assists/src/utils.rs | |||
@@ -492,7 +492,7 @@ pub(crate) fn add_method_to_adt( | |||
492 | let start_offset = impl_def | 492 | let start_offset = impl_def |
493 | .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf)) | 493 | .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf)) |
494 | .unwrap_or_else(|| { | 494 | .unwrap_or_else(|| { |
495 | buf = generate_impl_text(&adt, &buf); | 495 | buf = generate_impl_text(adt, &buf); |
496 | adt.syntax().text_range().end() | 496 | adt.syntax().text_range().end() |
497 | }); | 497 | }); |
498 | 498 | ||
diff --git a/crates/ide_assists/src/utils/suggest_name.rs b/crates/ide_assists/src/utils/suggest_name.rs index b3aabeab3..cb8bc8b2f 100644 --- a/crates/ide_assists/src/utils/suggest_name.rs +++ b/crates/ide_assists/src/utils/suggest_name.rs | |||
@@ -187,7 +187,7 @@ fn from_method_call(expr: &ast::Expr) -> Option<String> { | |||
187 | } | 187 | } |
188 | } | 188 | } |
189 | 189 | ||
190 | normalize(&name) | 190 | normalize(name) |
191 | } | 191 | } |
192 | 192 | ||
193 | fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<String> { | 193 | fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<String> { |
diff --git a/crates/ide_completion/src/completions/dot.rs b/crates/ide_completion/src/completions/dot.rs index 8ad57a069..9552875c1 100644 --- a/crates/ide_completion/src/completions/dot.rs +++ b/crates/ide_completion/src/completions/dot.rs | |||
@@ -13,7 +13,7 @@ pub(crate) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { | |||
13 | _ => return complete_undotted_self(acc, ctx), | 13 | _ => return complete_undotted_self(acc, ctx), |
14 | }; | 14 | }; |
15 | 15 | ||
16 | let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) { | 16 | let receiver_ty = match ctx.sema.type_of_expr(dot_receiver) { |
17 | Some(ty) => ty, | 17 | Some(ty) => ty, |
18 | _ => return, | 18 | _ => return, |
19 | }; | 19 | }; |
diff --git a/crates/ide_completion/src/completions/postfix.rs b/crates/ide_completion/src/completions/postfix.rs index 86eb21714..9f98b21be 100644 --- a/crates/ide_completion/src/completions/postfix.rs +++ b/crates/ide_completion/src/completions/postfix.rs | |||
@@ -34,7 +34,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
34 | 34 | ||
35 | let receiver_text = get_receiver_text(dot_receiver, receiver_is_ambiguous_float_literal); | 35 | let receiver_text = get_receiver_text(dot_receiver, receiver_is_ambiguous_float_literal); |
36 | 36 | ||
37 | let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) { | 37 | let receiver_ty = match ctx.sema.type_of_expr(dot_receiver) { |
38 | Some(it) => it, | 38 | Some(it) => it, |
39 | None => return, | 39 | None => return, |
40 | }; | 40 | }; |
@@ -50,7 +50,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
50 | postfix_snippet( | 50 | postfix_snippet( |
51 | ctx, | 51 | ctx, |
52 | cap, | 52 | cap, |
53 | &dot_receiver, | 53 | dot_receiver, |
54 | "ifl", | 54 | "ifl", |
55 | "if let Ok {}", | 55 | "if let Ok {}", |
56 | &format!("if let Ok($1) = {} {{\n $0\n}}", receiver_text), | 56 | &format!("if let Ok($1) = {} {{\n $0\n}}", receiver_text), |
@@ -60,7 +60,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
60 | postfix_snippet( | 60 | postfix_snippet( |
61 | ctx, | 61 | ctx, |
62 | cap, | 62 | cap, |
63 | &dot_receiver, | 63 | dot_receiver, |
64 | "while", | 64 | "while", |
65 | "while let Ok {}", | 65 | "while let Ok {}", |
66 | &format!("while let Ok($1) = {} {{\n $0\n}}", receiver_text), | 66 | &format!("while let Ok($1) = {} {{\n $0\n}}", receiver_text), |
@@ -71,7 +71,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
71 | postfix_snippet( | 71 | postfix_snippet( |
72 | ctx, | 72 | ctx, |
73 | cap, | 73 | cap, |
74 | &dot_receiver, | 74 | dot_receiver, |
75 | "ifl", | 75 | "ifl", |
76 | "if let Some {}", | 76 | "if let Some {}", |
77 | &format!("if let Some($1) = {} {{\n $0\n}}", receiver_text), | 77 | &format!("if let Some($1) = {} {{\n $0\n}}", receiver_text), |
@@ -81,7 +81,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
81 | postfix_snippet( | 81 | postfix_snippet( |
82 | ctx, | 82 | ctx, |
83 | cap, | 83 | cap, |
84 | &dot_receiver, | 84 | dot_receiver, |
85 | "while", | 85 | "while", |
86 | "while let Some {}", | 86 | "while let Some {}", |
87 | &format!("while let Some($1) = {} {{\n $0\n}}", receiver_text), | 87 | &format!("while let Some($1) = {} {{\n $0\n}}", receiver_text), |
@@ -93,7 +93,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
93 | postfix_snippet( | 93 | postfix_snippet( |
94 | ctx, | 94 | ctx, |
95 | cap, | 95 | cap, |
96 | &dot_receiver, | 96 | dot_receiver, |
97 | "if", | 97 | "if", |
98 | "if expr {}", | 98 | "if expr {}", |
99 | &format!("if {} {{\n $0\n}}", receiver_text), | 99 | &format!("if {} {{\n $0\n}}", receiver_text), |
@@ -102,22 +102,22 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
102 | postfix_snippet( | 102 | postfix_snippet( |
103 | ctx, | 103 | ctx, |
104 | cap, | 104 | cap, |
105 | &dot_receiver, | 105 | dot_receiver, |
106 | "while", | 106 | "while", |
107 | "while expr {}", | 107 | "while expr {}", |
108 | &format!("while {} {{\n $0\n}}", receiver_text), | 108 | &format!("while {} {{\n $0\n}}", receiver_text), |
109 | ) | 109 | ) |
110 | .add_to(acc); | 110 | .add_to(acc); |
111 | postfix_snippet(ctx, cap, &dot_receiver, "not", "!expr", &format!("!{}", receiver_text)) | 111 | postfix_snippet(ctx, cap, dot_receiver, "not", "!expr", &format!("!{}", receiver_text)) |
112 | .add_to(acc); | 112 | .add_to(acc); |
113 | } | 113 | } |
114 | 114 | ||
115 | postfix_snippet(ctx, cap, &dot_receiver, "ref", "&expr", &format!("&{}", receiver_text)) | 115 | postfix_snippet(ctx, cap, dot_receiver, "ref", "&expr", &format!("&{}", receiver_text)) |
116 | .add_to(acc); | 116 | .add_to(acc); |
117 | postfix_snippet( | 117 | postfix_snippet( |
118 | ctx, | 118 | ctx, |
119 | cap, | 119 | cap, |
120 | &dot_receiver, | 120 | dot_receiver, |
121 | "refm", | 121 | "refm", |
122 | "&mut expr", | 122 | "&mut expr", |
123 | &format!("&mut {}", receiver_text), | 123 | &format!("&mut {}", receiver_text), |
diff --git a/crates/ide_completion/src/completions/postfix/format_like.rs b/crates/ide_completion/src/completions/postfix/format_like.rs index 9ebe1dcc0..f619f8b52 100644 --- a/crates/ide_completion/src/completions/postfix/format_like.rs +++ b/crates/ide_completion/src/completions/postfix/format_like.rs | |||
@@ -53,7 +53,7 @@ pub(crate) fn add_format_like_completions( | |||
53 | for (label, macro_name) in KINDS { | 53 | for (label, macro_name) in KINDS { |
54 | let snippet = parser.into_suggestion(macro_name); | 54 | let snippet = parser.into_suggestion(macro_name); |
55 | 55 | ||
56 | postfix_snippet(ctx, cap, &dot_receiver, label, macro_name, &snippet).add_to(acc); | 56 | postfix_snippet(ctx, cap, dot_receiver, label, macro_name, &snippet).add_to(acc); |
57 | } | 57 | } |
58 | } | 58 | } |
59 | } | 59 | } |
diff --git a/crates/ide_completion/src/completions/qualified_path.rs b/crates/ide_completion/src/completions/qualified_path.rs index 58d4dd9ee..6083537b7 100644 --- a/crates/ide_completion/src/completions/qualified_path.rs +++ b/crates/ide_completion/src/completions/qualified_path.rs | |||
@@ -15,7 +15,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon | |||
15 | None => return, | 15 | None => return, |
16 | }; | 16 | }; |
17 | 17 | ||
18 | let resolution = match ctx.sema.resolve_path(&path) { | 18 | let resolution = match ctx.sema.resolve_path(path) { |
19 | Some(res) => res, | 19 | Some(res) => res, |
20 | None => return, | 20 | None => return, |
21 | }; | 21 | }; |
diff --git a/crates/ide_completion/src/context.rs b/crates/ide_completion/src/context.rs index 2c2a4aa6b..e4abe2742 100644 --- a/crates/ide_completion/src/context.rs +++ b/crates/ide_completion/src/context.rs | |||
@@ -467,7 +467,7 @@ impl<'a> CompletionContext<'a> { | |||
467 | self.expected_type = expected_type; | 467 | self.expected_type = expected_type; |
468 | self.expected_name = expected_name; | 468 | self.expected_name = expected_name; |
469 | 469 | ||
470 | let name_like = match find_node_at_offset(&&file_with_fake_ident, offset) { | 470 | let name_like = match find_node_at_offset(&file_with_fake_ident, offset) { |
471 | Some(it) => it, | 471 | Some(it) => it, |
472 | None => return, | 472 | None => return, |
473 | }; | 473 | }; |
diff --git a/crates/ide_completion/src/patterns.rs b/crates/ide_completion/src/patterns.rs index 81d7a1a1d..c567ac63d 100644 --- a/crates/ide_completion/src/patterns.rs +++ b/crates/ide_completion/src/patterns.rs | |||
@@ -115,12 +115,12 @@ pub(crate) fn determine_location( | |||
115 | ) -> Option<ImmediateLocation> { | 115 | ) -> Option<ImmediateLocation> { |
116 | let node = match name_like { | 116 | let node = match name_like { |
117 | ast::NameLike::NameRef(name_ref) => { | 117 | ast::NameLike::NameRef(name_ref) => { |
118 | if ast::RecordExprField::for_field_name(&name_ref).is_some() { | 118 | if ast::RecordExprField::for_field_name(name_ref).is_some() { |
119 | return sema | 119 | return sema |
120 | .find_node_at_offset_with_macros(original_file, offset) | 120 | .find_node_at_offset_with_macros(original_file, offset) |
121 | .map(ImmediateLocation::RecordExpr); | 121 | .map(ImmediateLocation::RecordExpr); |
122 | } | 122 | } |
123 | if ast::RecordPatField::for_field_name_ref(&name_ref).is_some() { | 123 | if ast::RecordPatField::for_field_name_ref(name_ref).is_some() { |
124 | return sema | 124 | return sema |
125 | .find_node_at_offset_with_macros(original_file, offset) | 125 | .find_node_at_offset_with_macros(original_file, offset) |
126 | .map(ImmediateLocation::RecordPat); | 126 | .map(ImmediateLocation::RecordPat); |
@@ -128,7 +128,7 @@ pub(crate) fn determine_location( | |||
128 | maximize_name_ref(name_ref) | 128 | maximize_name_ref(name_ref) |
129 | } | 129 | } |
130 | ast::NameLike::Name(name) => { | 130 | ast::NameLike::Name(name) => { |
131 | if ast::RecordPatField::for_field_name(&name).is_some() { | 131 | if ast::RecordPatField::for_field_name(name).is_some() { |
132 | return sema | 132 | return sema |
133 | .find_node_at_offset_with_macros(original_file, offset) | 133 | .find_node_at_offset_with_macros(original_file, offset) |
134 | .map(ImmediateLocation::RecordPat); | 134 | .map(ImmediateLocation::RecordPat); |
diff --git a/crates/ide_completion/src/render.rs b/crates/ide_completion/src/render.rs index d3db55c35..a55081631 100644 --- a/crates/ide_completion/src/render.rs +++ b/crates/ide_completion/src/render.rs | |||
@@ -86,7 +86,7 @@ impl<'a> RenderContext<'a> { | |||
86 | } | 86 | } |
87 | 87 | ||
88 | fn db(&self) -> &'a RootDatabase { | 88 | fn db(&self) -> &'a RootDatabase { |
89 | &self.completion.db | 89 | self.completion.db |
90 | } | 90 | } |
91 | 91 | ||
92 | fn source_range(&self) -> TextRange { | 92 | fn source_range(&self) -> TextRange { |
diff --git a/crates/ide_completion/src/render/pattern.rs b/crates/ide_completion/src/render/pattern.rs index b4e80f424..3717a0409 100644 --- a/crates/ide_completion/src/render/pattern.rs +++ b/crates/ide_completion/src/render/pattern.rs | |||
@@ -75,10 +75,10 @@ fn render_pat( | |||
75 | ) -> Option<String> { | 75 | ) -> Option<String> { |
76 | let mut pat = match kind { | 76 | let mut pat = match kind { |
77 | StructKind::Tuple if ctx.snippet_cap().is_some() => { | 77 | StructKind::Tuple if ctx.snippet_cap().is_some() => { |
78 | render_tuple_as_pat(&fields, &name, fields_omitted) | 78 | render_tuple_as_pat(fields, name, fields_omitted) |
79 | } | 79 | } |
80 | StructKind::Record => { | 80 | StructKind::Record => { |
81 | render_record_as_pat(ctx.db(), ctx.snippet_cap(), &fields, &name, fields_omitted) | 81 | render_record_as_pat(ctx.db(), ctx.snippet_cap(), fields, name, fields_omitted) |
82 | } | 82 | } |
83 | _ => return None, | 83 | _ => return None, |
84 | }; | 84 | }; |
@@ -86,7 +86,7 @@ fn render_pat( | |||
86 | if ctx.completion.is_param { | 86 | if ctx.completion.is_param { |
87 | pat.push(':'); | 87 | pat.push(':'); |
88 | pat.push(' '); | 88 | pat.push(' '); |
89 | pat.push_str(&name); | 89 | pat.push_str(name); |
90 | } | 90 | } |
91 | if ctx.snippet_cap().is_some() { | 91 | if ctx.snippet_cap().is_some() { |
92 | pat.push_str("$0"); | 92 | pat.push_str("$0"); |
diff --git a/crates/ide_db/src/call_info.rs b/crates/ide_db/src/call_info.rs index 933bcad55..4795e2565 100644 --- a/crates/ide_db/src/call_info.rs +++ b/crates/ide_db/src/call_info.rs | |||
@@ -162,7 +162,7 @@ impl ActiveParameter { | |||
162 | } | 162 | } |
163 | 163 | ||
164 | pub fn at_token(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Self> { | 164 | pub fn at_token(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Self> { |
165 | let (signature, active_parameter) = call_info_impl(&sema, token)?; | 165 | let (signature, active_parameter) = call_info_impl(sema, token)?; |
166 | 166 | ||
167 | let idx = active_parameter?; | 167 | let idx = active_parameter?; |
168 | let mut params = signature.params(sema.db); | 168 | let mut params = signature.params(sema.db); |
diff --git a/crates/ide_db/src/helpers/import_assets.rs b/crates/ide_db/src/helpers/import_assets.rs index ae52dd8bb..9634d872e 100644 --- a/crates/ide_db/src/helpers/import_assets.rs +++ b/crates/ide_db/src/helpers/import_assets.rs | |||
@@ -323,7 +323,7 @@ fn import_for_item( | |||
323 | } | 323 | } |
324 | 324 | ||
325 | let segment_import = | 325 | let segment_import = |
326 | find_import_for_segment(db, original_item_candidate, &unresolved_first_segment)?; | 326 | find_import_for_segment(db, original_item_candidate, unresolved_first_segment)?; |
327 | let trait_item_to_import = item_as_assoc(db, original_item) | 327 | let trait_item_to_import = item_as_assoc(db, original_item) |
328 | .and_then(|assoc| assoc.containing_trait(db)) | 328 | .and_then(|assoc| assoc.containing_trait(db)) |
329 | .map(|trait_| ItemInNs::from(ModuleDef::from(trait_))); | 329 | .map(|trait_| ItemInNs::from(ModuleDef::from(trait_))); |
@@ -383,7 +383,7 @@ fn find_import_for_segment( | |||
383 | original_item | 383 | original_item |
384 | } else { | 384 | } else { |
385 | let matching_module = | 385 | let matching_module = |
386 | module_with_segment_name(db, &unresolved_first_segment, original_item)?; | 386 | module_with_segment_name(db, unresolved_first_segment, original_item)?; |
387 | ItemInNs::from(ModuleDef::from(matching_module)) | 387 | ItemInNs::from(ModuleDef::from(matching_module)) |
388 | }) | 388 | }) |
389 | } | 389 | } |
diff --git a/crates/ide_db/src/helpers/merge_imports.rs b/crates/ide_db/src/helpers/merge_imports.rs index 0dbabb44f..ec29476a4 100644 --- a/crates/ide_db/src/helpers/merge_imports.rs +++ b/crates/ide_db/src/helpers/merge_imports.rs | |||
@@ -124,7 +124,7 @@ fn recursive_merge( | |||
124 | .map(|tree_list| tree_list.use_trees().any(tree_is_self)) | 124 | .map(|tree_list| tree_list.use_trees().any(tree_is_self)) |
125 | .unwrap_or(false) | 125 | .unwrap_or(false) |
126 | }; | 126 | }; |
127 | match (tree_contains_self(&lhs_t), tree_contains_self(&rhs_t)) { | 127 | match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) { |
128 | (true, false) => continue, | 128 | (true, false) => continue, |
129 | (false, true) => { | 129 | (false, true) => { |
130 | *lhs_t = rhs_t; | 130 | *lhs_t = rhs_t; |
diff --git a/crates/ide_db/src/search.rs b/crates/ide_db/src/search.rs index 8152630f5..8bfbba4bb 100644 --- a/crates/ide_db/src/search.rs +++ b/crates/ide_db/src/search.rs | |||
@@ -409,7 +409,7 @@ impl<'a> FindUsages<'a> { | |||
409 | if let Some(ast::NameLike::NameRef(name_ref)) = | 409 | if let Some(ast::NameLike::NameRef(name_ref)) = |
410 | sema.find_node_at_offset_with_descend(&tree, offset) | 410 | sema.find_node_at_offset_with_descend(&tree, offset) |
411 | { | 411 | { |
412 | if self.found_self_ty_name_ref(&self_ty, &name_ref, sink) { | 412 | if self.found_self_ty_name_ref(self_ty, &name_ref, sink) { |
413 | return; | 413 | return; |
414 | } | 414 | } |
415 | } | 415 | } |
@@ -424,7 +424,7 @@ impl<'a> FindUsages<'a> { | |||
424 | name_ref: &ast::NameRef, | 424 | name_ref: &ast::NameRef, |
425 | sink: &mut dyn FnMut(FileId, FileReference) -> bool, | 425 | sink: &mut dyn FnMut(FileId, FileReference) -> bool, |
426 | ) -> bool { | 426 | ) -> bool { |
427 | match NameRefClass::classify(self.sema, &name_ref) { | 427 | match NameRefClass::classify(self.sema, name_ref) { |
428 | Some(NameRefClass::Definition(Definition::SelfType(impl_))) | 428 | Some(NameRefClass::Definition(Definition::SelfType(impl_))) |
429 | if impl_.self_ty(self.sema.db) == *self_ty => | 429 | if impl_.self_ty(self.sema.db) == *self_ty => |
430 | { | 430 | { |
@@ -464,13 +464,13 @@ impl<'a> FindUsages<'a> { | |||
464 | name_ref: &ast::NameRef, | 464 | name_ref: &ast::NameRef, |
465 | sink: &mut dyn FnMut(FileId, FileReference) -> bool, | 465 | sink: &mut dyn FnMut(FileId, FileReference) -> bool, |
466 | ) -> bool { | 466 | ) -> bool { |
467 | match NameRefClass::classify(self.sema, &name_ref) { | 467 | match NameRefClass::classify(self.sema, name_ref) { |
468 | Some(NameRefClass::Definition(def)) if def == self.def => { | 468 | Some(NameRefClass::Definition(def)) if def == self.def => { |
469 | let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); | 469 | let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); |
470 | let reference = FileReference { | 470 | let reference = FileReference { |
471 | range, | 471 | range, |
472 | name: ast::NameLike::NameRef(name_ref.clone()), | 472 | name: ast::NameLike::NameRef(name_ref.clone()), |
473 | access: reference_access(&def, &name_ref), | 473 | access: reference_access(&def, name_ref), |
474 | }; | 474 | }; |
475 | sink(file_id, reference) | 475 | sink(file_id, reference) |
476 | } | 476 | } |
@@ -480,7 +480,7 @@ impl<'a> FindUsages<'a> { | |||
480 | let reference = FileReference { | 480 | let reference = FileReference { |
481 | range, | 481 | range, |
482 | name: ast::NameLike::NameRef(name_ref.clone()), | 482 | name: ast::NameLike::NameRef(name_ref.clone()), |
483 | access: reference_access(&def, &name_ref), | 483 | access: reference_access(&def, name_ref), |
484 | }; | 484 | }; |
485 | sink(file_id, reference) | 485 | sink(file_id, reference) |
486 | } else { | 486 | } else { |
@@ -491,10 +491,10 @@ impl<'a> FindUsages<'a> { | |||
491 | let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); | 491 | let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); |
492 | let access = match self.def { | 492 | let access = match self.def { |
493 | Definition::Field(_) if field == self.def => { | 493 | Definition::Field(_) if field == self.def => { |
494 | reference_access(&field, &name_ref) | 494 | reference_access(&field, name_ref) |
495 | } | 495 | } |
496 | Definition::Local(l) if local == l => { | 496 | Definition::Local(l) if local == l => { |
497 | reference_access(&Definition::Local(local), &name_ref) | 497 | reference_access(&Definition::Local(local), name_ref) |
498 | } | 498 | } |
499 | _ => return false, | 499 | _ => return false, |
500 | }; | 500 | }; |
diff --git a/crates/ide_ssr/src/matching.rs b/crates/ide_ssr/src/matching.rs index b3072fb9f..fb92a0ccc 100644 --- a/crates/ide_ssr/src/matching.rs +++ b/crates/ide_ssr/src/matching.rs | |||
@@ -382,7 +382,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
382 | code: Option<T>, | 382 | code: Option<T>, |
383 | ) -> Result<(), MatchFailed> { | 383 | ) -> Result<(), MatchFailed> { |
384 | match (pattern, code) { | 384 | match (pattern, code) { |
385 | (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()), | 385 | (Some(p), Some(c)) => self.attempt_match_node(phase, p.syntax(), c.syntax()), |
386 | (None, None) => Ok(()), | 386 | (None, None) => Ok(()), |
387 | (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()), | 387 | (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()), |
388 | (None, Some(c)) => { | 388 | (None, Some(c)) => { |
@@ -478,7 +478,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
478 | if Some(first_token.text()) == next_pattern_token.as_deref() { | 478 | if Some(first_token.text()) == next_pattern_token.as_deref() { |
479 | if let Some(SyntaxElement::Node(p)) = pattern.next() { | 479 | if let Some(SyntaxElement::Node(p)) = pattern.next() { |
480 | // We have a subtree that starts with the next token in our pattern. | 480 | // We have a subtree that starts with the next token in our pattern. |
481 | self.attempt_match_token_tree(phase, &p, &n)?; | 481 | self.attempt_match_token_tree(phase, &p, n)?; |
482 | break; | 482 | break; |
483 | } | 483 | } |
484 | } | 484 | } |
@@ -609,7 +609,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
609 | expr: &ast::Expr, | 609 | expr: &ast::Expr, |
610 | ) -> Result<usize, MatchFailed> { | 610 | ) -> Result<usize, MatchFailed> { |
611 | use hir::HirDisplay; | 611 | use hir::HirDisplay; |
612 | let code_type = self.sema.type_of_expr(&expr).ok_or_else(|| { | 612 | let code_type = self.sema.type_of_expr(expr).ok_or_else(|| { |
613 | match_error!("Failed to get receiver type for `{}`", expr.syntax().text()) | 613 | match_error!("Failed to get receiver type for `{}`", expr.syntax().text()) |
614 | })?; | 614 | })?; |
615 | // Temporary needed to make the borrow checker happy. | 615 | // Temporary needed to make the borrow checker happy. |
diff --git a/crates/ide_ssr/src/replacing.rs b/crates/ide_ssr/src/replacing.rs index c9ccc1961..9265af7c1 100644 --- a/crates/ide_ssr/src/replacing.rs +++ b/crates/ide_ssr/src/replacing.rs | |||
@@ -84,16 +84,16 @@ impl ReplacementRenderer<'_> { | |||
84 | fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) { | 84 | fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) { |
85 | match node_or_token { | 85 | match node_or_token { |
86 | SyntaxElement::Token(token) => { | 86 | SyntaxElement::Token(token) => { |
87 | self.render_token(&token); | 87 | self.render_token(token); |
88 | } | 88 | } |
89 | SyntaxElement::Node(child_node) => { | 89 | SyntaxElement::Node(child_node) => { |
90 | self.render_node(&child_node); | 90 | self.render_node(child_node); |
91 | } | 91 | } |
92 | } | 92 | } |
93 | } | 93 | } |
94 | 94 | ||
95 | fn render_node(&mut self, node: &SyntaxNode) { | 95 | fn render_node(&mut self, node: &SyntaxNode) { |
96 | if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) { | 96 | if let Some(mod_path) = self.match_info.rendered_template_paths.get(node) { |
97 | self.out.push_str(&mod_path.to_string()); | 97 | self.out.push_str(&mod_path.to_string()); |
98 | // Emit everything except for the segment's name-ref, since we already effectively | 98 | // Emit everything except for the segment's name-ref, since we already effectively |
99 | // emitted that as part of `mod_path`. | 99 | // emitted that as part of `mod_path`. |
@@ -107,12 +107,12 @@ impl ReplacementRenderer<'_> { | |||
107 | } | 107 | } |
108 | } | 108 | } |
109 | } else { | 109 | } else { |
110 | self.render_node_children(&node); | 110 | self.render_node_children(node); |
111 | } | 111 | } |
112 | } | 112 | } |
113 | 113 | ||
114 | fn render_token(&mut self, token: &SyntaxToken) { | 114 | fn render_token(&mut self, token: &SyntaxToken) { |
115 | if let Some(placeholder) = self.rule.get_placeholder(&token) { | 115 | if let Some(placeholder) = self.rule.get_placeholder(token) { |
116 | if let Some(placeholder_value) = | 116 | if let Some(placeholder_value) = |
117 | self.match_info.placeholder_values.get(&placeholder.ident) | 117 | self.match_info.placeholder_values.get(&placeholder.ident) |
118 | { | 118 | { |
diff --git a/crates/ide_ssr/src/resolving.rs b/crates/ide_ssr/src/resolving.rs index 541da4122..a66a7a4a8 100644 --- a/crates/ide_ssr/src/resolving.rs +++ b/crates/ide_ssr/src/resolving.rs | |||
@@ -211,7 +211,7 @@ impl<'db> ResolutionScope<'db> { | |||
211 | // First try resolving the whole path. This will work for things like | 211 | // First try resolving the whole path. This will work for things like |
212 | // `std::collections::HashMap`, but will fail for things like | 212 | // `std::collections::HashMap`, but will fail for things like |
213 | // `std::collections::HashMap::new`. | 213 | // `std::collections::HashMap::new`. |
214 | if let Some(resolution) = self.scope.speculative_resolve(&path) { | 214 | if let Some(resolution) = self.scope.speculative_resolve(path) { |
215 | return Some(resolution); | 215 | return Some(resolution); |
216 | } | 216 | } |
217 | // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if | 217 | // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if |
diff --git a/crates/ide_ssr/src/search.rs b/crates/ide_ssr/src/search.rs index 28cef742c..f2056919e 100644 --- a/crates/ide_ssr/src/search.rs +++ b/crates/ide_ssr/src/search.rs | |||
@@ -173,7 +173,7 @@ impl<'db> MatchFinder<'db> { | |||
173 | if !is_search_permitted(code) { | 173 | if !is_search_permitted(code) { |
174 | return; | 174 | return; |
175 | } | 175 | } |
176 | self.try_add_match(rule, &code, restrict_range, matches_out); | 176 | self.try_add_match(rule, code, restrict_range, matches_out); |
177 | // If we've got a macro call, we already tried matching it pre-expansion, which is the only | 177 | // If we've got a macro call, we already tried matching it pre-expansion, which is the only |
178 | // way to match the whole macro, now try expanding it and matching the expansion. | 178 | // way to match the whole macro, now try expanding it and matching the expansion. |
179 | if let Some(macro_call) = ast::MacroCall::cast(code.clone()) { | 179 | if let Some(macro_call) = ast::MacroCall::cast(code.clone()) { |
diff --git a/crates/ide_ssr/src/tests.rs b/crates/ide_ssr/src/tests.rs index 1d8565dc0..5dd0d600f 100644 --- a/crates/ide_ssr/src/tests.rs +++ b/crates/ide_ssr/src/tests.rs | |||
@@ -129,7 +129,7 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { | |||
129 | let matched_strings: Vec<String> = | 129 | let matched_strings: Vec<String> = |
130 | match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect(); | 130 | match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect(); |
131 | if matched_strings != expected && !expected.is_empty() { | 131 | if matched_strings != expected && !expected.is_empty() { |
132 | print_match_debug_info(&match_finder, position.file_id, &expected[0]); | 132 | print_match_debug_info(&match_finder, position.file_id, expected[0]); |
133 | } | 133 | } |
134 | assert_eq!(matched_strings, expected); | 134 | assert_eq!(matched_strings, expected); |
135 | } | 135 | } |
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index c982eb58f..c0e1705c0 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs | |||
@@ -121,7 +121,7 @@ impl Match { | |||
121 | 121 | ||
122 | /// Matching errors are added to the `Match`. | 122 | /// Matching errors are added to the `Match`. |
123 | pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match { | 123 | pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match { |
124 | let mut res = match_loop(pattern, &input); | 124 | let mut res = match_loop(pattern, input); |
125 | res.bound_count = count(res.bindings.bindings()); | 125 | res.bound_count = count(res.bindings.bindings()); |
126 | return res; | 126 | return res; |
127 | 127 | ||
@@ -202,7 +202,7 @@ impl BindingsBuilder { | |||
202 | } | 202 | } |
203 | 203 | ||
204 | fn push_nested(&mut self, parent: &mut BindingsIdx, child: &BindingsIdx) { | 204 | fn push_nested(&mut self, parent: &mut BindingsIdx, child: &BindingsIdx) { |
205 | let BindingsIdx(idx, nidx) = self.copy(&child); | 205 | let BindingsIdx(idx, nidx) = self.copy(child); |
206 | self.nodes[parent.0].push(LinkNode::Node(Rc::new(BindingKind::Nested(idx, nidx)))); | 206 | self.nodes[parent.0].push(LinkNode::Node(Rc::new(BindingKind::Nested(idx, nidx)))); |
207 | } | 207 | } |
208 | 208 | ||
@@ -221,7 +221,7 @@ impl BindingsBuilder { | |||
221 | 221 | ||
222 | fn build_inner(&self, bindings: &mut Bindings, link_nodes: &[LinkNode<Rc<BindingKind>>]) { | 222 | fn build_inner(&self, bindings: &mut Bindings, link_nodes: &[LinkNode<Rc<BindingKind>>]) { |
223 | let mut nodes = Vec::new(); | 223 | let mut nodes = Vec::new(); |
224 | self.collect_nodes(&link_nodes, &mut nodes); | 224 | self.collect_nodes(link_nodes, &mut nodes); |
225 | 225 | ||
226 | for cmd in nodes { | 226 | for cmd in nodes { |
227 | match &**cmd { | 227 | match &**cmd { |
@@ -282,7 +282,7 @@ impl BindingsBuilder { | |||
282 | 282 | ||
283 | nested_refs.into_iter().for_each(|iter| { | 283 | nested_refs.into_iter().for_each(|iter| { |
284 | let mut child_bindings = Bindings::default(); | 284 | let mut child_bindings = Bindings::default(); |
285 | self.build_inner(&mut child_bindings, &iter); | 285 | self.build_inner(&mut child_bindings, iter); |
286 | nested.push(child_bindings) | 286 | nested.push(child_bindings) |
287 | }) | 287 | }) |
288 | } | 288 | } |
@@ -417,7 +417,7 @@ fn match_loop_inner<'t>( | |||
417 | let sep_len = item.sep.as_ref().map_or(0, Separator::tt_count); | 417 | let sep_len = item.sep.as_ref().map_or(0, Separator::tt_count); |
418 | if item.sep.is_some() && sep_idx != sep_len { | 418 | if item.sep.is_some() && sep_idx != sep_len { |
419 | let sep = item.sep.as_ref().unwrap(); | 419 | let sep = item.sep.as_ref().unwrap(); |
420 | if src.clone().expect_separator(&sep, sep_idx) { | 420 | if src.clone().expect_separator(sep, sep_idx) { |
421 | item.dot.next(); | 421 | item.dot.next(); |
422 | item.sep_parsed = Some(sep_idx + 1); | 422 | item.sep_parsed = Some(sep_idx + 1); |
423 | try_push!(next_items, item); | 423 | try_push!(next_items, item); |
@@ -487,7 +487,7 @@ fn match_loop_inner<'t>( | |||
487 | item.meta_result = Some((fork, match_res)); | 487 | item.meta_result = Some((fork, match_res)); |
488 | try_push!(bb_items, item); | 488 | try_push!(bb_items, item); |
489 | } else { | 489 | } else { |
490 | bindings_builder.push_optional(&mut item.bindings, &name); | 490 | bindings_builder.push_optional(&mut item.bindings, name); |
491 | item.dot.next(); | 491 | item.dot.next(); |
492 | cur_items.push(item); | 492 | cur_items.push(item); |
493 | } | 493 | } |
@@ -495,7 +495,7 @@ fn match_loop_inner<'t>( | |||
495 | Some(err) => { | 495 | Some(err) => { |
496 | res.add_err(err); | 496 | res.add_err(err); |
497 | if let Some(fragment) = match_res.value { | 497 | if let Some(fragment) = match_res.value { |
498 | bindings_builder.push_fragment(&mut item.bindings, &name, fragment); | 498 | bindings_builder.push_fragment(&mut item.bindings, name, fragment); |
499 | } | 499 | } |
500 | item.is_error = true; | 500 | item.is_error = true; |
501 | error_items.push(item); | 501 | error_items.push(item); |
@@ -504,7 +504,7 @@ fn match_loop_inner<'t>( | |||
504 | } | 504 | } |
505 | } | 505 | } |
506 | OpDelimited::Op(Op::Leaf(leaf)) => { | 506 | OpDelimited::Op(Op::Leaf(leaf)) => { |
507 | if let Err(err) = match_leaf(&leaf, &mut src.clone()) { | 507 | if let Err(err) = match_leaf(leaf, &mut src.clone()) { |
508 | res.add_err(err); | 508 | res.add_err(err); |
509 | item.is_error = true; | 509 | item.is_error = true; |
510 | } else { | 510 | } else { |
@@ -640,10 +640,10 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { | |||
640 | let (iter, match_res) = item.meta_result.take().unwrap(); | 640 | let (iter, match_res) = item.meta_result.take().unwrap(); |
641 | match match_res.value { | 641 | match match_res.value { |
642 | Some(fragment) => { | 642 | Some(fragment) => { |
643 | bindings_builder.push_fragment(&mut item.bindings, &name, fragment); | 643 | bindings_builder.push_fragment(&mut item.bindings, name, fragment); |
644 | } | 644 | } |
645 | None if match_res.err.is_none() => { | 645 | None if match_res.err.is_none() => { |
646 | bindings_builder.push_optional(&mut item.bindings, &name); | 646 | bindings_builder.push_optional(&mut item.bindings, name); |
647 | } | 647 | } |
648 | _ => {} | 648 | _ => {} |
649 | } | 649 | } |
diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index dd7fa97d7..9a9c1a467 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs | |||
@@ -55,7 +55,7 @@ pub(super) fn transcribe( | |||
55 | template: &MetaTemplate, | 55 | template: &MetaTemplate, |
56 | bindings: &Bindings, | 56 | bindings: &Bindings, |
57 | ) -> ExpandResult<tt::Subtree> { | 57 | ) -> ExpandResult<tt::Subtree> { |
58 | let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() }; | 58 | let mut ctx = ExpandCtx { bindings: bindings, nesting: Vec::new() }; |
59 | let mut arena: Vec<tt::TokenTree> = Vec::new(); | 59 | let mut arena: Vec<tt::TokenTree> = Vec::new(); |
60 | expand_subtree(&mut ctx, template, None, &mut arena) | 60 | expand_subtree(&mut ctx, template, None, &mut arena) |
61 | } | 61 | } |
@@ -91,12 +91,12 @@ fn expand_subtree( | |||
91 | Op::Leaf(tt) => arena.push(tt.clone().into()), | 91 | Op::Leaf(tt) => arena.push(tt.clone().into()), |
92 | Op::Subtree { tokens, delimiter } => { | 92 | Op::Subtree { tokens, delimiter } => { |
93 | let ExpandResult { value: tt, err: e } = | 93 | let ExpandResult { value: tt, err: e } = |
94 | expand_subtree(ctx, &tokens, *delimiter, arena); | 94 | expand_subtree(ctx, tokens, *delimiter, arena); |
95 | err = err.or(e); | 95 | err = err.or(e); |
96 | arena.push(tt.into()); | 96 | arena.push(tt.into()); |
97 | } | 97 | } |
98 | Op::Var { name, id, .. } => { | 98 | Op::Var { name, id, .. } => { |
99 | let ExpandResult { value: fragment, err: e } = expand_var(ctx, &name, *id); | 99 | let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id); |
100 | err = err.or(e); | 100 | err = err.or(e); |
101 | push_fragment(arena, fragment); | 101 | push_fragment(arena, fragment); |
102 | } | 102 | } |
@@ -141,7 +141,7 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr, id: tt::TokenId) -> ExpandResult | |||
141 | .into(); | 141 | .into(); |
142 | ExpandResult::ok(Fragment::Tokens(tt)) | 142 | ExpandResult::ok(Fragment::Tokens(tt)) |
143 | } else { | 143 | } else { |
144 | ctx.bindings.get(&v, &mut ctx.nesting).map_or_else( | 144 | ctx.bindings.get(v, &mut ctx.nesting).map_or_else( |
145 | |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) }, | 145 | |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) }, |
146 | |b| ExpandResult::ok(b.clone()), | 146 | |b| ExpandResult::ok(b.clone()), |
147 | ) | 147 | ) |
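The transcriber.rs change is the same lint at a struct-literal site: the `bindings` parameter is already a `&Bindings`, so `bindings: &bindings` stored a needless extra borrow. The commit keeps `bindings: bindings`; field-init shorthand is the fully idiomatic spelling, used in this hedged sketch with simplified stand-in types (the real `ExpandCtx` and `Bindings` carry more state):

#[derive(Default)]
struct Bindings;

struct ExpandCtx<'a> {
    bindings: &'a Bindings,
    nesting: Vec<usize>,
}

fn transcribe(bindings: &Bindings) -> usize {
    // `bindings` is already a reference, so it can initialize the field
    // directly; `&bindings` would be a needless `&&Bindings` borrow.
    let ctx = ExpandCtx { bindings, nesting: Vec::new() };
    ctx.nesting.len()
}

fn main() {
    let b = Bindings::default();
    assert_eq!(transcribe(&b), 0);
}
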
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 380a50744..8c8528aaf 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs | |||
@@ -280,8 +280,8 @@ impl Rule { | |||
280 | .expect_subtree() | 280 | .expect_subtree() |
281 | .map_err(|()| ParseError::Expected("expected subtree".to_string()))?; | 281 | .map_err(|()| ParseError::Expected("expected subtree".to_string()))?; |
282 | 282 | ||
283 | let lhs = MetaTemplate(parse_pattern(&lhs)?); | 283 | let lhs = MetaTemplate(parse_pattern(lhs)?); |
284 | let rhs = MetaTemplate(parse_template(&rhs)?); | 284 | let rhs = MetaTemplate(parse_template(rhs)?); |
285 | 285 | ||
286 | Ok(crate::Rule { lhs, rhs }) | 286 | Ok(crate::Rule { lhs, rhs }) |
287 | } | 287 | } |
@@ -290,7 +290,7 @@ impl Rule { | |||
290 | fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> { | 290 | fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> { |
291 | for op in pattern.iter() { | 291 | for op in pattern.iter() { |
292 | match op { | 292 | match op { |
293 | Op::Subtree { tokens, .. } => validate(&tokens)?, | 293 | Op::Subtree { tokens, .. } => validate(tokens)?, |
294 | Op::Repeat { tokens: subtree, separator, .. } => { | 294 | Op::Repeat { tokens: subtree, separator, .. } => { |
295 | // Checks that no repetition which could match an empty token | 295 | // Checks that no repetition which could match an empty token |
296 | // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558 | 296 | // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558 |
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index 04c0d3e75..deed884d2 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs | |||
@@ -42,7 +42,7 @@ impl<'a> OpDelimitedIter<'a> { | |||
42 | } | 42 | } |
43 | 43 | ||
44 | pub(crate) fn reset(&self) -> Self { | 44 | pub(crate) fn reset(&self) -> Self { |
45 | Self { inner: &self.inner, idx: 0, delimited: self.delimited } | 45 | Self { inner: self.inner, idx: 0, delimited: self.delimited } |
46 | } | 46 | } |
47 | } | 47 | } |
48 | 48 | ||
@@ -126,11 +126,11 @@ impl Separator { | |||
126 | } | 126 | } |
127 | 127 | ||
128 | pub(crate) fn parse_template(template: &tt::Subtree) -> Result<Vec<Op>, ParseError> { | 128 | pub(crate) fn parse_template(template: &tt::Subtree) -> Result<Vec<Op>, ParseError> { |
129 | parse_inner(&template, Mode::Template).into_iter().collect() | 129 | parse_inner(template, Mode::Template).into_iter().collect() |
130 | } | 130 | } |
131 | 131 | ||
132 | pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<Vec<Op>, ParseError> { | 132 | pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<Vec<Op>, ParseError> { |
133 | parse_inner(&pattern, Mode::Pattern).into_iter().collect() | 133 | parse_inner(pattern, Mode::Pattern).into_iter().collect() |
134 | } | 134 | } |
135 | 135 | ||
136 | #[derive(Clone, Copy)] | 136 | #[derive(Clone, Copy)] |
@@ -140,7 +140,7 @@ enum Mode { | |||
140 | } | 140 | } |
141 | 141 | ||
142 | fn parse_inner(tt: &tt::Subtree, mode: Mode) -> Vec<Result<Op, ParseError>> { | 142 | fn parse_inner(tt: &tt::Subtree, mode: Mode) -> Vec<Result<Op, ParseError>> { |
143 | let mut src = TtIter::new(&tt); | 143 | let mut src = TtIter::new(tt); |
144 | std::iter::from_fn(move || { | 144 | std::iter::from_fn(move || { |
145 | let first = src.next()?; | 145 | let first = src.next()?; |
146 | Some(next_op(first, &mut src, mode)) | 146 | Some(next_op(first, &mut src, mode)) |
@@ -171,7 +171,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul | |||
171 | match second { | 171 | match second { |
172 | tt::TokenTree::Subtree(subtree) => { | 172 | tt::TokenTree::Subtree(subtree) => { |
173 | let (separator, kind) = parse_repeat(src)?; | 173 | let (separator, kind) = parse_repeat(src)?; |
174 | let tokens = parse_inner(&subtree, mode) | 174 | let tokens = parse_inner(subtree, mode) |
175 | .into_iter() | 175 | .into_iter() |
176 | .collect::<Result<Vec<Op>, ParseError>>()?; | 176 | .collect::<Result<Vec<Op>, ParseError>>()?; |
177 | Op::Repeat { tokens: MetaTemplate(tokens), separator, kind } | 177 | Op::Repeat { tokens: MetaTemplate(tokens), separator, kind } |
@@ -191,7 +191,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul | |||
191 | Op::Var { name, kind, id } | 191 | Op::Var { name, kind, id } |
192 | } | 192 | } |
193 | tt::Leaf::Literal(lit) => { | 193 | tt::Leaf::Literal(lit) => { |
194 | if is_boolean_literal(&lit) { | 194 | if is_boolean_literal(lit) { |
195 | let name = lit.text.clone(); | 195 | let name = lit.text.clone(); |
196 | let kind = eat_fragment_kind(src, mode)?; | 196 | let kind = eat_fragment_kind(src, mode)?; |
197 | let id = lit.id; | 197 | let id = lit.id; |
@@ -206,7 +206,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul | |||
206 | tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()), | 206 | tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()), |
207 | tt::TokenTree::Subtree(subtree) => { | 207 | tt::TokenTree::Subtree(subtree) => { |
208 | let tokens = | 208 | let tokens = |
209 | parse_inner(&subtree, mode).into_iter().collect::<Result<Vec<Op>, ParseError>>()?; | 209 | parse_inner(subtree, mode).into_iter().collect::<Result<Vec<Op>, ParseError>>()?; |
210 | Op::Subtree { tokens: MetaTemplate(tokens), delimiter: subtree.delimiter } | 210 | Op::Subtree { tokens: MetaTemplate(tokens), delimiter: subtree.delimiter } |
211 | } | 211 | } |
212 | }; | 212 | }; |
diff --git a/crates/mbe/src/subtree_source.rs b/crates/mbe/src/subtree_source.rs index bde370fdb..ee80807ad 100644 --- a/crates/mbe/src/subtree_source.rs +++ b/crates/mbe/src/subtree_source.rs | |||
@@ -22,7 +22,7 @@ impl<'a> SubtreeTokenSource { | |||
22 | #[cfg(test)] | 22 | #[cfg(test)] |
23 | pub(crate) fn text(&self) -> SmolStr { | 23 | pub(crate) fn text(&self) -> SmolStr { |
24 | match self.cached.get(self.curr.1) { | 24 | match self.cached.get(self.curr.1) { |
25 | Some(ref tt) => tt.text.clone(), | 25 | Some(tt) => tt.text.clone(), |
26 | _ => SmolStr::new(""), | 26 | _ => SmolStr::new(""), |
27 | } | 27 | } |
28 | } | 28 | } |
@@ -59,7 +59,7 @@ impl<'a> SubtreeTokenSource { | |||
59 | 59 | ||
60 | current = match tt { | 60 | current = match tt { |
61 | Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => { | 61 | Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => { |
62 | cached.push(convert_leaf(&leaf)); | 62 | cached.push(convert_leaf(leaf)); |
63 | cursor.bump() | 63 | cursor.bump() |
64 | } | 64 | } |
65 | Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => { | 65 | Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => { |
@@ -114,7 +114,7 @@ impl<'a> TokenSource for SubtreeTokenSource { | |||
114 | /// Is the current token a specified keyword? | 114 | /// Is the current token a specified keyword? |
115 | fn is_keyword(&self, kw: &str) -> bool { | 115 | fn is_keyword(&self, kw: &str) -> bool { |
116 | match self.cached.get(self.curr.1) { | 116 | match self.cached.get(self.curr.1) { |
117 | Some(ref t) => t.text == *kw, | 117 | Some(t) => t.text == *kw, |
118 | _ => false, | 118 | _ => false, |
119 | } | 119 | } |
120 | } | 120 | } |
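The subtree_source.rs hunks drop `ref` from match arms: `self.cached.get(..)` already yields an `Option<&_>`, so `Some(ref tt)` bound a `&&_` where plain `Some(tt)` binds the reference directly. A minimal sketch with a stand-in token type:

struct TtToken {
    text: String,
}

fn text_at(cached: &[TtToken], idx: usize) -> String {
    // `get` returns `Option<&TtToken>`, so `Some(t)` already binds `t: &TtToken`;
    // the old `Some(ref t)` would have made it a `&&TtToken`.
    match cached.get(idx) {
        Some(t) => t.text.clone(),
        None => String::new(),
    }
}

fn main() {
    let cached = vec![TtToken { text: "ident".to_owned() }];
    assert_eq!(text_at(&cached, 0), "ident");
    assert_eq!(text_at(&cached, 1), "");
}
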
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 978c75747..cdc22425d 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs | |||
@@ -633,7 +633,7 @@ impl<'a> TreeSink for TtTreeSink<'a> { | |||
633 | } | 633 | } |
634 | } | 634 | } |
635 | }; | 635 | }; |
636 | self.buf += &text; | 636 | self.buf += text; |
637 | self.text_pos += TextSize::of(text); | 637 | self.text_pos += TextSize::of(text); |
638 | } | 638 | } |
639 | 639 | ||
diff --git a/crates/mbe/src/tests/expand.rs b/crates/mbe/src/tests/expand.rs index 75c88687c..c788e427e 100644 --- a/crates/mbe/src/tests/expand.rs +++ b/crates/mbe/src/tests/expand.rs | |||
@@ -490,7 +490,7 @@ | |||
490 | 490 | ||
491 | fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree { | 491 | fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree { |
492 | if let tt::TokenTree::Subtree(subtree) = tt { | 492 | if let tt::TokenTree::Subtree(subtree) = tt { |
493 | return &subtree; | 493 | return subtree; |
494 | } | 494 | } |
495 | unreachable!("It is not a subtree"); | 495 | unreachable!("It is not a subtree"); |
496 | } | 496 | } |
diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index bd54f2442..5a4eca7bf 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs | |||
@@ -115,7 +115,7 @@ impl<'a> TtIter<'a> { | |||
115 | } | 115 | } |
116 | } | 116 | } |
117 | 117 | ||
118 | let buffer = TokenBuffer::from_tokens(&self.inner.as_slice()); | 118 | let buffer = TokenBuffer::from_tokens(self.inner.as_slice()); |
119 | let mut src = SubtreeTokenSource::new(&buffer); | 119 | let mut src = SubtreeTokenSource::new(&buffer); |
120 | let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false }; | 120 | let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false }; |
121 | 121 | ||
diff --git a/crates/proc_macro_api/src/msg.rs b/crates/proc_macro_api/src/msg.rs index 14eed4289..899895578 100644 --- a/crates/proc_macro_api/src/msg.rs +++ b/crates/proc_macro_api/src/msg.rs | |||
@@ -59,7 +59,7 @@ pub trait Message: Serialize + DeserializeOwned { | |||
59 | Ok(match read_json(inp, buf)? { | 59 | Ok(match read_json(inp, buf)? { |
60 | None => None, | 60 | None => None, |
61 | Some(text) => { | 61 | Some(text) => { |
62 | let mut deserializer = serde_json::Deserializer::from_str(&text); | 62 | let mut deserializer = serde_json::Deserializer::from_str(text); |
63 | // Note that some proc-macro generate very deep syntax tree | 63 | // Note that some proc-macro generate very deep syntax tree |
64 | // We have to disable the current limit of serde here | 64 | // We have to disable the current limit of serde here |
65 | deserializer.disable_recursion_limit(); | 65 | deserializer.disable_recursion_limit(); |
diff --git a/crates/project_model/src/build_data.rs b/crates/project_model/src/build_data.rs index 33a4f8168..53cb4bae7 100644 --- a/crates/project_model/src/build_data.rs +++ b/crates/project_model/src/build_data.rs | |||
@@ -184,7 +184,7 @@ impl WorkspaceBuildData { | |||
184 | 184 | ||
185 | // Copy-pasted from existing cargo_metadata. It seems like we | 185 | // Copy-pasted from existing cargo_metadata. It seems like we |
186 | // should be using sered_stacker here? | 186 | // should be using sered_stacker here? |
187 | let mut deserializer = serde_json::Deserializer::from_str(&line); | 187 | let mut deserializer = serde_json::Deserializer::from_str(line); |
188 | deserializer.disable_recursion_limit(); | 188 | deserializer.disable_recursion_limit(); |
189 | let message = Message::deserialize(&mut deserializer) | 189 | let message = Message::deserialize(&mut deserializer) |
190 | .unwrap_or(Message::TextLine(line.to_string())); | 190 | .unwrap_or(Message::TextLine(line.to_string())); |
diff --git a/crates/project_model/src/cargo_workspace.rs b/crates/project_model/src/cargo_workspace.rs index b8ad08364..ac079f83e 100644 --- a/crates/project_model/src/cargo_workspace.rs +++ b/crates/project_model/src/cargo_workspace.rs | |||
@@ -278,7 +278,7 @@ impl CargoWorkspace { | |||
278 | id, edition, name, manifest_path, version, metadata, .. | 278 | id, edition, name, manifest_path, version, metadata, .. |
279 | } = meta_pkg; | 279 | } = meta_pkg; |
280 | let meta = from_value::<PackageMetadata>(metadata.clone()).unwrap_or_default(); | 280 | let meta = from_value::<PackageMetadata>(metadata.clone()).unwrap_or_default(); |
281 | let is_member = ws_members.contains(&id); | 281 | let is_member = ws_members.contains(id); |
282 | let edition = edition | 282 | let edition = edition |
283 | .parse::<Edition>() | 283 | .parse::<Edition>() |
284 | .with_context(|| format!("Failed to parse edition {}", edition))?; | 284 | .with_context(|| format!("Failed to parse edition {}", edition))?; |
diff --git a/crates/project_model/src/sysroot.rs b/crates/project_model/src/sysroot.rs index 4e39d6dd3..a22f79c15 100644 --- a/crates/project_model/src/sysroot.rs +++ b/crates/project_model/src/sysroot.rs | |||
@@ -142,12 +142,12 @@ fn discover_sysroot_src_dir( | |||
142 | log::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core); | 142 | log::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core); |
143 | } | 143 | } |
144 | 144 | ||
145 | get_rust_src(&sysroot_path) | 145 | get_rust_src(sysroot_path) |
146 | .or_else(|| { | 146 | .or_else(|| { |
147 | let mut rustup = Command::new(toolchain::rustup()); | 147 | let mut rustup = Command::new(toolchain::rustup()); |
148 | rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]); | 148 | rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]); |
149 | utf8_stdout(rustup).ok()?; | 149 | utf8_stdout(rustup).ok()?; |
150 | get_rust_src(&sysroot_path) | 150 | get_rust_src(sysroot_path) |
151 | }) | 151 | }) |
152 | .ok_or_else(|| { | 152 | .ok_or_else(|| { |
153 | format_err!( | 153 | format_err!( |
diff --git a/crates/project_model/src/workspace.rs b/crates/project_model/src/workspace.rs index 84990075f..ef0f3c9e4 100644 --- a/crates/project_model/src/workspace.rs +++ b/crates/project_model/src/workspace.rs | |||
@@ -185,7 +185,7 @@ impl ProjectWorkspace { | |||
185 | 185 | ||
186 | pub fn load_detached_files(detached_files: Vec<AbsPathBuf>) -> Result<ProjectWorkspace> { | 186 | pub fn load_detached_files(detached_files: Vec<AbsPathBuf>) -> Result<ProjectWorkspace> { |
187 | let sysroot = Sysroot::discover( | 187 | let sysroot = Sysroot::discover( |
188 | &detached_files.first().ok_or_else(|| format_err!("No detached files to load"))?, | 188 | detached_files.first().ok_or_else(|| format_err!("No detached files to load"))?, |
189 | )?; | 189 | )?; |
190 | let rustc_cfg = rustc_cfg::get(None, None); | 190 | let rustc_cfg = rustc_cfg::get(None, None); |
191 | Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) | 191 | Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) |
@@ -324,7 +324,7 @@ impl ProjectWorkspace { | |||
324 | pub fn collect_build_data_configs(&self, collector: &mut BuildDataCollector) { | 324 | pub fn collect_build_data_configs(&self, collector: &mut BuildDataCollector) { |
325 | match self { | 325 | match self { |
326 | ProjectWorkspace::Cargo { cargo, .. } => { | 326 | ProjectWorkspace::Cargo { cargo, .. } => { |
327 | collector.add_config(&cargo.workspace_root(), cargo.build_data_config().clone()); | 327 | collector.add_config(cargo.workspace_root(), cargo.build_data_config().clone()); |
328 | } | 328 | } |
329 | _ => {} | 329 | _ => {} |
330 | } | 330 | } |
@@ -348,7 +348,7 @@ fn project_json_to_crate_graph( | |||
348 | .crates() | 348 | .crates() |
349 | .filter_map(|(crate_id, krate)| { | 349 | .filter_map(|(crate_id, krate)| { |
350 | let file_path = &krate.root_module; | 350 | let file_path = &krate.root_module; |
351 | let file_id = load(&file_path)?; | 351 | let file_id = load(file_path)?; |
352 | Some((crate_id, krate, file_id)) | 352 | Some((crate_id, krate, file_id)) |
353 | }) | 353 | }) |
354 | .map(|(crate_id, krate, file_id)| { | 354 | .map(|(crate_id, krate, file_id)| { |
@@ -534,7 +534,7 @@ fn detached_files_to_crate_graph( | |||
534 | cfg_options.extend(rustc_cfg); | 534 | cfg_options.extend(rustc_cfg); |
535 | 535 | ||
536 | for detached_file in detached_files { | 536 | for detached_file in detached_files { |
537 | let file_id = match load(&detached_file) { | 537 | let file_id = match load(detached_file) { |
538 | Some(file_id) => file_id, | 538 | Some(file_id) => file_id, |
539 | None => { | 539 | None => { |
540 | log::error!("Failed to load detached file {:?}", detached_file); | 540 | log::error!("Failed to load detached file {:?}", detached_file); |
@@ -602,7 +602,7 @@ fn handle_rustc_crates( | |||
602 | crate_graph, | 602 | crate_graph, |
603 | &rustc_workspace[pkg], | 603 | &rustc_workspace[pkg], |
604 | rustc_build_data_map.and_then(|it| it.get(&rustc_workspace[pkg].id)), | 604 | rustc_build_data_map.and_then(|it| it.get(&rustc_workspace[pkg].id)), |
605 | &cfg_options, | 605 | cfg_options, |
606 | proc_macro_loader, | 606 | proc_macro_loader, |
607 | file_id, | 607 | file_id, |
608 | &rustc_workspace[tgt].name, | 608 | &rustc_workspace[tgt].name, |
@@ -685,7 +685,7 @@ fn add_target_crate_root( | |||
685 | let proc_macro = build_data | 685 | let proc_macro = build_data |
686 | .as_ref() | 686 | .as_ref() |
687 | .and_then(|it| it.proc_macro_dylib_path.as_ref()) | 687 | .and_then(|it| it.proc_macro_dylib_path.as_ref()) |
688 | .map(|it| proc_macro_loader(&it)) | 688 | .map(|it| proc_macro_loader(it)) |
689 | .unwrap_or_default(); | 689 | .unwrap_or_default(); |
690 | 690 | ||
691 | let display_name = CrateDisplayName::from_canonical_name(cargo_name.to_string()); | 691 | let display_name = CrateDisplayName::from_canonical_name(cargo_name.to_string()); |
diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index f4cd43448..5d8547152 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs | |||
@@ -123,7 +123,7 @@ impl CargoTargetSpec { | |||
123 | let res = CargoTargetSpec { | 123 | let res = CargoTargetSpec { |
124 | workspace_root: cargo_ws.workspace_root().to_path_buf(), | 124 | workspace_root: cargo_ws.workspace_root().to_path_buf(), |
125 | cargo_toml: package_data.manifest.clone(), | 125 | cargo_toml: package_data.manifest.clone(), |
126 | package: cargo_ws.package_flag(&package_data), | 126 | package: cargo_ws.package_flag(package_data), |
127 | target: target_data.name.clone(), | 127 | target: target_data.name.clone(), |
128 | target_kind: target_data.kind, | 128 | target_kind: target_data.kind, |
129 | }; | 129 | }; |
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index 19cb1c046..b5f5519b4 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs | |||
@@ -126,7 +126,7 @@ fn load_crate_graph( | |||
126 | } | 126 | } |
127 | } | 127 | } |
128 | } | 128 | } |
129 | let source_roots = source_root_config.partition(&vfs); | 129 | let source_roots = source_root_config.partition(vfs); |
130 | analysis_change.set_roots(source_roots); | 130 | analysis_change.set_roots(source_roots); |
131 | 131 | ||
132 | analysis_change.set_crate_graph(crate_graph); | 132 | analysis_change.set_crate_graph(crate_graph); |
diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index d4b9db362..2f63c26ce 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs | |||
@@ -47,7 +47,7 @@ impl DiagnosticCollection { | |||
47 | ) { | 47 | ) { |
48 | let diagnostics = self.check.entry(file_id).or_default(); | 48 | let diagnostics = self.check.entry(file_id).or_default(); |
49 | for existing_diagnostic in diagnostics.iter() { | 49 | for existing_diagnostic in diagnostics.iter() { |
50 | if are_diagnostics_equal(&existing_diagnostic, &diagnostic) { | 50 | if are_diagnostics_equal(existing_diagnostic, &diagnostic) { |
51 | return; | 51 | return; |
52 | } | 52 | } |
53 | } | 53 | } |
diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index 82dd0da9a..8594d923c 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs | |||
@@ -224,7 +224,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
224 | 224 | ||
225 | let mut message = rd.message.clone(); | 225 | let mut message = rd.message.clone(); |
226 | for child in &rd.children { | 226 | for child in &rd.children { |
227 | let child = map_rust_child_diagnostic(config, workspace_root, &child); | 227 | let child = map_rust_child_diagnostic(config, workspace_root, child); |
228 | match child { | 228 | match child { |
229 | MappedRustChildDiagnostic::SubDiagnostic(sub) => { | 229 | MappedRustChildDiagnostic::SubDiagnostic(sub) => { |
230 | subdiagnostics.push(sub); | 230 | subdiagnostics.push(sub); |
@@ -268,7 +268,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
268 | primary_spans | 268 | primary_spans |
269 | .iter() | 269 | .iter() |
270 | .flat_map(|primary_span| { | 270 | .flat_map(|primary_span| { |
271 | let primary_location = primary_location(config, workspace_root, &primary_span); | 271 | let primary_location = primary_location(config, workspace_root, primary_span); |
272 | 272 | ||
273 | let mut message = message.clone(); | 273 | let mut message = message.clone(); |
274 | if needs_primary_span_label { | 274 | if needs_primary_span_label { |
@@ -298,7 +298,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
298 | // generated that code. | 298 | // generated that code. |
299 | let is_in_macro_call = i != 0; | 299 | let is_in_macro_call = i != 0; |
300 | 300 | ||
301 | let secondary_location = location(config, workspace_root, &span); | 301 | let secondary_location = location(config, workspace_root, span); |
302 | if secondary_location == primary_location { | 302 | if secondary_location == primary_location { |
303 | continue; | 303 | continue; |
304 | } | 304 | } |
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 582a89667..583900cfe 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs | |||
@@ -194,7 +194,7 @@ impl GlobalState { | |||
194 | change.change_file(file.file_id, text); | 194 | change.change_file(file.file_id, text); |
195 | } | 195 | } |
196 | if has_fs_changes { | 196 | if has_fs_changes { |
197 | let roots = self.source_root_config.partition(&vfs); | 197 | let roots = self.source_root_config.partition(vfs); |
198 | change.set_roots(roots); | 198 | change.set_roots(roots); |
199 | } | 199 | } |
200 | change | 200 | change |
@@ -291,7 +291,7 @@ impl GlobalStateSnapshot { | |||
291 | } | 291 | } |
292 | 292 | ||
293 | pub(crate) fn url_file_version(&self, url: &Url) -> Option<i32> { | 293 | pub(crate) fn url_file_version(&self, url: &Url) -> Option<i32> { |
294 | let path = from_proto::vfs_path(&url).ok()?; | 294 | let path = from_proto::vfs_path(url).ok()?; |
295 | Some(self.mem_docs.get(&path)?.version) | 295 | Some(self.mem_docs.get(&path)?.version) |
296 | } | 296 | } |
297 | 297 | ||
@@ -300,7 +300,7 @@ impl GlobalStateSnapshot { | |||
300 | base.pop(); | 300 | base.pop(); |
301 | let path = base.join(&path.path).unwrap(); | 301 | let path = base.join(&path.path).unwrap(); |
302 | let path = path.as_path().unwrap(); | 302 | let path = path.as_path().unwrap(); |
303 | url_from_abs_path(&path) | 303 | url_from_abs_path(path) |
304 | } | 304 | } |
305 | 305 | ||
306 | pub(crate) fn cargo_target_for_crate_root( | 306 | pub(crate) fn cargo_target_for_crate_root( |
@@ -312,7 +312,7 @@ impl GlobalStateSnapshot { | |||
312 | let path = path.as_path()?; | 312 | let path = path.as_path()?; |
313 | self.workspaces.iter().find_map(|ws| match ws { | 313 | self.workspaces.iter().find_map(|ws| match ws { |
314 | ProjectWorkspace::Cargo { cargo, .. } => { | 314 | ProjectWorkspace::Cargo { cargo, .. } => { |
315 | cargo.target_by_root(&path).map(|it| (cargo, it)) | 315 | cargo.target_by_root(path).map(|it| (cargo, it)) |
316 | } | 316 | } |
317 | ProjectWorkspace::Json { .. } => None, | 317 | ProjectWorkspace::Json { .. } => None, |
318 | ProjectWorkspace::DetachedFiles { .. } => None, | 318 | ProjectWorkspace::DetachedFiles { .. } => None, |
@@ -323,7 +323,7 @@ impl GlobalStateSnapshot { | |||
323 | pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { | 323 | pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { |
324 | let path = vfs.file_path(id); | 324 | let path = vfs.file_path(id); |
325 | let path = path.as_path().unwrap(); | 325 | let path = path.as_path().unwrap(); |
326 | url_from_abs_path(&path) | 326 | url_from_abs_path(path) |
327 | } | 327 | } |
328 | 328 | ||
329 | pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> { | 329 | pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> { |
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 40dd0da3e..59339d401 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs | |||
@@ -1396,7 +1396,7 @@ pub(crate) fn handle_semantic_tokens_full_delta( | |||
1396 | 1396 | ||
1397 | if let Some(prev_id) = &cached_tokens.result_id { | 1397 | if let Some(prev_id) = &cached_tokens.result_id { |
1398 | if *prev_id == params.previous_result_id { | 1398 | if *prev_id == params.previous_result_id { |
1399 | let delta = to_proto::semantic_token_delta(&cached_tokens, &semantic_tokens); | 1399 | let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens); |
1400 | *cached_tokens = semantic_tokens; | 1400 | *cached_tokens = semantic_tokens; |
1401 | return Ok(Some(delta.into())); | 1401 | return Ok(Some(delta.into())); |
1402 | } | 1402 | } |
@@ -1540,7 +1540,7 @@ fn runnable_action_links( | |||
1540 | snap: &GlobalStateSnapshot, | 1540 | snap: &GlobalStateSnapshot, |
1541 | runnable: Runnable, | 1541 | runnable: Runnable, |
1542 | ) -> Option<lsp_ext::CommandLinkGroup> { | 1542 | ) -> Option<lsp_ext::CommandLinkGroup> { |
1543 | let cargo_spec = CargoTargetSpec::for_file(&snap, runnable.nav.file_id).ok()?; | 1543 | let cargo_spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id).ok()?; |
1544 | let hover_config = snap.config.hover(); | 1544 | let hover_config = snap.config.hover(); |
1545 | if !hover_config.runnable() || should_skip_target(&runnable, cargo_spec.as_ref()) { | 1545 | if !hover_config.runnable() || should_skip_target(&runnable, cargo_spec.as_ref()) { |
1546 | return None; | 1546 | return None; |
@@ -1624,7 +1624,7 @@ fn run_rustfmt( | |||
1624 | text_document: TextDocumentIdentifier, | 1624 | text_document: TextDocumentIdentifier, |
1625 | range: Option<lsp_types::Range>, | 1625 | range: Option<lsp_types::Range>, |
1626 | ) -> Result<Option<Vec<lsp_types::TextEdit>>> { | 1626 | ) -> Result<Option<Vec<lsp_types::TextEdit>>> { |
1627 | let file_id = from_proto::file_id(&snap, &text_document.uri)?; | 1627 | let file_id = from_proto::file_id(snap, &text_document.uri)?; |
1628 | let file = snap.analysis.file_text(file_id)?; | 1628 | let file = snap.analysis.file_text(file_id)?; |
1629 | let crate_ids = snap.analysis.crate_for(file_id)?; | 1629 | let crate_ids = snap.analysis.crate_for(file_id)?; |
1630 | 1630 | ||
@@ -1671,7 +1671,7 @@ fn run_rustfmt( | |||
1671 | .into()); | 1671 | .into()); |
1672 | } | 1672 | } |
1673 | 1673 | ||
1674 | let frange = from_proto::file_range(&snap, text_document, range)?; | 1674 | let frange = from_proto::file_range(snap, text_document, range)?; |
1675 | let start_line = line_index.index.line_col(frange.range.start()).line; | 1675 | let start_line = line_index.index.line_col(frange.range.start()).line; |
1676 | let end_line = line_index.index.line_col(frange.range.end()).line; | 1676 | let end_line = line_index.index.line_col(frange.range.end()).line; |
1677 | 1677 | ||
diff --git a/crates/rust-analyzer/src/lsp_utils.rs b/crates/rust-analyzer/src/lsp_utils.rs index 8000b5490..087c26a71 100644 --- a/crates/rust-analyzer/src/lsp_utils.rs +++ b/crates/rust-analyzer/src/lsp_utils.rs | |||
@@ -124,7 +124,7 @@ pub(crate) fn apply_document_changes( | |||
124 | match change.range { | 124 | match change.range { |
125 | Some(range) => { | 125 | Some(range) => { |
126 | if !index_valid.covers(range.end.line) { | 126 | if !index_valid.covers(range.end.line) { |
127 | line_index.index = Arc::new(ide::LineIndex::new(&old_text)); | 127 | line_index.index = Arc::new(ide::LineIndex::new(old_text)); |
128 | } | 128 | } |
129 | index_valid = IndexValid::UpToLineExclusive(range.start.line); | 129 | index_valid = IndexValid::UpToLineExclusive(range.start.line); |
130 | let range = from_proto::text_range(&line_index, range); | 130 | let range = from_proto::text_range(&line_index, range); |
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 31d8ea9e7..fa5fc6fbf 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -740,7 +740,7 @@ impl GlobalState { | |||
740 | let subscriptions = self | 740 | let subscriptions = self |
741 | .mem_docs | 741 | .mem_docs |
742 | .keys() | 742 | .keys() |
743 | .map(|path| self.vfs.read().0.file_id(&path).unwrap()) | 743 | .map(|path| self.vfs.read().0.file_id(path).unwrap()) |
744 | .collect::<Vec<_>>(); | 744 | .collect::<Vec<_>>(); |
745 | 745 | ||
746 | log::trace!("updating notifications for {:?}", subscriptions); | 746 | log::trace!("updating notifications for {:?}", subscriptions); |
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 7428a3043..e53cd3c7b 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs | |||
@@ -405,7 +405,7 @@ pub(crate) fn semantic_tokens( | |||
405 | text_range = | 405 | text_range = |
406 | TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n')); | 406 | TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n')); |
407 | } | 407 | } |
408 | let range = range(&line_index, text_range); | 408 | let range = range(line_index, text_range); |
409 | builder.push(range, token_index, modifier_bitset); | 409 | builder.push(range, token_index, modifier_bitset); |
410 | } | 410 | } |
411 | } | 411 | } |
@@ -781,7 +781,7 @@ pub(crate) fn snippet_workspace_edit( | |||
781 | document_changes.extend_from_slice(&ops); | 781 | document_changes.extend_from_slice(&ops); |
782 | } | 782 | } |
783 | for (file_id, edit) in source_change.source_file_edits { | 783 | for (file_id, edit) in source_change.source_file_edits { |
784 | let edit = snippet_text_document_edit(&snap, source_change.is_snippet, file_id, edit)?; | 784 | let edit = snippet_text_document_edit(snap, source_change.is_snippet, file_id, edit)?; |
785 | document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit)); | 785 | document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit)); |
786 | } | 786 | } |
787 | let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit { | 787 | let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit { |
@@ -957,7 +957,7 @@ pub(crate) fn code_lens( | |||
957 | let annotation_range = range(&line_index, annotation.range); | 957 | let annotation_range = range(&line_index, annotation.range); |
958 | 958 | ||
959 | let action = run.action(); | 959 | let action = run.action(); |
960 | let r = runnable(&snap, run)?; | 960 | let r = runnable(snap, run)?; |
961 | 961 | ||
962 | let command = if debug { | 962 | let command = if debug { |
963 | command::debug_single(&r) | 963 | command::debug_single(&r) |
@@ -1236,12 +1236,12 @@ fn main() { | |||
1236 | assert_eq!(folds.len(), 4); | 1236 | assert_eq!(folds.len(), 4); |
1237 | 1237 | ||
1238 | let line_index = LineIndex { | 1238 | let line_index = LineIndex { |
1239 | index: Arc::new(ide::LineIndex::new(&text)), | 1239 | index: Arc::new(ide::LineIndex::new(text)), |
1240 | endings: LineEndings::Unix, | 1240 | endings: LineEndings::Unix, |
1241 | encoding: OffsetEncoding::Utf16, | 1241 | encoding: OffsetEncoding::Utf16, |
1242 | }; | 1242 | }; |
1243 | let converted: Vec<lsp_types::FoldingRange> = | 1243 | let converted: Vec<lsp_types::FoldingRange> = |
1244 | folds.into_iter().map(|it| folding_range(&text, &line_index, true, it)).collect(); | 1244 | folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect(); |
1245 | 1245 | ||
1246 | let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)]; | 1246 | let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)]; |
1247 | assert_eq!(converted.len(), expected_lines.len()); | 1247 | assert_eq!(converted.len(), expected_lines.len()); |
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index 9e89209ea..3585132d4 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs | |||
@@ -493,7 +493,7 @@ fn preserves_dos_line_endings() { | |||
493 | } | 493 | } |
494 | 494 | ||
495 | let server = Project::with_fixture( | 495 | let server = Project::with_fixture( |
496 | &" | 496 | " |
497 | //- /Cargo.toml | 497 | //- /Cargo.toml |
498 | [package] | 498 | [package] |
499 | name = \"foo\" | 499 | name = \"foo\" |
@@ -758,7 +758,7 @@ pub fn foo(_input: TokenStream) -> TokenStream { | |||
758 | ```rust | 758 | ```rust |
759 | fn bar() | 759 | fn bar() |
760 | ```"#]] | 760 | ```"#]] |
761 | .assert_eq(&value); | 761 | .assert_eq(value); |
762 | } | 762 | } |
763 | 763 | ||
764 | #[test] | 764 | #[test] |
@@ -795,7 +795,7 @@ fn main() {} | |||
795 | 795 | ||
796 | "#; | 796 | "#; |
797 | let server = | 797 | let server = |
798 | Project::with_fixture(&code).tmp_dir(tmp_dir).server().wait_until_workspace_is_loaded(); | 798 | Project::with_fixture(code).tmp_dir(tmp_dir).server().wait_until_workspace_is_loaded(); |
799 | 799 | ||
800 | //rename same level file | 800 | //rename same level file |
801 | server.request::<WillRenameFiles>( | 801 | server.request::<WillRenameFiles>( |
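The slow-test changes apply the lint at call sites: `Project::with_fixture(&"...")` and `.assert_eq(&value)` passed a `&&str` where the callee wants a `&str`, so the literal or existing reference can be passed directly. A minimal sketch, with a simplified stand-in for the test support's `with_fixture` that is assumed to take a `&str`:

fn with_fixture(fixture: &str) -> usize {
    fixture.lines().count()
}

fn main() {
    // A string literal is already a `&str`; `&"..."` would be a `&&str`
    // that only compiles through deref coercion.
    let code = "//- /Cargo.toml\n[package]\nname = \"foo\"\n";
    assert_eq!(with_fixture(code), 3);
}
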
diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index 75e677762..e22c295f9 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs | |||
@@ -323,7 +323,7 @@ fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Valu | |||
323 | 323 | ||
324 | if !l.is_empty() { | 324 | if !l.is_empty() { |
325 | assert!(!r.is_empty()); | 325 | assert!(!r.is_empty()); |
326 | Some((&l[0], &r[0])) | 326 | Some((l[0], r[0])) |
327 | } else { | 327 | } else { |
328 | assert_eq!(r.len(), 0); | 328 | assert_eq!(r.len(), 0); |
329 | None | 329 | None |
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs index 19107ee38..8698687d8 100644 --- a/crates/syntax/src/ast/edit.rs +++ b/crates/syntax/src/ast/edit.rs | |||
@@ -30,7 +30,7 @@ impl ast::UseTree { | |||
30 | let suffix = if self.path().as_ref() == Some(prefix) && self.use_tree_list().is_none() { | 30 | let suffix = if self.path().as_ref() == Some(prefix) && self.use_tree_list().is_none() { |
31 | make::path_unqualified(make::path_segment_self()) | 31 | make::path_unqualified(make::path_segment_self()) |
32 | } else { | 32 | } else { |
33 | match split_path_prefix(&prefix) { | 33 | match split_path_prefix(prefix) { |
34 | Some(it) => it, | 34 | Some(it) => it, |
35 | None => return self.clone(), | 35 | None => return self.clone(), |
36 | } | 36 | } |
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 4b1e1ccee..ad52d9f54 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -242,7 +242,7 @@ impl ast::ByteString {
             (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
             (Ok(c), true) => {
                 buf.reserve_exact(text.len());
-                buf.extend_from_slice(&text[..char_range.start].as_bytes());
+                buf.extend_from_slice(text[..char_range.start].as_bytes());
                 buf.push(c as u8);
             }
             (Err(_), _) => has_error = true,
diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs
index 431ed0699..001921343 100644
--- a/crates/syntax/src/parsing.rs
+++ b/crates/syntax/src/parsing.rs
@@ -15,7 +15,7 @@ use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
 pub(crate) use crate::parsing::{lexer::*, reparsing::incremental_reparse};
 
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
-    let (tokens, lexer_errors) = tokenize(&text);
+    let (tokens, lexer_errors) = tokenize(text);
 
     let mut token_source = TextTokenSource::new(text, &tokens);
     let mut tree_sink = TextTreeSink::new(text, &tokens);
@@ -33,7 +33,7 @@ pub(crate) fn parse_text_fragment<T: AstNode>(
     text: &str,
     fragment_kind: parser::FragmentKind,
 ) -> Result<T, ()> {
-    let (tokens, lexer_errors) = tokenize(&text);
+    let (tokens, lexer_errors) = tokenize(text);
     if !lexer_errors.is_empty() {
         return Err(());
     }
diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs
index 7c8d0a4c4..ae4844e48 100644
--- a/crates/syntax/src/parsing/lexer.rs
+++ b/crates/syntax/src/parsing/lexer.rs
@@ -144,7 +144,7 @@ fn rustc_token_kind_to_syntax_kind(
         }
 
         rustc_lexer::TokenKind::RawIdent => IDENT,
-        rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(&kind),
+        rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(kind),
 
         rustc_lexer::TokenKind::Lifetime { starts_with_number: false } => LIFETIME_IDENT,
         rustc_lexer::TokenKind::Lifetime { starts_with_number: true } => {
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 304f47b3d..186cc9e74 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -26,11 +26,11 @@ pub(crate) fn incremental_reparse(
     edit: &Indel,
     errors: Vec<SyntaxError>,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
-    if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) {
+    if let Some((green, new_errors, old_range)) = reparse_token(node, edit) {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
 
-    if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) {
+    if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
     None
@@ -52,7 +52,7 @@ fn reparse_token(
                 }
             }
 
-            let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit);
+            let mut new_text = get_text_after_edit(prev_token.clone().into(), edit);
             let (new_token_kind, new_err) = lex_single_syntax_kind(&new_text)?;
 
             if new_token_kind != prev_token_kind
diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs
index 9f2426171..4961ca08d 100644
--- a/crates/syntax/src/tests.rs
+++ b/crates/syntax/src/tests.rs
@@ -69,13 +69,13 @@ fn parser_tests() {
     dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], "rast", |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert_errors_are_absent(&errors, path);
+        assert_errors_are_absent(errors, path);
         parse.debug_dump()
     });
     dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], "rast", |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert_errors_are_present(&errors, path);
+        assert_errors_are_present(errors, path);
         parse.debug_dump()
     });
 }
diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs
index 0a4590c8d..0011f73c9 100644
--- a/crates/vfs/src/file_set.rs
+++ b/crates/vfs/src/file_set.rs
@@ -111,7 +111,7 @@ impl FileSetConfig {
         let mut scratch_space = Vec::new();
         let mut res = vec![FileSet::default(); self.len()];
         for (file_id, path) in vfs.iter() {
-            let root = self.classify(&path, &mut scratch_space);
+            let root = self.classify(path, &mut scratch_space);
             res[root].insert(file_id, path.clone())
         }
         res
diff --git a/xtask/src/codegen/gen_lint_completions.rs b/xtask/src/codegen/gen_lint_completions.rs
index 3b54b2489..4aebb02bd 100644
--- a/xtask/src/codegen/gen_lint_completions.rs
+++ b/xtask/src/codegen/gen_lint_completions.rs
@@ -28,7 +28,7 @@ pub(crate) fn generate_lint_completions() -> Result<()> {
     contents.push('\n');
 
     cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?;
-    generate_descriptor_clippy(&mut contents, &Path::new("./target/clippy_lints.json"))?;
+    generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"))?;
     let contents = reformat(&contents)?;
 
     let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs
index b0b9e30db..5435da76e 100644
--- a/xtask/src/codegen/gen_syntax.rs
+++ b/xtask/src/codegen/gen_syntax.rs
@@ -258,7 +258,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
     for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
         res.push_str(chunk);
         if let Some(doc) = docs.next() {
-            write_doc_comment(&doc, &mut res);
+            write_doc_comment(doc, &mut res);
         }
     }
 
@@ -294,14 +294,14 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
 
     let full_keywords_values = &grammar.keywords;
     let full_keywords =
-        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(&kw)));
+        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
 
     let all_keywords_values =
         grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
     let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
     let all_keywords = all_keywords_values
         .iter()
-        .map(|name| format_ident!("{}_KW", to_upper_snake_case(&name)))
+        .map(|name| format_ident!("{}_KW", to_upper_snake_case(name)))
         .collect::<Vec<_>>();
 
     let literals =
diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index 34679062f..7b190d425 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -71,7 +71,7 @@ impl Metrics {
         Ok(())
     }
     fn measure_analysis_stats_self(&mut self) -> Result<()> {
-        self.measure_analysis_stats_path("self", &".")
+        self.measure_analysis_stats_path("self", ".")
     }
     fn measure_analysis_stats(&mut self, bench: &str) -> Result<()> {
         self.measure_analysis_stats_path(