Diffstat (limited to 'crates/hir_ty/src')
35 files changed, 22854 insertions, 0 deletions
diff --git a/crates/hir_ty/src/autoderef.rs b/crates/hir_ty/src/autoderef.rs
new file mode 100644
index 000000000..ece68183e
--- /dev/null
+++ b/crates/hir_ty/src/autoderef.rs
@@ -0,0 +1,131 @@
1 | //! In certain situations, Rust automatically inserts derefs as necessary: for
2 | //! example, field accesses `foo.bar` still work when `foo` is actually a | ||
3 | //! reference to a type with the field `bar`. This is an approximation of the | ||
4 | //! logic in rustc (which lives in librustc_typeck/check/autoderef.rs). | ||
5 | |||
6 | use std::iter::successors; | ||
7 | |||
8 | use base_db::CrateId; | ||
9 | use hir_def::lang_item::LangItemTarget; | ||
10 | use hir_expand::name::name; | ||
11 | use log::{info, warn}; | ||
12 | |||
13 | use crate::{ | ||
14 | db::HirDatabase, | ||
15 | traits::{InEnvironment, Solution}, | ||
16 | utils::generics, | ||
17 | BoundVar, Canonical, DebruijnIndex, Obligation, Substs, TraitRef, Ty, | ||
18 | }; | ||
19 | |||
20 | const AUTODEREF_RECURSION_LIMIT: usize = 10; | ||
21 | |||
22 | pub fn autoderef<'a>( | ||
23 | db: &'a dyn HirDatabase, | ||
24 | krate: Option<CrateId>, | ||
25 | ty: InEnvironment<Canonical<Ty>>, | ||
26 | ) -> impl Iterator<Item = Canonical<Ty>> + 'a { | ||
27 | let InEnvironment { value: ty, environment } = ty; | ||
28 | successors(Some(ty), move |ty| { | ||
29 | deref(db, krate?, InEnvironment { value: ty, environment: environment.clone() }) | ||
30 | }) | ||
31 | .take(AUTODEREF_RECURSION_LIMIT) | ||
32 | } | ||
33 | |||
34 | pub(crate) fn deref( | ||
35 | db: &dyn HirDatabase, | ||
36 | krate: CrateId, | ||
37 | ty: InEnvironment<&Canonical<Ty>>, | ||
38 | ) -> Option<Canonical<Ty>> { | ||
39 | if let Some(derefed) = ty.value.value.builtin_deref() { | ||
40 | Some(Canonical { value: derefed, kinds: ty.value.kinds.clone() }) | ||
41 | } else { | ||
42 | deref_by_trait(db, krate, ty) | ||
43 | } | ||
44 | } | ||
45 | |||
46 | fn deref_by_trait( | ||
47 | db: &dyn HirDatabase, | ||
48 | krate: CrateId, | ||
49 | ty: InEnvironment<&Canonical<Ty>>, | ||
50 | ) -> Option<Canonical<Ty>> { | ||
51 | let deref_trait = match db.lang_item(krate, "deref".into())? { | ||
52 | LangItemTarget::TraitId(it) => it, | ||
53 | _ => return None, | ||
54 | }; | ||
55 | let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?; | ||
56 | |||
57 | let generic_params = generics(db.upcast(), target.into()); | ||
58 | if generic_params.len() != 1 { | ||
59 | // the Target type + Deref trait should only have one generic parameter, | ||
60 | // namely Deref's Self type | ||
61 | return None; | ||
62 | } | ||
63 | |||
64 | // FIXME make the Canonical / bound var handling nicer | ||
65 | |||
66 | let parameters = | ||
67 | Substs::build_for_generics(&generic_params).push(ty.value.value.clone()).build(); | ||
68 | |||
69 | // Check that the type implements Deref at all | ||
70 | let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() }; | ||
71 | let implements_goal = Canonical { | ||
72 | kinds: ty.value.kinds.clone(), | ||
73 | value: InEnvironment { | ||
74 | value: Obligation::Trait(trait_ref), | ||
75 | environment: ty.environment.clone(), | ||
76 | }, | ||
77 | }; | ||
78 | if db.trait_solve(krate, implements_goal).is_none() { | ||
79 | return None; | ||
80 | } | ||
81 | |||
82 | // Now do the assoc type projection | ||
83 | let projection = super::traits::ProjectionPredicate { | ||
84 | ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.kinds.len())), | ||
85 | projection_ty: super::ProjectionTy { associated_ty: target, parameters }, | ||
86 | }; | ||
87 | |||
88 | let obligation = super::Obligation::Projection(projection); | ||
89 | |||
90 | let in_env = InEnvironment { value: obligation, environment: ty.environment }; | ||
91 | |||
92 | let canonical = | ||
93 | Canonical::new(in_env, ty.value.kinds.iter().copied().chain(Some(super::TyKind::General))); | ||
94 | |||
95 | let solution = db.trait_solve(krate, canonical)?; | ||
96 | |||
97 | match &solution { | ||
98 | Solution::Unique(vars) => { | ||
99 | // FIXME: vars may contain solutions for any inference variables | ||
100 | // that happened to be inside ty. To correctly handle these, we | ||
101 | // would have to pass the solution up to the inference context, but | ||
102 | // that requires a larger refactoring (especially if the deref | ||
103 | // happens during method resolution). So for the moment, we just | ||
104 | // check that we're not in the situation where we would actually
105 | // need to handle the values of the additional variables, i.e. | ||
106 | // they're just being 'passed through'. In the 'standard' case where | ||
107 | // we have `impl<T> Deref for Foo<T> { Target = T }`, that should be | ||
108 | // the case. | ||
109 | |||
110 | // FIXME: if the trait solver decides to truncate the type, these | ||
111 | // assumptions will be broken. We would need to properly introduce | ||
112 | // new variables in that case | ||
113 | |||
114 | for i in 1..vars.0.kinds.len() { | ||
115 | if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) | ||
116 | { | ||
117 | warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution); | ||
118 | return None; | ||
119 | } | ||
120 | } | ||
121 | Some(Canonical { | ||
122 | value: vars.0.value[vars.0.value.len() - 1].clone(), | ||
123 | kinds: vars.0.kinds.clone(), | ||
124 | }) | ||
125 | } | ||
126 | Solution::Ambig(_) => { | ||
127 | info!("Ambiguous solution for derefing {:?}: {:?}", ty.value, solution); | ||
128 | None | ||
129 | } | ||
130 | } | ||
131 | } | ||
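`autoderef` builds the deref chain lazily: `std::iter::successors` keeps asking `deref` for the next `Deref::Target`, and `take(AUTODEREF_RECURSION_LIMIT)` caps the walk. Below is a minimal, self-contained sketch of that iteration pattern; `ToyTy` and `toy_deref` are made-up stand-ins for `Canonical<Ty>` and the trait-solver-backed `deref` above.

```rust
use std::iter::successors;

// Hypothetical stand-in for a type in the deref chain; the real code walks
// `Canonical<Ty>` values and asks the trait solver for each `Deref::Target`.
#[derive(Debug, Clone, PartialEq)]
enum ToyTy {
    Ref(Box<ToyTy>),
    Unit,
}

const AUTODEREF_RECURSION_LIMIT: usize = 10;

// One "deref" step: peel a reference if there is one, otherwise stop.
fn toy_deref(ty: &ToyTy) -> Option<ToyTy> {
    match ty {
        ToyTy::Ref(inner) => Some((**inner).clone()),
        ToyTy::Unit => None,
    }
}

// Same shape as `autoderef`: lazily walk the chain, bounded by the limit.
fn toy_autoderef(ty: ToyTy) -> impl Iterator<Item = ToyTy> {
    successors(Some(ty), |ty| toy_deref(ty)).take(AUTODEREF_RECURSION_LIMIT)
}

fn main() {
    let ty = ToyTy::Ref(Box::new(ToyTy::Ref(Box::new(ToyTy::Unit))));
    // Yields the type itself plus each successive deref target: `&&(), &(), ()` in toy form.
    let chain: Vec<_> = toy_autoderef(ty).collect();
    assert_eq!(chain.len(), 3);
}
```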
diff --git a/crates/hir_ty/src/db.rs b/crates/hir_ty/src/db.rs
new file mode 100644
index 000000000..25cf9eb7f
--- /dev/null
+++ b/crates/hir_ty/src/db.rs
@@ -0,0 +1,158 @@
1 | //! FIXME: write short doc here | ||
2 | |||
3 | use std::sync::Arc; | ||
4 | |||
5 | use arena::map::ArenaMap; | ||
6 | use base_db::{impl_intern_key, salsa, CrateId, Upcast}; | ||
7 | use hir_def::{ | ||
8 | db::DefDatabase, expr::ExprId, DefWithBodyId, FunctionId, GenericDefId, ImplId, LocalFieldId, | ||
9 | TypeParamId, VariantId, | ||
10 | }; | ||
11 | |||
12 | use crate::{ | ||
13 | method_resolution::{InherentImpls, TraitImpls}, | ||
14 | traits::chalk, | ||
15 | Binders, CallableDefId, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig, | ||
16 | ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId, | ||
17 | }; | ||
18 | use hir_expand::name::Name; | ||
19 | |||
20 | #[salsa::query_group(HirDatabaseStorage)] | ||
21 | pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { | ||
22 | #[salsa::invoke(infer_wait)] | ||
23 | #[salsa::transparent] | ||
24 | fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>; | ||
25 | |||
26 | #[salsa::invoke(crate::infer::infer_query)] | ||
27 | fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>; | ||
28 | |||
29 | #[salsa::invoke(crate::lower::ty_query)] | ||
30 | #[salsa::cycle(crate::lower::ty_recover)] | ||
31 | fn ty(&self, def: TyDefId) -> Binders<Ty>; | ||
32 | |||
33 | #[salsa::invoke(crate::lower::value_ty_query)] | ||
34 | fn value_ty(&self, def: ValueTyDefId) -> Binders<Ty>; | ||
35 | |||
36 | #[salsa::invoke(crate::lower::impl_self_ty_query)] | ||
37 | #[salsa::cycle(crate::lower::impl_self_ty_recover)] | ||
38 | fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>; | ||
39 | |||
40 | #[salsa::invoke(crate::lower::impl_trait_query)] | ||
41 | fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>; | ||
42 | |||
43 | #[salsa::invoke(crate::lower::field_types_query)] | ||
44 | fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>; | ||
45 | |||
46 | #[salsa::invoke(crate::callable_item_sig)] | ||
47 | fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig; | ||
48 | |||
49 | #[salsa::invoke(crate::lower::return_type_impl_traits)] | ||
50 | fn return_type_impl_traits( | ||
51 | &self, | ||
52 | def: FunctionId, | ||
53 | ) -> Option<Arc<Binders<ReturnTypeImplTraits>>>; | ||
54 | |||
55 | #[salsa::invoke(crate::lower::generic_predicates_for_param_query)] | ||
56 | #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)] | ||
57 | fn generic_predicates_for_param( | ||
58 | &self, | ||
59 | param_id: TypeParamId, | ||
60 | ) -> Arc<[Binders<GenericPredicate>]>; | ||
61 | |||
62 | #[salsa::invoke(crate::lower::generic_predicates_query)] | ||
63 | fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<GenericPredicate>]>; | ||
64 | |||
65 | #[salsa::invoke(crate::lower::generic_defaults_query)] | ||
66 | fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>; | ||
67 | |||
68 | #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] | ||
69 | fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>; | ||
70 | |||
71 | #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)] | ||
72 | fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>; | ||
73 | |||
74 | #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)] | ||
75 | fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<TraitImpls>; | ||
76 | |||
77 | // Interned IDs for Chalk integration | ||
78 | #[salsa::interned] | ||
79 | fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId; | ||
80 | #[salsa::interned] | ||
81 | fn intern_type_param_id(&self, param_id: TypeParamId) -> GlobalTypeParamId; | ||
82 | #[salsa::interned] | ||
83 | fn intern_impl_trait_id(&self, id: OpaqueTyId) -> InternedOpaqueTyId; | ||
84 | #[salsa::interned] | ||
85 | fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> ClosureId; | ||
86 | |||
87 | #[salsa::invoke(chalk::associated_ty_data_query)] | ||
88 | fn associated_ty_data(&self, id: chalk::AssocTypeId) -> Arc<chalk::AssociatedTyDatum>; | ||
89 | |||
90 | #[salsa::invoke(chalk::trait_datum_query)] | ||
91 | fn trait_datum(&self, krate: CrateId, trait_id: chalk::TraitId) -> Arc<chalk::TraitDatum>; | ||
92 | |||
93 | #[salsa::invoke(chalk::struct_datum_query)] | ||
94 | fn struct_datum(&self, krate: CrateId, struct_id: chalk::AdtId) -> Arc<chalk::StructDatum>; | ||
95 | |||
96 | #[salsa::invoke(crate::traits::chalk::impl_datum_query)] | ||
97 | fn impl_datum(&self, krate: CrateId, impl_id: chalk::ImplId) -> Arc<chalk::ImplDatum>; | ||
98 | |||
99 | #[salsa::invoke(crate::traits::chalk::fn_def_datum_query)] | ||
100 | fn fn_def_datum(&self, krate: CrateId, fn_def_id: chalk::FnDefId) -> Arc<chalk::FnDefDatum>; | ||
101 | |||
102 | #[salsa::invoke(crate::traits::chalk::associated_ty_value_query)] | ||
103 | fn associated_ty_value( | ||
104 | &self, | ||
105 | krate: CrateId, | ||
106 | id: chalk::AssociatedTyValueId, | ||
107 | ) -> Arc<chalk::AssociatedTyValue>; | ||
108 | |||
109 | #[salsa::invoke(crate::traits::trait_solve_query)] | ||
110 | fn trait_solve( | ||
111 | &self, | ||
112 | krate: CrateId, | ||
113 | goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>, | ||
114 | ) -> Option<crate::traits::Solution>; | ||
115 | |||
116 | #[salsa::invoke(crate::traits::chalk::program_clauses_for_chalk_env_query)] | ||
117 | fn program_clauses_for_chalk_env( | ||
118 | &self, | ||
119 | krate: CrateId, | ||
120 | env: chalk_ir::Environment<chalk::Interner>, | ||
121 | ) -> chalk_ir::ProgramClauses<chalk::Interner>; | ||
122 | } | ||
123 | |||
124 | fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { | ||
125 | let _p = profile::span("infer:wait").detail(|| match def { | ||
126 | DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(), | ||
127 | DefWithBodyId::StaticId(it) => { | ||
128 | db.static_data(it).name.clone().unwrap_or_else(Name::missing).to_string() | ||
129 | } | ||
130 | DefWithBodyId::ConstId(it) => { | ||
131 | db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string() | ||
132 | } | ||
133 | }); | ||
134 | db.infer_query(def) | ||
135 | } | ||
136 | |||
137 | #[test] | ||
138 | fn hir_database_is_object_safe() { | ||
139 | fn _assert_object_safe(_: &dyn HirDatabase) {} | ||
140 | } | ||
141 | |||
142 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||
143 | pub struct GlobalTypeParamId(salsa::InternId); | ||
144 | impl_intern_key!(GlobalTypeParamId); | ||
145 | |||
146 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||
147 | pub struct InternedOpaqueTyId(salsa::InternId); | ||
148 | impl_intern_key!(InternedOpaqueTyId); | ||
149 | |||
150 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||
151 | pub struct ClosureId(salsa::InternId); | ||
152 | impl_intern_key!(ClosureId); | ||
153 | |||
154 | /// This exists just for Chalk, because Chalk just has a single `FnDefId` where | ||
155 | /// we have different IDs for struct and enum variant constructors. | ||
156 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] | ||
157 | pub struct InternedCallableDefId(salsa::InternId); | ||
158 | impl_intern_key!(InternedCallableDefId); | ||
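The `GlobalTypeParamId`/`InternedOpaqueTyId`/`ClosureId`/`InternedCallableDefId` newtypes exist because Chalk wants small, copyable integer IDs rather than the richer hir_def IDs. The `#[salsa::interned]` queries above provide the bidirectional mapping; the sketch below illustrates the interning idea with a hypothetical hand-rolled `Interner`, minus salsa's caching and incremental recomputation.

```rust
use std::collections::HashMap;

// Hypothetical stand-in for `salsa::InternId`: a small, copyable index that
// can be mapped back to the interned value.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct InternId(u32);

// Placeholder for the kind of value hir_ty interns for Chalk.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct CallableDef(String);

// A toy interner: interning the same value twice yields the same ID, and the
// ID can be looked up again to recover the value.
#[derive(Default)]
struct Interner {
    map: HashMap<CallableDef, InternId>,
    values: Vec<CallableDef>,
}

impl Interner {
    fn intern(&mut self, value: CallableDef) -> InternId {
        if let Some(&id) = self.map.get(&value) {
            return id;
        }
        let id = InternId(self.values.len() as u32);
        self.values.push(value.clone());
        self.map.insert(value, id);
        id
    }

    fn lookup(&self, id: InternId) -> &CallableDef {
        &self.values[id.0 as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern(CallableDef("foo::bar".into()));
    let b = interner.intern(CallableDef("foo::bar".into()));
    assert_eq!(a, b); // same value, same ID
    assert_eq!(interner.lookup(a).0, "foo::bar");
}
```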
diff --git a/crates/hir_ty/src/diagnostics.rs b/crates/hir_ty/src/diagnostics.rs
new file mode 100644
index 000000000..ae0cf8d09
--- /dev/null
+++ b/crates/hir_ty/src/diagnostics.rs
@@ -0,0 +1,444 @@
1 | //! FIXME: write short doc here | ||
2 | mod expr; | ||
3 | mod match_check; | ||
4 | mod unsafe_check; | ||
5 | |||
6 | use std::any::Any; | ||
7 | |||
8 | use hir_def::DefWithBodyId; | ||
9 | use hir_expand::diagnostics::{Diagnostic, DiagnosticSink}; | ||
10 | use hir_expand::{name::Name, HirFileId, InFile}; | ||
11 | use stdx::format_to; | ||
12 | use syntax::{ast, AstPtr, SyntaxNodePtr}; | ||
13 | |||
14 | use crate::db::HirDatabase; | ||
15 | |||
16 | pub use crate::diagnostics::expr::{record_literal_missing_fields, record_pattern_missing_fields}; | ||
17 | |||
18 | pub fn validate_body(db: &dyn HirDatabase, owner: DefWithBodyId, sink: &mut DiagnosticSink<'_>) { | ||
19 | let _p = profile::span("validate_body"); | ||
20 | let infer = db.infer(owner); | ||
21 | infer.add_diagnostics(db, owner, sink); | ||
22 | let mut validator = expr::ExprValidator::new(owner, infer.clone(), sink); | ||
23 | validator.validate_body(db); | ||
24 | let mut validator = unsafe_check::UnsafeValidator::new(owner, infer, sink); | ||
25 | validator.validate_body(db); | ||
26 | } | ||
27 | |||
28 | #[derive(Debug)] | ||
29 | pub struct NoSuchField { | ||
30 | pub file: HirFileId, | ||
31 | pub field: AstPtr<ast::RecordExprField>, | ||
32 | } | ||
33 | |||
34 | impl Diagnostic for NoSuchField { | ||
35 | fn message(&self) -> String { | ||
36 | "no such field".to_string() | ||
37 | } | ||
38 | |||
39 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
40 | InFile::new(self.file, self.field.clone().into()) | ||
41 | } | ||
42 | |||
43 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
44 | self | ||
45 | } | ||
46 | } | ||
47 | |||
48 | #[derive(Debug)] | ||
49 | pub struct MissingFields { | ||
50 | pub file: HirFileId, | ||
51 | pub field_list_parent: AstPtr<ast::RecordExpr>, | ||
52 | pub field_list_parent_path: Option<AstPtr<ast::Path>>, | ||
53 | pub missed_fields: Vec<Name>, | ||
54 | } | ||
55 | |||
56 | impl Diagnostic for MissingFields { | ||
57 | fn message(&self) -> String { | ||
58 | let mut buf = String::from("Missing structure fields:\n"); | ||
59 | for field in &self.missed_fields { | ||
60 | format_to!(buf, "- {}\n", field); | ||
61 | } | ||
62 | buf | ||
63 | } | ||
64 | |||
65 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
66 | InFile { | ||
67 | file_id: self.file, | ||
68 | value: self | ||
69 | .field_list_parent_path | ||
70 | .clone() | ||
71 | .map(SyntaxNodePtr::from) | ||
72 | .unwrap_or_else(|| self.field_list_parent.clone().into()), | ||
73 | } | ||
74 | } | ||
75 | |||
76 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
77 | self | ||
78 | } | ||
79 | } | ||
80 | |||
81 | #[derive(Debug)] | ||
82 | pub struct MissingPatFields { | ||
83 | pub file: HirFileId, | ||
84 | pub field_list_parent: AstPtr<ast::RecordPat>, | ||
85 | pub field_list_parent_path: Option<AstPtr<ast::Path>>, | ||
86 | pub missed_fields: Vec<Name>, | ||
87 | } | ||
88 | |||
89 | impl Diagnostic for MissingPatFields { | ||
90 | fn message(&self) -> String { | ||
91 | let mut buf = String::from("Missing structure fields:\n"); | ||
92 | for field in &self.missed_fields { | ||
93 | format_to!(buf, "- {}\n", field); | ||
94 | } | ||
95 | buf | ||
96 | } | ||
97 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
98 | InFile { | ||
99 | file_id: self.file, | ||
100 | value: self | ||
101 | .field_list_parent_path | ||
102 | .clone() | ||
103 | .map(SyntaxNodePtr::from) | ||
104 | .unwrap_or_else(|| self.field_list_parent.clone().into()), | ||
105 | } | ||
106 | } | ||
107 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
108 | self | ||
109 | } | ||
110 | } | ||
111 | |||
112 | #[derive(Debug)] | ||
113 | pub struct MissingMatchArms { | ||
114 | pub file: HirFileId, | ||
115 | pub match_expr: AstPtr<ast::Expr>, | ||
116 | pub arms: AstPtr<ast::MatchArmList>, | ||
117 | } | ||
118 | |||
119 | impl Diagnostic for MissingMatchArms { | ||
120 | fn message(&self) -> String { | ||
121 | String::from("Missing match arm") | ||
122 | } | ||
123 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
124 | InFile { file_id: self.file, value: self.match_expr.clone().into() } | ||
125 | } | ||
126 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
127 | self | ||
128 | } | ||
129 | } | ||
130 | |||
131 | #[derive(Debug)] | ||
132 | pub struct MissingOkInTailExpr { | ||
133 | pub file: HirFileId, | ||
134 | pub expr: AstPtr<ast::Expr>, | ||
135 | } | ||
136 | |||
137 | impl Diagnostic for MissingOkInTailExpr { | ||
138 | fn message(&self) -> String { | ||
139 | "wrap return expression in Ok".to_string() | ||
140 | } | ||
141 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
142 | InFile { file_id: self.file, value: self.expr.clone().into() } | ||
143 | } | ||
144 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
145 | self | ||
146 | } | ||
147 | } | ||
148 | |||
149 | #[derive(Debug)] | ||
150 | pub struct BreakOutsideOfLoop { | ||
151 | pub file: HirFileId, | ||
152 | pub expr: AstPtr<ast::Expr>, | ||
153 | } | ||
154 | |||
155 | impl Diagnostic for BreakOutsideOfLoop { | ||
156 | fn message(&self) -> String { | ||
157 | "break outside of loop".to_string() | ||
158 | } | ||
159 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
160 | InFile { file_id: self.file, value: self.expr.clone().into() } | ||
161 | } | ||
162 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
163 | self | ||
164 | } | ||
165 | } | ||
166 | |||
167 | #[derive(Debug)] | ||
168 | pub struct MissingUnsafe { | ||
169 | pub file: HirFileId, | ||
170 | pub expr: AstPtr<ast::Expr>, | ||
171 | } | ||
172 | |||
173 | impl Diagnostic for MissingUnsafe { | ||
174 | fn message(&self) -> String { | ||
175 | format!("This operation is unsafe and requires an unsafe function or block") | ||
176 | } | ||
177 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
178 | InFile { file_id: self.file, value: self.expr.clone().into() } | ||
179 | } | ||
180 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
181 | self | ||
182 | } | ||
183 | } | ||
184 | |||
185 | #[derive(Debug)] | ||
186 | pub struct MismatchedArgCount { | ||
187 | pub file: HirFileId, | ||
188 | pub call_expr: AstPtr<ast::Expr>, | ||
189 | pub expected: usize, | ||
190 | pub found: usize, | ||
191 | } | ||
192 | |||
193 | impl Diagnostic for MismatchedArgCount { | ||
194 | fn message(&self) -> String { | ||
195 | let s = if self.expected == 1 { "" } else { "s" }; | ||
196 | format!("Expected {} argument{}, found {}", self.expected, s, self.found) | ||
197 | } | ||
198 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
199 | InFile { file_id: self.file, value: self.call_expr.clone().into() } | ||
200 | } | ||
201 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
202 | self | ||
203 | } | ||
204 | fn is_experimental(&self) -> bool { | ||
205 | true | ||
206 | } | ||
207 | } | ||
208 | |||
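All of these diagnostic structs follow the same shape: carry a `HirFileId` plus an `AstPtr` into the offending syntax, and implement `Diagnostic` with a human-readable `message`, a `display_source` location, and an `as_any` hook so consumers can downcast back to the concrete type. A simplified, self-contained sketch of that pattern follows; the trait here is a stripped-down stand-in for `hir_expand::diagnostics::Diagnostic`, and the `Vec` stands in for a `DiagnosticSink`.

```rust
use std::any::Any;

// Simplified version of the trait these structs implement; the real one also
// carries a source location (`display_source`) and an `is_experimental` flag.
trait Diagnostic: Any + Send + 'static {
    fn message(&self) -> String;
    fn as_any(&self) -> &(dyn Any + Send + 'static);
}

#[derive(Debug)]
struct MismatchedArgCount {
    expected: usize,
    found: usize,
}

impl Diagnostic for MismatchedArgCount {
    fn message(&self) -> String {
        format!("Expected {} argument(s), found {}", self.expected, self.found)
    }
    fn as_any(&self) -> &(dyn Any + Send + 'static) {
        self
    }
}

fn main() {
    // A sink is, in essence, a list of callbacks over `&dyn Diagnostic`; the
    // `as_any` hook lets a consumer recover the concrete type when it wants
    // structured data (e.g. to offer a fix) instead of just the message text.
    let diagnostics: Vec<Box<dyn Diagnostic>> =
        vec![Box::new(MismatchedArgCount { expected: 1, found: 2 })];

    for d in &diagnostics {
        if let Some(it) = d.as_any().downcast_ref::<MismatchedArgCount>() {
            println!("arg count off by {}", it.found as isize - it.expected as isize);
        } else {
            println!("{}", d.message());
        }
    }
}
```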
209 | #[cfg(test)] | ||
210 | mod tests { | ||
211 | use base_db::{fixture::WithFixture, FileId, SourceDatabase, SourceDatabaseExt}; | ||
212 | use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId}; | ||
213 | use hir_expand::{ | ||
214 | db::AstDatabase, | ||
215 | diagnostics::{Diagnostic, DiagnosticSinkBuilder}, | ||
216 | }; | ||
217 | use rustc_hash::FxHashMap; | ||
218 | use syntax::{TextRange, TextSize}; | ||
219 | |||
220 | use crate::{diagnostics::validate_body, test_db::TestDB}; | ||
221 | |||
222 | impl TestDB { | ||
223 | fn diagnostics<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) { | ||
224 | let crate_graph = self.crate_graph(); | ||
225 | for krate in crate_graph.iter() { | ||
226 | let crate_def_map = self.crate_def_map(krate); | ||
227 | |||
228 | let mut fns = Vec::new(); | ||
229 | for (module_id, _) in crate_def_map.modules.iter() { | ||
230 | for decl in crate_def_map[module_id].scope.declarations() { | ||
231 | if let ModuleDefId::FunctionId(f) = decl { | ||
232 | fns.push(f) | ||
233 | } | ||
234 | } | ||
235 | |||
236 | for impl_id in crate_def_map[module_id].scope.impls() { | ||
237 | let impl_data = self.impl_data(impl_id); | ||
238 | for item in impl_data.items.iter() { | ||
239 | if let AssocItemId::FunctionId(f) = item { | ||
240 | fns.push(*f) | ||
241 | } | ||
242 | } | ||
243 | } | ||
244 | } | ||
245 | |||
246 | for f in fns { | ||
247 | let mut sink = DiagnosticSinkBuilder::new().build(&mut cb); | ||
248 | validate_body(self, f.into(), &mut sink); | ||
249 | } | ||
250 | } | ||
251 | } | ||
252 | } | ||
253 | |||
254 | pub(crate) fn check_diagnostics(ra_fixture: &str) { | ||
255 | let db = TestDB::with_files(ra_fixture); | ||
256 | let annotations = db.extract_annotations(); | ||
257 | |||
258 | let mut actual: FxHashMap<FileId, Vec<(TextRange, String)>> = FxHashMap::default(); | ||
259 | db.diagnostics(|d| { | ||
260 | let src = d.display_source(); | ||
261 | let root = db.parse_or_expand(src.file_id).unwrap(); | ||
262 | // FIXME: macros... | ||
263 | let file_id = src.file_id.original_file(&db); | ||
264 | let range = src.value.to_node(&root).text_range(); | ||
265 | let message = d.message().to_owned(); | ||
266 | actual.entry(file_id).or_default().push((range, message)); | ||
267 | }); | ||
268 | |||
269 | for (file_id, diags) in actual.iter_mut() { | ||
270 | diags.sort_by_key(|it| it.0.start()); | ||
271 | let text = db.file_text(*file_id); | ||
272 | // For multiline spans, place them on line start | ||
273 | for (range, content) in diags { | ||
274 | if text[*range].contains('\n') { | ||
275 | *range = TextRange::new(range.start(), range.start() + TextSize::from(1)); | ||
276 | *content = format!("... {}", content); | ||
277 | } | ||
278 | } | ||
279 | } | ||
280 | |||
281 | assert_eq!(annotations, actual); | ||
282 | } | ||
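The multi-line normalization above keeps the inline `//^ ...` annotations one line long: a diagnostic whose range crosses a newline is collapsed to its first character and its message gets a "... " prefix. A tiny sketch of just that step, with plain tuples standing in for `TextRange`:

```rust
// Sketch of the multi-line normalization done in `check_diagnostics` above.
// Byte offsets replace `TextRange`; the logic is the same: spans containing a
// newline are shrunk to one character and the message is prefixed with "...".
fn normalize(text: &str, mut range: (usize, usize), mut message: String) -> ((usize, usize), String) {
    if text[range.0..range.1].contains('\n') {
        range = (range.0, range.0 + 1);
        message = format!("... {}", message);
    }
    (range, message)
}

fn main() {
    let text = "fn f() {\n    break;\n}\n";
    let whole_fn = (0, text.len());
    let (range, msg) = normalize(text, whole_fn, "break outside of loop".to_string());
    assert_eq!(range, (0, 1));
    assert!(msg.starts_with("... "));
}
```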
283 | |||
284 | #[test] | ||
285 | fn no_such_field_diagnostics() { | ||
286 | check_diagnostics( | ||
287 | r#" | ||
288 | struct S { foo: i32, bar: () } | ||
289 | impl S { | ||
290 | fn new() -> S { | ||
291 | S { | ||
292 | //^ Missing structure fields: | ||
293 | //| - bar | ||
294 | foo: 92, | ||
295 | baz: 62, | ||
296 | //^^^^^^^ no such field | ||
297 | } | ||
298 | } | ||
299 | } | ||
300 | "#, | ||
301 | ); | ||
302 | } | ||
303 | #[test] | ||
304 | fn no_such_field_with_feature_flag_diagnostics() { | ||
305 | check_diagnostics( | ||
306 | r#" | ||
307 | //- /lib.rs crate:foo cfg:feature=foo | ||
308 | struct MyStruct { | ||
309 | my_val: usize, | ||
310 | #[cfg(feature = "foo")] | ||
311 | bar: bool, | ||
312 | } | ||
313 | |||
314 | impl MyStruct { | ||
315 | #[cfg(feature = "foo")] | ||
316 | pub(crate) fn new(my_val: usize, bar: bool) -> Self { | ||
317 | Self { my_val, bar } | ||
318 | } | ||
319 | #[cfg(not(feature = "foo"))] | ||
320 | pub(crate) fn new(my_val: usize, _bar: bool) -> Self { | ||
321 | Self { my_val } | ||
322 | } | ||
323 | } | ||
324 | "#, | ||
325 | ); | ||
326 | } | ||
327 | |||
328 | #[test] | ||
329 | fn no_such_field_enum_with_feature_flag_diagnostics() { | ||
330 | check_diagnostics( | ||
331 | r#" | ||
332 | //- /lib.rs crate:foo cfg:feature=foo | ||
333 | enum Foo { | ||
334 | #[cfg(not(feature = "foo"))] | ||
335 | Buz, | ||
336 | #[cfg(feature = "foo")] | ||
337 | Bar, | ||
338 | Baz | ||
339 | } | ||
340 | |||
341 | fn test_fn(f: Foo) { | ||
342 | match f { | ||
343 | Foo::Bar => {}, | ||
344 | Foo::Baz => {}, | ||
345 | } | ||
346 | } | ||
347 | "#, | ||
348 | ); | ||
349 | } | ||
350 | |||
351 | #[test] | ||
352 | fn no_such_field_with_feature_flag_diagnostics_on_struct_lit() { | ||
353 | check_diagnostics( | ||
354 | r#" | ||
355 | //- /lib.rs crate:foo cfg:feature=foo | ||
356 | struct S { | ||
357 | #[cfg(feature = "foo")] | ||
358 | foo: u32, | ||
359 | #[cfg(not(feature = "foo"))] | ||
360 | bar: u32, | ||
361 | } | ||
362 | |||
363 | impl S { | ||
364 | #[cfg(feature = "foo")] | ||
365 | fn new(foo: u32) -> Self { | ||
366 | Self { foo } | ||
367 | } | ||
368 | #[cfg(not(feature = "foo"))] | ||
369 | fn new(bar: u32) -> Self { | ||
370 | Self { bar } | ||
371 | } | ||
372 | fn new2(bar: u32) -> Self { | ||
373 | #[cfg(feature = "foo")] | ||
374 | { Self { foo: bar } } | ||
375 | #[cfg(not(feature = "foo"))] | ||
376 | { Self { bar } } | ||
377 | } | ||
378 | fn new2(val: u32) -> Self { | ||
379 | Self { | ||
380 | #[cfg(feature = "foo")] | ||
381 | foo: val, | ||
382 | #[cfg(not(feature = "foo"))] | ||
383 | bar: val, | ||
384 | } | ||
385 | } | ||
386 | } | ||
387 | "#, | ||
388 | ); | ||
389 | } | ||
390 | |||
391 | #[test] | ||
392 | fn no_such_field_with_type_macro() { | ||
393 | check_diagnostics( | ||
394 | r#" | ||
395 | macro_rules! Type { () => { u32 }; } | ||
396 | struct Foo { bar: Type![] } | ||
397 | |||
398 | impl Foo { | ||
399 | fn new() -> Self { | ||
400 | Foo { bar: 0 } | ||
401 | } | ||
402 | } | ||
403 | "#, | ||
404 | ); | ||
405 | } | ||
406 | |||
407 | #[test] | ||
408 | fn missing_record_pat_field_diagnostic() { | ||
409 | check_diagnostics( | ||
410 | r#" | ||
411 | struct S { foo: i32, bar: () } | ||
412 | fn baz(s: S) { | ||
413 | let S { foo: _ } = s; | ||
414 | //^ Missing structure fields: | ||
415 | //| - bar | ||
416 | } | ||
417 | "#, | ||
418 | ); | ||
419 | } | ||
420 | |||
421 | #[test] | ||
422 | fn missing_record_pat_field_no_diagnostic_if_not_exhaustive() { | ||
423 | check_diagnostics( | ||
424 | r" | ||
425 | struct S { foo: i32, bar: () } | ||
426 | fn baz(s: S) -> i32 { | ||
427 | match s { | ||
428 | S { foo, .. } => foo, | ||
429 | } | ||
430 | } | ||
431 | ", | ||
432 | ) | ||
433 | } | ||
434 | |||
435 | #[test] | ||
436 | fn break_outside_of_loop() { | ||
437 | check_diagnostics( | ||
438 | r#" | ||
439 | fn foo() { break; } | ||
440 | //^^^^^ break outside of loop | ||
441 | "#, | ||
442 | ); | ||
443 | } | ||
444 | } | ||
diff --git a/crates/hir_ty/src/diagnostics/expr.rs b/crates/hir_ty/src/diagnostics/expr.rs
new file mode 100644
index 000000000..fb76e2e4e
--- /dev/null
+++ b/crates/hir_ty/src/diagnostics/expr.rs
@@ -0,0 +1,569 @@
1 | //! FIXME: write short doc here | ||
2 | |||
3 | use std::sync::Arc; | ||
4 | |||
5 | use hir_def::{path::path, resolver::HasResolver, AdtId, DefWithBodyId}; | ||
6 | use hir_expand::diagnostics::DiagnosticSink; | ||
7 | use rustc_hash::FxHashSet; | ||
8 | use syntax::{ast, AstPtr}; | ||
9 | |||
10 | use crate::{ | ||
11 | db::HirDatabase, | ||
12 | diagnostics::{ | ||
13 | match_check::{is_useful, MatchCheckCtx, Matrix, PatStack, Usefulness}, | ||
14 | MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, MissingPatFields, | ||
15 | }, | ||
16 | utils::variant_data, | ||
17 | ApplicationTy, InferenceResult, Ty, TypeCtor, | ||
18 | }; | ||
19 | |||
20 | pub use hir_def::{ | ||
21 | body::{ | ||
22 | scope::{ExprScopes, ScopeEntry, ScopeId}, | ||
23 | Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource, | ||
24 | }, | ||
25 | expr::{ | ||
26 | ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp, | ||
27 | MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp, | ||
28 | }, | ||
29 | src::HasSource, | ||
30 | LocalFieldId, Lookup, VariantId, | ||
31 | }; | ||
32 | |||
33 | pub(super) struct ExprValidator<'a, 'b: 'a> { | ||
34 | owner: DefWithBodyId, | ||
35 | infer: Arc<InferenceResult>, | ||
36 | sink: &'a mut DiagnosticSink<'b>, | ||
37 | } | ||
38 | |||
39 | impl<'a, 'b> ExprValidator<'a, 'b> { | ||
40 | pub(super) fn new( | ||
41 | owner: DefWithBodyId, | ||
42 | infer: Arc<InferenceResult>, | ||
43 | sink: &'a mut DiagnosticSink<'b>, | ||
44 | ) -> ExprValidator<'a, 'b> { | ||
45 | ExprValidator { owner, infer, sink } | ||
46 | } | ||
47 | |||
48 | pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) { | ||
49 | let body = db.body(self.owner.into()); | ||
50 | |||
51 | for (id, expr) in body.exprs.iter() { | ||
52 | if let Some((variant_def, missed_fields, true)) = | ||
53 | record_literal_missing_fields(db, &self.infer, id, expr) | ||
54 | { | ||
55 | self.create_record_literal_missing_fields_diagnostic( | ||
56 | id, | ||
57 | db, | ||
58 | variant_def, | ||
59 | missed_fields, | ||
60 | ); | ||
61 | } | ||
62 | |||
63 | match expr { | ||
64 | Expr::Match { expr, arms } => { | ||
65 | self.validate_match(id, *expr, arms, db, self.infer.clone()); | ||
66 | } | ||
67 | Expr::Call { .. } | Expr::MethodCall { .. } => { | ||
68 | self.validate_call(db, id, expr); | ||
69 | } | ||
70 | _ => {} | ||
71 | } | ||
72 | } | ||
73 | for (id, pat) in body.pats.iter() { | ||
74 | if let Some((variant_def, missed_fields, true)) = | ||
75 | record_pattern_missing_fields(db, &self.infer, id, pat) | ||
76 | { | ||
77 | self.create_record_pattern_missing_fields_diagnostic( | ||
78 | id, | ||
79 | db, | ||
80 | variant_def, | ||
81 | missed_fields, | ||
82 | ); | ||
83 | } | ||
84 | } | ||
85 | let body_expr = &body[body.body_expr]; | ||
86 | if let Expr::Block { tail: Some(t), .. } = body_expr { | ||
87 | self.validate_results_in_tail_expr(body.body_expr, *t, db); | ||
88 | } | ||
89 | } | ||
90 | |||
91 | fn create_record_literal_missing_fields_diagnostic( | ||
92 | &mut self, | ||
93 | id: ExprId, | ||
94 | db: &dyn HirDatabase, | ||
95 | variant_def: VariantId, | ||
96 | missed_fields: Vec<LocalFieldId>, | ||
97 | ) { | ||
98 | // XXX: only look at source_map if we do have missing fields | ||
99 | let (_, source_map) = db.body_with_source_map(self.owner.into()); | ||
100 | |||
101 | if let Ok(source_ptr) = source_map.expr_syntax(id) { | ||
102 | let root = source_ptr.file_syntax(db.upcast()); | ||
103 | if let ast::Expr::RecordExpr(record_expr) = &source_ptr.value.to_node(&root) { | ||
104 | if let Some(_) = record_expr.record_expr_field_list() { | ||
105 | let variant_data = variant_data(db.upcast(), variant_def); | ||
106 | let missed_fields = missed_fields | ||
107 | .into_iter() | ||
108 | .map(|idx| variant_data.fields()[idx].name.clone()) | ||
109 | .collect(); | ||
110 | self.sink.push(MissingFields { | ||
111 | file: source_ptr.file_id, | ||
112 | field_list_parent: AstPtr::new(&record_expr), | ||
113 | field_list_parent_path: record_expr.path().map(|path| AstPtr::new(&path)), | ||
114 | missed_fields, | ||
115 | }) | ||
116 | } | ||
117 | } | ||
118 | } | ||
119 | } | ||
120 | |||
121 | fn create_record_pattern_missing_fields_diagnostic( | ||
122 | &mut self, | ||
123 | id: PatId, | ||
124 | db: &dyn HirDatabase, | ||
125 | variant_def: VariantId, | ||
126 | missed_fields: Vec<LocalFieldId>, | ||
127 | ) { | ||
128 | // XXX: only look at source_map if we do have missing fields | ||
129 | let (_, source_map) = db.body_with_source_map(self.owner.into()); | ||
130 | |||
131 | if let Ok(source_ptr) = source_map.pat_syntax(id) { | ||
132 | if let Some(expr) = source_ptr.value.as_ref().left() { | ||
133 | let root = source_ptr.file_syntax(db.upcast()); | ||
134 | if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) { | ||
135 | if let Some(_) = record_pat.record_pat_field_list() { | ||
136 | let variant_data = variant_data(db.upcast(), variant_def); | ||
137 | let missed_fields = missed_fields | ||
138 | .into_iter() | ||
139 | .map(|idx| variant_data.fields()[idx].name.clone()) | ||
140 | .collect(); | ||
141 | self.sink.push(MissingPatFields { | ||
142 | file: source_ptr.file_id, | ||
143 | field_list_parent: AstPtr::new(&record_pat), | ||
144 | field_list_parent_path: record_pat | ||
145 | .path() | ||
146 | .map(|path| AstPtr::new(&path)), | ||
147 | missed_fields, | ||
148 | }) | ||
149 | } | ||
150 | } | ||
151 | } | ||
152 | } | ||
153 | } | ||
154 | |||
155 | fn validate_call(&mut self, db: &dyn HirDatabase, call_id: ExprId, expr: &Expr) -> Option<()> { | ||
156 | // Check that the number of arguments matches the number of parameters. | ||
157 | |||
158 | // FIXME: Due to shortcomings in the current type system implementation, only emit this | ||
159 | // diagnostic if there are no type mismatches in the containing function. | ||
160 | if self.infer.type_mismatches.iter().next().is_some() { | ||
161 | return Some(()); | ||
162 | } | ||
163 | |||
164 | let is_method_call = matches!(expr, Expr::MethodCall { .. }); | ||
165 | let (sig, args) = match expr { | ||
166 | Expr::Call { callee, args } => { | ||
167 | let callee = &self.infer.type_of_expr[*callee]; | ||
168 | let sig = callee.callable_sig(db)?; | ||
169 | (sig, args.clone()) | ||
170 | } | ||
171 | Expr::MethodCall { receiver, args, .. } => { | ||
172 | let mut args = args.clone(); | ||
173 | args.insert(0, *receiver); | ||
174 | |||
175 | // FIXME: note that we erase information about substs here. This | ||
176 | // is not right, but, luckily, doesn't matter as we care only | ||
177 | // about the number of params | ||
178 | let callee = self.infer.method_resolution(call_id)?; | ||
179 | let sig = db.callable_item_signature(callee.into()).value; | ||
180 | |||
181 | (sig, args) | ||
182 | } | ||
183 | _ => return None, | ||
184 | }; | ||
185 | |||
186 | if sig.is_varargs { | ||
187 | return None; | ||
188 | } | ||
189 | |||
190 | let params = sig.params(); | ||
191 | |||
192 | let mut param_count = params.len(); | ||
193 | let mut arg_count = args.len(); | ||
194 | |||
195 | if arg_count != param_count { | ||
196 | let (_, source_map) = db.body_with_source_map(self.owner.into()); | ||
197 | if let Ok(source_ptr) = source_map.expr_syntax(call_id) { | ||
198 | if is_method_call { | ||
199 | param_count -= 1; | ||
200 | arg_count -= 1; | ||
201 | } | ||
202 | self.sink.push(MismatchedArgCount { | ||
203 | file: source_ptr.file_id, | ||
204 | call_expr: source_ptr.value, | ||
205 | expected: param_count, | ||
206 | found: arg_count, | ||
207 | }); | ||
208 | } | ||
209 | } | ||
210 | |||
211 | None | ||
212 | } | ||
213 | |||
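The arity check boils down to: count the declared parameters, count the supplied arguments (with the receiver of a method call inserted as the first argument), bail out for varargs, and subtract one from both counts for method calls so the report matches what the user wrote. A sketch under those assumptions, with a hypothetical `Sig` standing in for `CallableSig`:

```rust
// Sketch of the arity check in `validate_call`: the receiver of a method call
// is counted as the first argument when comparing against the signature, and
// both counts are decremented again purely for the diagnostic text, so the
// user sees "expected 1 argument, found 0" for `S.method()` rather than 2/1.
struct Sig {
    params: usize,
    is_varargs: bool,
}

fn check_arity(sig: &Sig, mut arg_count: usize, is_method_call: bool) -> Option<(usize, usize)> {
    if sig.is_varargs {
        return None; // can't meaningfully check `fn f(x: u8, ...)`
    }
    let mut param_count = sig.params;
    if arg_count == param_count {
        return None;
    }
    if is_method_call {
        // Hide the implicit `self` from the reported numbers.
        param_count -= 1;
        arg_count -= 1;
    }
    Some((param_count, arg_count)) // (expected, found)
}

fn main() {
    // `S.method()` where `fn method(&self, arg: u8)`: 2 params, 1 argument
    // passed (the receiver), reported as expected 1 / found 0.
    assert_eq!(check_arity(&Sig { params: 2, is_varargs: false }, 1, true), Some((1, 0)));
    // `zero(1)` where `fn zero()`: reported as expected 0 / found 1.
    assert_eq!(check_arity(&Sig { params: 0, is_varargs: false }, 1, false), Some((0, 1)));
}
```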
214 | fn validate_match( | ||
215 | &mut self, | ||
216 | id: ExprId, | ||
217 | match_expr: ExprId, | ||
218 | arms: &[MatchArm], | ||
219 | db: &dyn HirDatabase, | ||
220 | infer: Arc<InferenceResult>, | ||
221 | ) { | ||
222 | let (body, source_map): (Arc<Body>, Arc<BodySourceMap>) = | ||
223 | db.body_with_source_map(self.owner.into()); | ||
224 | |||
225 | let match_expr_ty = match infer.type_of_expr.get(match_expr) { | ||
226 | Some(ty) => ty, | ||
227 | // If we can't resolve the type of the match expression | ||
228 | // we cannot perform exhaustiveness checks. | ||
229 | None => return, | ||
230 | }; | ||
231 | |||
232 | let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db }; | ||
233 | let pats = arms.iter().map(|arm| arm.pat); | ||
234 | |||
235 | let mut seen = Matrix::empty(); | ||
236 | for pat in pats { | ||
237 | if let Some(pat_ty) = infer.type_of_pat.get(pat) { | ||
238 | // We only include patterns whose type matches the type | ||
239 | // of the match expression. If we had an InvalidMatchArmPattern
240 | // diagnostic or similar we could raise that in an else | ||
241 | // block here. | ||
242 | // | ||
243 | // When comparing the types, we also have to consider that rustc | ||
244 | // will automatically de-reference the match expression type if | ||
245 | // necessary. | ||
246 | // | ||
247 | // FIXME we should use the type checker for this. | ||
248 | if pat_ty == match_expr_ty | ||
249 | || match_expr_ty | ||
250 | .as_reference() | ||
251 | .map(|(match_expr_ty, _)| match_expr_ty == pat_ty) | ||
252 | .unwrap_or(false) | ||
253 | { | ||
254 | // If we had a NotUsefulMatchArm diagnostic, we could | ||
255 | // check the usefulness of each pattern as we added it | ||
256 | // to the matrix here. | ||
257 | let v = PatStack::from_pattern(pat); | ||
258 | seen.push(&cx, v); | ||
259 | continue; | ||
260 | } | ||
261 | } | ||
262 | |||
263 | // If we can't resolve the type of a pattern, or the pattern type doesn't | ||
264 | // fit the match expression, we skip this diagnostic. Skipping the entire | ||
265 | // diagnostic rather than just not including this match arm is preferred | ||
266 | // to avoid the chance of false positives. | ||
267 | return; | ||
268 | } | ||
269 | |||
270 | match is_useful(&cx, &seen, &PatStack::from_wild()) { | ||
271 | Ok(Usefulness::Useful) => (), | ||
272 | // if a wildcard pattern is not useful, then all patterns are covered | ||
273 | Ok(Usefulness::NotUseful) => return, | ||
274 | // this path is for unimplemented checks, so we err on the side of not | ||
275 | // reporting any errors | ||
276 | _ => return, | ||
277 | } | ||
278 | |||
279 | if let Ok(source_ptr) = source_map.expr_syntax(id) { | ||
280 | let root = source_ptr.file_syntax(db.upcast()); | ||
281 | if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) { | ||
282 | if let (Some(match_expr), Some(arms)) = | ||
283 | (match_expr.expr(), match_expr.match_arm_list()) | ||
284 | { | ||
285 | self.sink.push(MissingMatchArms { | ||
286 | file: source_ptr.file_id, | ||
287 | match_expr: AstPtr::new(&match_expr), | ||
288 | arms: AstPtr::new(&arms), | ||
289 | }) | ||
290 | } | ||
291 | } | ||
292 | } | ||
293 | } | ||
294 | |||
295 | fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) { | ||
296 | // the mismatch will be on the whole block currently | ||
297 | let mismatch = match self.infer.type_mismatch_for_expr(body_id) { | ||
298 | Some(m) => m, | ||
299 | None => return, | ||
300 | }; | ||
301 | |||
302 | let core_result_path = path![core::result::Result]; | ||
303 | |||
304 | let resolver = self.owner.resolver(db.upcast()); | ||
305 | let core_result_enum = match resolver.resolve_known_enum(db.upcast(), &core_result_path) { | ||
306 | Some(it) => it, | ||
307 | _ => return, | ||
308 | }; | ||
309 | |||
310 | let core_result_ctor = TypeCtor::Adt(AdtId::EnumId(core_result_enum)); | ||
311 | let params = match &mismatch.expected { | ||
312 | Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &core_result_ctor => { | ||
313 | parameters | ||
314 | } | ||
315 | _ => return, | ||
316 | }; | ||
317 | |||
318 | if params.len() == 2 && params[0] == mismatch.actual { | ||
319 | let (_, source_map) = db.body_with_source_map(self.owner.into()); | ||
320 | |||
321 | if let Ok(source_ptr) = source_map.expr_syntax(id) { | ||
322 | self.sink | ||
323 | .push(MissingOkInTailExpr { file: source_ptr.file_id, expr: source_ptr.value }); | ||
324 | } | ||
325 | } | ||
326 | } | ||
327 | } | ||
328 | |||
329 | pub fn record_literal_missing_fields( | ||
330 | db: &dyn HirDatabase, | ||
331 | infer: &InferenceResult, | ||
332 | id: ExprId, | ||
333 | expr: &Expr, | ||
334 | ) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> { | ||
335 | let (fields, exhaustive) = match expr {
336 | Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()), | ||
337 | _ => return None, | ||
338 | }; | ||
339 | |||
340 | let variant_def = infer.variant_resolution_for_expr(id)?; | ||
341 | if let VariantId::UnionId(_) = variant_def { | ||
342 | return None; | ||
343 | } | ||
344 | |||
345 | let variant_data = variant_data(db.upcast(), variant_def); | ||
346 | |||
347 | let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); | ||
348 | let missed_fields: Vec<LocalFieldId> = variant_data | ||
349 | .fields() | ||
350 | .iter() | ||
351 | .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) }) | ||
352 | .collect(); | ||
353 | if missed_fields.is_empty() { | ||
354 | return None; | ||
355 | } | ||
356 | Some((variant_def, missed_fields, exhaustive))
357 | } | ||
358 | |||
359 | pub fn record_pattern_missing_fields( | ||
360 | db: &dyn HirDatabase, | ||
361 | infer: &InferenceResult, | ||
362 | id: PatId, | ||
363 | pat: &Pat, | ||
364 | ) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> { | ||
365 | let (fields, exhaustive) = match pat { | ||
366 | Pat::Record { path: _, args, ellipsis } => (args, !ellipsis), | ||
367 | _ => return None, | ||
368 | }; | ||
369 | |||
370 | let variant_def = infer.variant_resolution_for_pat(id)?; | ||
371 | if let VariantId::UnionId(_) = variant_def { | ||
372 | return None; | ||
373 | } | ||
374 | |||
375 | let variant_data = variant_data(db.upcast(), variant_def); | ||
376 | |||
377 | let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); | ||
378 | let missed_fields: Vec<LocalFieldId> = variant_data | ||
379 | .fields() | ||
380 | .iter() | ||
381 | .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) }) | ||
382 | .collect(); | ||
383 | if missed_fields.is_empty() { | ||
384 | return None; | ||
385 | } | ||
386 | Some((variant_def, missed_fields, exhaustive)) | ||
387 | } | ||
388 | |||
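Both helpers compute the same set difference: every declared field that the literal or pattern does not mention is reported as missing (unless a spread/ellipsis makes it non-exhaustive, or the variant is a union). A minimal sketch of that computation, with strings standing in for `Name` and indices for `LocalFieldId`:

```rust
use std::collections::HashSet;

// Sketch of the field-difference computation shared by both functions above:
// collect the field names the literal/pattern mentions, then keep every
// declared field that is not in that set.
fn missing_fields(declared: &[&str], specified: &[&str]) -> Vec<usize> {
    let specified: HashSet<&str> = specified.iter().copied().collect();
    declared
        .iter()
        .enumerate()
        .filter_map(|(idx, name)| if specified.contains(name) { None } else { Some(idx) })
        .collect()
}

fn main() {
    // `S { foo: 92, baz: 62 }` for `struct S { foo: i32, bar: () }`:
    // `bar` (index 1) is missing; `baz` is a separate NoSuchField diagnostic.
    assert_eq!(missing_fields(&["foo", "bar"], &["foo", "baz"]), vec![1]);
    // A literal mentioning every field reports nothing.
    assert!(missing_fields(&["foo", "bar"], &["foo", "bar"]).is_empty());
}
```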
389 | #[cfg(test)] | ||
390 | mod tests { | ||
391 | use crate::diagnostics::tests::check_diagnostics; | ||
392 | |||
393 | #[test] | ||
394 | fn simple_free_fn_zero() { | ||
395 | check_diagnostics( | ||
396 | r#" | ||
397 | fn zero() {} | ||
398 | fn f() { zero(1); } | ||
399 | //^^^^^^^ Expected 0 arguments, found 1 | ||
400 | "#, | ||
401 | ); | ||
402 | |||
403 | check_diagnostics( | ||
404 | r#" | ||
405 | fn zero() {} | ||
406 | fn f() { zero(); } | ||
407 | "#, | ||
408 | ); | ||
409 | } | ||
410 | |||
411 | #[test] | ||
412 | fn simple_free_fn_one() { | ||
413 | check_diagnostics( | ||
414 | r#" | ||
415 | fn one(arg: u8) {} | ||
416 | fn f() { one(); } | ||
417 | //^^^^^ Expected 1 argument, found 0 | ||
418 | "#, | ||
419 | ); | ||
420 | |||
421 | check_diagnostics( | ||
422 | r#" | ||
423 | fn one(arg: u8) {} | ||
424 | fn f() { one(1); } | ||
425 | "#, | ||
426 | ); | ||
427 | } | ||
428 | |||
429 | #[test] | ||
430 | fn method_as_fn() { | ||
431 | check_diagnostics( | ||
432 | r#" | ||
433 | struct S; | ||
434 | impl S { fn method(&self) {} } | ||
435 | |||
436 | fn f() { | ||
437 | S::method(); | ||
438 | } //^^^^^^^^^^^ Expected 1 argument, found 0 | ||
439 | "#, | ||
440 | ); | ||
441 | |||
442 | check_diagnostics( | ||
443 | r#" | ||
444 | struct S; | ||
445 | impl S { fn method(&self) {} } | ||
446 | |||
447 | fn f() { | ||
448 | S::method(&S); | ||
449 | S.method(); | ||
450 | } | ||
451 | "#, | ||
452 | ); | ||
453 | } | ||
454 | |||
455 | #[test] | ||
456 | fn method_with_arg() { | ||
457 | check_diagnostics( | ||
458 | r#" | ||
459 | struct S; | ||
460 | impl S { fn method(&self, arg: u8) {} } | ||
461 | |||
462 | fn f() { | ||
463 | S.method(); | ||
464 | } //^^^^^^^^^^ Expected 1 argument, found 0 | ||
465 | "#, | ||
466 | ); | ||
467 | |||
468 | check_diagnostics( | ||
469 | r#" | ||
470 | struct S; | ||
471 | impl S { fn method(&self, arg: u8) {} } | ||
472 | |||
473 | fn f() { | ||
474 | S::method(&S, 0); | ||
475 | S.method(1); | ||
476 | } | ||
477 | "#, | ||
478 | ); | ||
479 | } | ||
480 | |||
481 | #[test] | ||
482 | fn tuple_struct() { | ||
483 | check_diagnostics( | ||
484 | r#" | ||
485 | struct Tup(u8, u16); | ||
486 | fn f() { | ||
487 | Tup(0); | ||
488 | } //^^^^^^ Expected 2 arguments, found 1 | ||
489 | "#, | ||
490 | ) | ||
491 | } | ||
492 | |||
493 | #[test] | ||
494 | fn enum_variant() { | ||
495 | check_diagnostics( | ||
496 | r#" | ||
497 | enum En { Variant(u8, u16), } | ||
498 | fn f() { | ||
499 | En::Variant(0); | ||
500 | } //^^^^^^^^^^^^^^ Expected 2 arguments, found 1 | ||
501 | "#, | ||
502 | ) | ||
503 | } | ||
504 | |||
505 | #[test] | ||
506 | fn enum_variant_type_macro() { | ||
507 | check_diagnostics( | ||
508 | r#" | ||
509 | macro_rules! Type { | ||
510 | () => { u32 }; | ||
511 | } | ||
512 | enum Foo { | ||
513 | Bar(Type![]) | ||
514 | } | ||
515 | impl Foo { | ||
516 | fn new() { | ||
517 | Foo::Bar(0); | ||
518 | Foo::Bar(0, 1); | ||
519 | //^^^^^^^^^^^^^^ Expected 1 argument, found 2 | ||
520 | Foo::Bar(); | ||
521 | //^^^^^^^^^^ Expected 1 argument, found 0 | ||
522 | } | ||
523 | } | ||
524 | "#, | ||
525 | ); | ||
526 | } | ||
527 | |||
528 | #[test] | ||
529 | fn varargs() { | ||
530 | check_diagnostics( | ||
531 | r#" | ||
532 | extern "C" { | ||
533 | fn fixed(fixed: u8); | ||
534 | fn varargs(fixed: u8, ...); | ||
535 | fn varargs2(...); | ||
536 | } | ||
537 | |||
538 | fn f() { | ||
539 | unsafe { | ||
540 | fixed(0); | ||
541 | fixed(0, 1); | ||
542 | //^^^^^^^^^^^ Expected 1 argument, found 2 | ||
543 | varargs(0); | ||
544 | varargs(0, 1); | ||
545 | varargs2(); | ||
546 | varargs2(0); | ||
547 | varargs2(0, 1); | ||
548 | } | ||
549 | } | ||
550 | "#, | ||
551 | ) | ||
552 | } | ||
553 | |||
554 | #[test] | ||
555 | fn arg_count_lambda() { | ||
556 | check_diagnostics( | ||
557 | r#" | ||
558 | fn main() { | ||
559 | let f = |()| (); | ||
560 | f(); | ||
561 | //^^^ Expected 1 argument, found 0 | ||
562 | f(()); | ||
563 | f((), ()); | ||
564 | //^^^^^^^^^ Expected 1 argument, found 2 | ||
565 | } | ||
566 | "#, | ||
567 | ) | ||
568 | } | ||
569 | } | ||
diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs
new file mode 100644
index 000000000..7f007f1d6
--- /dev/null
+++ b/crates/hir_ty/src/diagnostics/match_check.rs
@@ -0,0 +1,1421 @@
1 | //! This module implements match statement exhaustiveness checking and usefulness checking | ||
2 | //! for match arms. | ||
3 | //! | ||
4 | //! It is modeled on the rustc module `librustc_mir_build::hair::pattern::_match`, which | ||
5 | //! contains very detailed documentation about the algorithms used here. I've duplicated | ||
6 | //! most of that documentation below. | ||
7 | //! | ||
8 | //! This file includes the logic for exhaustiveness and usefulness checking for | ||
9 | //! pattern-matching. Specifically, given a list of patterns for a type, we can | ||
10 | //! tell whether: | ||
11 | //! - (a) the patterns cover every possible constructor for the type (exhaustiveness). | ||
12 | //! - (b) each pattern is necessary (usefulness). | ||
13 | //! | ||
14 | //! The algorithm implemented here is a modified version of the one described in | ||
15 | //! <http://moscova.inria.fr/~maranget/papers/warn/index.html>. | ||
16 | //! However, to save future implementors from reading the original paper, we | ||
17 | //! summarise the algorithm here to hopefully save time and be a little clearer | ||
18 | //! (without being so rigorous). | ||
19 | //! | ||
20 | //! The core of the algorithm revolves around a "usefulness" check. In particular, we
21 | //! are trying to compute a predicate `U(P, p)` where `P` is a list of patterns (we refer to this as | ||
22 | //! a matrix). `U(P, p)` represents whether, given an existing list of patterns | ||
23 | //! `P_1 ..= P_m`, adding a new pattern `p` will be "useful" (that is, cover previously- | ||
24 | //! uncovered values of the type). | ||
25 | //! | ||
26 | //! If we have this predicate, then we can easily compute both exhaustiveness of an | ||
27 | //! entire set of patterns and the individual usefulness of each one. | ||
28 | //! (a) the set of patterns is exhaustive iff `U(P, _)` is false (i.e., adding a wildcard | ||
29 | //! match doesn't increase the number of values we're matching) | ||
30 | //! (b) a pattern `P_i` is not useful if `U(P[0..=(i-1)], P_i)` is false (i.e., adding a
31 | //! pattern to those that have come before it doesn't increase the number of values | ||
32 | //! we're matching). | ||
33 | //! | ||
34 | //! During the course of the algorithm, the rows of the matrix won't just be individual patterns, | ||
35 | //! but rather partially-deconstructed patterns in the form of a list of patterns. The paper | ||
36 | //! calls those pattern-vectors, and we will call them pattern-stacks. The same holds for the | ||
37 | //! new pattern `p`. | ||
38 | //! | ||
39 | //! For example, say we have the following: | ||
40 | //! | ||
41 | //! ```ignore | ||
42 | //! // x: (Option<bool>, Result<()>) | ||
43 | //! match x { | ||
44 | //! (Some(true), _) => (), | ||
45 | //! (None, Err(())) => (), | ||
46 | //! (None, Err(_)) => (), | ||
47 | //! } | ||
48 | //! ``` | ||
49 | //! | ||
50 | //! Here, the matrix `P` starts as: | ||
51 | //! | ||
52 | //! ```text | ||
53 | //! [ | ||
54 | //! [(Some(true), _)], | ||
55 | //! [(None, Err(()))], | ||
56 | //! [(None, Err(_))], | ||
57 | //! ] | ||
58 | //! ``` | ||
59 | //! | ||
60 | //! We can tell it's not exhaustive, because `U(P, _)` is true (we're not covering | ||
61 | //! `[(Some(false), _)]`, for instance). In addition, row 3 is not useful, because | ||
62 | //! all the values it covers are already covered by row 2. | ||
63 | //! | ||
64 | //! A list of patterns can be thought of as a stack, because we are mainly interested in the top of | ||
65 | //! the stack at any given point, and we can pop or apply constructors to get new pattern-stacks. | ||
66 | //! To match the paper, the top of the stack is at the beginning / on the left. | ||
67 | //! | ||
68 | //! There are two important operations on pattern-stacks necessary to understand the algorithm: | ||
69 | //! | ||
70 | //! 1. We can pop a given constructor off the top of a stack. This operation is called | ||
71 | //! `specialize`, and is denoted `S(c, p)` where `c` is a constructor (like `Some` or | ||
72 | //! `None`) and `p` a pattern-stack. | ||
73 | //! If the pattern on top of the stack can cover `c`, this removes the constructor and | ||
74 | //! pushes its arguments onto the stack. It also expands OR-patterns into distinct patterns. | ||
75 | //! Otherwise the pattern-stack is discarded. | ||
76 | //! This essentially filters those pattern-stacks whose top covers the constructor `c` and | ||
77 | //! discards the others. | ||
78 | //! | ||
79 | //! For example, the first pattern above initially gives a stack `[(Some(true), _)]`. If we | ||
80 | //! pop the tuple constructor, we are left with `[Some(true), _]`, and if we then pop the | ||
81 | //! `Some` constructor we get `[true, _]`. If we had popped `None` instead, we would get | ||
82 | //! nothing back. | ||
83 | //! | ||
84 | //! This returns zero or more new pattern-stacks, as follows. We look at the pattern `p_1` | ||
85 | //! on top of the stack, and we have four cases: | ||
86 | //! | ||
87 | //! * 1.1. `p_1 = c(r_1, .., r_a)`, i.e. the top of the stack has constructor `c`. We push onto | ||
88 | //! the stack the arguments of this constructor, and return the result: | ||
89 | //! | ||
90 | //! r_1, .., r_a, p_2, .., p_n | ||
91 | //! | ||
92 | //! * 1.2. `p_1 = c'(r_1, .., r_a')` where `c ≠ c'`. We discard the current stack and return
93 | //! nothing. | ||
94 | //! * 1.3. `p_1 = _`. We push onto the stack as many wildcards as the constructor `c` has | ||
95 | //! arguments (its arity), and return the resulting stack: | ||
96 | //! | ||
97 | //! _, .., _, p_2, .., p_n | ||
98 | //! | ||
99 | //! * 1.4. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack: | ||
100 | //! | ||
101 | //! S(c, (r_1, p_2, .., p_n)) | ||
102 | //! S(c, (r_2, p_2, .., p_n)) | ||
103 | //! | ||
104 | //! 2. We can pop a wildcard off the top of the stack. This is called `D(p)`, where `p` is | ||
105 | //! a pattern-stack. | ||
106 | //! This is used when we know there are missing constructor cases, but there might be | ||
107 | //! existing wildcard patterns, so to check the usefulness of the matrix, we have to check | ||
108 | //! all its *other* components. | ||
109 | //! | ||
110 | //! It is computed as follows. We look at the pattern `p_1` on top of the stack, | ||
111 | //! and we have three cases: | ||
112 | //! * 1.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing. | ||
113 | //! * 1.2. `p_1 = _`. We return the rest of the stack: | ||
114 | //! | ||
115 | //! p_2, .., p_n | ||
116 | //! | ||
117 | //! * 1.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack: | ||
118 | //! | ||
119 | //! D((r_1, p_2, .., p_n)) | ||
120 | //! D((r_2, p_2, .., p_n)) | ||
121 | //! | ||
122 | //! Note that the OR-patterns are not always used directly in Rust, but are used to derive the | ||
123 | //! exhaustive integer matching rules, so they're written here for posterity. | ||
124 | //! | ||
125 | //! Both those operations extend straightforwardly to a list or pattern-stacks, i.e. a matrix, by | ||
126 | //! working row-by-row. Popping a constructor ends up keeping only the matrix rows that start with | ||
127 | //! the given constructor, and popping a wildcard keeps those rows that start with a wildcard. | ||
128 | //! | ||
129 | //! | ||
130 | //! The algorithm for computing `U` | ||
131 | //! ------------------------------- | ||
132 | //! The algorithm is inductive (on the number of columns: i.e., components of tuple patterns). | ||
133 | //! That means we're going to check the components from left-to-right, so the algorithm | ||
134 | //! operates principally on the first component of the matrix and new pattern-stack `p`. | ||
135 | //! This algorithm is realised in the `is_useful` function. | ||
136 | //! | ||
137 | //! Base case (`n = 0`, i.e., an empty tuple pattern): | ||
138 | //! - If `P` already contains an empty pattern (i.e., if the number of patterns `m > 0`), then | ||
139 | //! `U(P, p)` is false. | ||
140 | //! - Otherwise, `P` must be empty, so `U(P, p)` is true. | ||
141 | //! | ||
142 | //! Inductive step (`n > 0`, i.e., whether there's at least one column [which may then be expanded | ||
143 | //! into further columns later]). We're going to match on the top of the new pattern-stack, `p_1`: | ||
144 | //! | ||
145 | //! - If `p_1 == c(r_1, .., r_a)`, i.e. we have a constructor pattern. | ||
146 | //! Then, the usefulness of `p_1` can be reduced to whether it is useful when | ||
147 | //! we ignore all the patterns in the first column of `P` that involve other constructors. | ||
148 | //! This is where `S(c, P)` comes in: | ||
149 | //! | ||
150 | //! ```text | ||
151 | //! U(P, p) := U(S(c, P), S(c, p)) | ||
152 | //! ``` | ||
153 | //! | ||
154 | //! This special case is handled in `is_useful_specialized`. | ||
155 | //! | ||
156 | //! For example, if `P` is: | ||
157 | //! | ||
158 | //! ```text | ||
159 | //! [ | ||
160 | //! [Some(true), _], | ||
161 | //! [None, 0], | ||
162 | //! ] | ||
163 | //! ``` | ||
164 | //! | ||
165 | //! and `p` is `[Some(false), 0]`, then we don't care about row 2 since we know `p` only | ||
166 | //! matches values that row 2 doesn't. For row 1 however, we need to dig into the | ||
167 | //! arguments of `Some` to know whether some new value is covered. So we compute | ||
168 | //! `U([[true, _]], [false, 0])`. | ||
169 | //! | ||
170 | //! - If `p_1 == _`, then we look at the list of constructors that appear in the first component of | ||
171 | //! the rows of `P`: | ||
172 | //! - If there are some constructors that aren't present, then we might think that the | ||
173 | //! wildcard `_` is useful, since it covers those constructors that weren't covered | ||
174 | //! before. | ||
175 | //! That's almost correct, but only works if there were no wildcards in those first | ||
176 | //! components. So we need to check that `p` is useful with respect to the rows that | ||
177 | //! start with a wildcard, if there are any. This is where `D` comes in: | ||
178 | //! `U(P, p) := U(D(P), D(p))` | ||
179 | //! | ||
180 | //! For example, if `P` is: | ||
181 | //! ```text | ||
182 | //! [ | ||
183 | //! [_, true, _], | ||
184 | //! [None, false, 1], | ||
185 | //! ] | ||
186 | //! ``` | ||
187 | //! and `p` is `[_, false, _]`, the `Some` constructor doesn't appear in `P`. So if we | ||
188 | //! only had row 2, we'd know that `p` is useful. However row 1 starts with a | ||
189 | //! wildcard, so we need to check whether `U([[true, _]], [false, _])`.
190 | //! | ||
191 | //! - Otherwise, all possible constructors (for the relevant type) are present. In this | ||
192 | //! case we must check whether the wildcard pattern covers any unmatched value. For | ||
193 | //! that, we can think of the `_` pattern as a big OR-pattern that covers all | ||
194 | //! possible constructors. For `Option`, that would mean `_ = None | Some(_)` for | ||
195 | //! example. The wildcard pattern is useful in this case if it is useful when | ||
196 | //! specialized to one of the possible constructors. So we compute: | ||
197 | //! `U(P, p) := ∃(k ∈ constructors) U(S(k, P), S(k, p))` | ||
198 | //! | ||
199 | //! For example, if `P` is: | ||
200 | //! ```text | ||
201 | //! [ | ||
202 | //! [Some(true), _], | ||
203 | //! [None, false], | ||
204 | //! ] | ||
205 | //! ``` | ||
206 | //! and `p` is `[_, false]`, both `None` and `Some` constructors appear in the first | ||
207 | //! components of `P`. We will therefore try popping both constructors in turn: we | ||
208 | //! compute `U([[true, _]], [_, false])` for the `Some` constructor, and `U([[false]], | ||
209 | //! [false])` for the `None` constructor. The first case returns true, so we know that | ||
210 | //! `p` is useful for `P`. Indeed, it matches `[Some(false), _]` that wasn't matched | ||
211 | //! before. | ||
212 | //! | ||
213 | //! - If `p_1 == r_1 | r_2`, then the usefulness depends on each `r_i` separately: | ||
214 | //! | ||
215 | //! ```text | ||
216 | //! U(P, p) := U(P, (r_1, p_2, .., p_n)) | ||
217 | //! || U(P, (r_2, p_2, .., p_n)) | ||
218 | //! ``` | ||
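To connect the algorithm above to the diagnostic it drives, here is a small, self-contained illustration (editorial addition, not part of the diff): a match is reported as having a missing arm exactly when a hypothetical trailing wildcard arm would still be useful against the arms that were written.

```rust
// Illustration only: exhaustiveness as "would a trailing wildcard still be useful?".
// With only the `Some(true)` and `None` arms, a wildcard row would be useful because
// `Some(false)` is uncovered, which is what produces the "Missing match arm"
// diagnostic implemented below.
fn main() {
    let x: Option<bool> = Some(false);
    let s = match x {
        Some(true) => "some true",
        None => "none",
        // Removing this arm would make `U(P, [_])` true, i.e. the match
        // would be non-exhaustive.
        Some(false) => "some false",
    };
    println!("{}", s);
}
```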
219 | use std::sync::Arc; | ||
220 | |||
221 | use arena::Idx; | ||
222 | use hir_def::{ | ||
223 | adt::VariantData, | ||
224 | body::Body, | ||
225 | expr::{Expr, Literal, Pat, PatId}, | ||
226 | AdtId, EnumVariantId, VariantId, | ||
227 | }; | ||
228 | use smallvec::{smallvec, SmallVec}; | ||
229 | |||
230 | use crate::{db::HirDatabase, ApplicationTy, InferenceResult, Ty, TypeCtor}; | ||
231 | |||
232 | #[derive(Debug, Clone, Copy)] | ||
233 | /// Either a pattern from the source code being analyzed, represented as | ||
234 | /// a `PatId`, or a `Wild` pattern which is created as an intermediate | ||
235 | /// step in the match checking algorithm and thus is not backed by a | ||
236 | /// real `PatId`. | ||
237 | /// | ||
238 | /// Note that it is totally valid for the `PatId` variant to contain | ||
239 | /// a `PatId` which resolves to a `Wild` pattern, if that wild pattern | ||
240 | /// exists in the source code being analyzed. | ||
241 | enum PatIdOrWild { | ||
242 | PatId(PatId), | ||
243 | Wild, | ||
244 | } | ||
245 | |||
246 | impl PatIdOrWild { | ||
247 | fn as_pat(self, cx: &MatchCheckCtx) -> Pat { | ||
248 | match self { | ||
249 | PatIdOrWild::PatId(id) => cx.body.pats[id].clone(), | ||
250 | PatIdOrWild::Wild => Pat::Wild, | ||
251 | } | ||
252 | } | ||
253 | |||
254 | fn as_id(self) -> Option<PatId> { | ||
255 | match self { | ||
256 | PatIdOrWild::PatId(id) => Some(id), | ||
257 | PatIdOrWild::Wild => None, | ||
258 | } | ||
259 | } | ||
260 | } | ||
261 | |||
262 | impl From<PatId> for PatIdOrWild { | ||
263 | fn from(pat_id: PatId) -> Self { | ||
264 | Self::PatId(pat_id) | ||
265 | } | ||
266 | } | ||
267 | |||
268 | impl From<&PatId> for PatIdOrWild { | ||
269 | fn from(pat_id: &PatId) -> Self { | ||
270 | Self::PatId(*pat_id) | ||
271 | } | ||
272 | } | ||
273 | |||
274 | #[derive(Debug, Clone, Copy, PartialEq)] | ||
275 | pub(super) enum MatchCheckErr { | ||
276 | NotImplemented, | ||
277 | MalformedMatchArm, | ||
278 | /// Used when type inference cannot resolve the type of | ||
279 | /// a pattern or expression. | ||
280 | Unknown, | ||
281 | } | ||
282 | |||
283 | /// The return type of `is_useful` is either an indication of usefulness | ||
284 | /// of the match arm, or an error when the match statement | ||
285 | /// involves types for which exhaustiveness checking is | ||
286 | /// not yet fully implemented. | ||
287 | /// | ||
288 | /// The `std::result::Result` type is used here rather than a custom enum | ||
289 | /// to allow the use of `?`. | ||
290 | pub(super) type MatchCheckResult<T> = Result<T, MatchCheckErr>; | ||
291 | |||
292 | #[derive(Debug)] | ||
293 | /// A row in a Matrix. | ||
294 | /// | ||
295 | /// This type is modeled from the struct of the same name in `rustc`. | ||
296 | pub(super) struct PatStack(PatStackInner); | ||
297 | type PatStackInner = SmallVec<[PatIdOrWild; 2]>; | ||
298 | |||
299 | impl PatStack { | ||
300 | pub(super) fn from_pattern(pat_id: PatId) -> PatStack { | ||
301 | Self(smallvec!(pat_id.into())) | ||
302 | } | ||
303 | |||
304 | pub(super) fn from_wild() -> PatStack { | ||
305 | Self(smallvec!(PatIdOrWild::Wild)) | ||
306 | } | ||
307 | |||
308 | fn from_slice(slice: &[PatIdOrWild]) -> PatStack { | ||
309 | Self(SmallVec::from_slice(slice)) | ||
310 | } | ||
311 | |||
312 | fn from_vec(v: PatStackInner) -> PatStack { | ||
313 | Self(v) | ||
314 | } | ||
315 | |||
316 | fn get_head(&self) -> Option<PatIdOrWild> { | ||
317 | self.0.first().copied() | ||
318 | } | ||
319 | |||
320 | fn tail(&self) -> &[PatIdOrWild] { | ||
321 | self.0.get(1..).unwrap_or(&[]) | ||
322 | } | ||
323 | |||
324 | fn to_tail(&self) -> PatStack { | ||
325 | Self::from_slice(self.tail()) | ||
326 | } | ||
327 | |||
328 | fn replace_head_with<I, T>(&self, pats: I) -> PatStack | ||
329 | where | ||
330 | I: Iterator<Item = T>, | ||
331 | T: Into<PatIdOrWild>, | ||
332 | { | ||
333 | let mut patterns: PatStackInner = smallvec![]; | ||
334 | for pat in pats { | ||
335 | patterns.push(pat.into()); | ||
336 | } | ||
337 | for pat in &self.0[1..] { | ||
338 | patterns.push(*pat); | ||
339 | } | ||
340 | PatStack::from_vec(patterns) | ||
341 | } | ||
342 | |||
343 | /// Computes `D(self)`. | ||
344 | /// | ||
345 | /// See the module docs and the associated documentation in rustc for details. | ||
346 | fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Option<PatStack> { | ||
347 | if matches!(self.get_head()?.as_pat(cx), Pat::Wild) { | ||
348 | Some(self.to_tail()) | ||
349 | } else { | ||
350 | None | ||
351 | } | ||
352 | } | ||
353 | |||
354 | /// Computes `S(constructor, self)`. | ||
355 | /// | ||
356 | /// See the module docs and the associated documentation in rustc for details. | ||
357 | fn specialize_constructor( | ||
358 | &self, | ||
359 | cx: &MatchCheckCtx, | ||
360 | constructor: &Constructor, | ||
361 | ) -> MatchCheckResult<Option<PatStack>> { | ||
362 | let head = match self.get_head() { | ||
363 | Some(head) => head, | ||
364 | None => return Ok(None), | ||
365 | }; | ||
366 | |||
367 | let head_pat = head.as_pat(cx); | ||
368 | let result = match (head_pat, constructor) { | ||
369 | (Pat::Tuple { args: ref pat_ids, ellipsis }, Constructor::Tuple { arity: _ }) => { | ||
370 | if ellipsis.is_some() { | ||
371 | // If there is an ellipsis here, we should add the correct number of | ||
372 | // Pat::Wild patterns to `pat_ids`. We should be able to use the | ||
373 | // constructor's arity for this, but at the time of writing we aren't | ||
374 | // correctly calculating this arity when an ellipsis is present. | ||
375 | return Err(MatchCheckErr::NotImplemented); | ||
376 | } | ||
377 | |||
378 | Some(self.replace_head_with(pat_ids.iter())) | ||
379 | } | ||
380 | (Pat::Lit(lit_expr), Constructor::Bool(constructor_val)) => { | ||
381 | match cx.body.exprs[lit_expr] { | ||
382 | Expr::Literal(Literal::Bool(pat_val)) if *constructor_val == pat_val => { | ||
383 | Some(self.to_tail()) | ||
384 | } | ||
385 | // it was a bool but the value doesn't match | ||
386 | Expr::Literal(Literal::Bool(_)) => None, | ||
387 | // perhaps this is actually unreachable given we have | ||
388 | // already checked that these match arms have the appropriate type? | ||
389 | _ => return Err(MatchCheckErr::NotImplemented), | ||
390 | } | ||
391 | } | ||
392 | (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), | ||
393 | (Pat::Path(_), Constructor::Enum(constructor)) => { | ||
394 | // unit enum variants become `Pat::Path` | ||
395 | let pat_id = head.as_id().expect("we know this isn't a wild"); | ||
396 | if !enum_variant_matches(cx, pat_id, *constructor) { | ||
397 | None | ||
398 | } else { | ||
399 | Some(self.to_tail()) | ||
400 | } | ||
401 | } | ||
402 | ( | ||
403 | Pat::TupleStruct { args: ref pat_ids, ellipsis, .. }, | ||
404 | Constructor::Enum(enum_constructor), | ||
405 | ) => { | ||
406 | let pat_id = head.as_id().expect("we know this isn't a wild"); | ||
407 | if !enum_variant_matches(cx, pat_id, *enum_constructor) { | ||
408 | None | ||
409 | } else { | ||
410 | let constructor_arity = constructor.arity(cx)?; | ||
411 | if let Some(ellipsis_position) = ellipsis { | ||
412 | // If there is an ellipsis in the pattern, it must take the place | ||
413 | // of at least one sub-pattern, so `pat_ids` should be smaller than the | ||
414 | // constructor arity. | ||
415 | if pat_ids.len() < constructor_arity { | ||
416 | let mut new_patterns: Vec<PatIdOrWild> = vec![]; | ||
417 | |||
418 | for pat_id in &pat_ids[0..ellipsis_position] { | ||
419 | new_patterns.push((*pat_id).into()); | ||
420 | } | ||
421 | |||
422 | for _ in 0..(constructor_arity - pat_ids.len()) { | ||
423 | new_patterns.push(PatIdOrWild::Wild); | ||
424 | } | ||
425 | |||
426 | for pat_id in &pat_ids[ellipsis_position..pat_ids.len()] { | ||
427 | new_patterns.push((*pat_id).into()); | ||
428 | } | ||
429 | |||
430 | Some(self.replace_head_with(new_patterns.into_iter())) | ||
431 | } else { | ||
432 | return Err(MatchCheckErr::MalformedMatchArm); | ||
433 | } | ||
434 | } else { | ||
435 | // If there is no ellipsis in the tuple pattern, the number | ||
436 | // of patterns must equal the constructor arity. | ||
437 | if pat_ids.len() == constructor_arity { | ||
438 | Some(self.replace_head_with(pat_ids.into_iter())) | ||
439 | } else { | ||
440 | return Err(MatchCheckErr::MalformedMatchArm); | ||
441 | } | ||
442 | } | ||
443 | } | ||
444 | } | ||
445 | (Pat::Record { args: ref arg_patterns, .. }, Constructor::Enum(e)) => { | ||
446 | let pat_id = head.as_id().expect("we know this isn't a wild"); | ||
447 | if !enum_variant_matches(cx, pat_id, *e) { | ||
448 | None | ||
449 | } else { | ||
450 | match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { | ||
451 | VariantData::Record(struct_field_arena) => { | ||
452 | // Here we treat any missing fields in the record as the wild pattern, as | ||
453 | // if the record has ellipsis. We want to do this here even if the | ||
454 | // record does not contain ellipsis, because it allows us to continue | ||
455 | // enforcing exhaustiveness for the rest of the match statement. | ||
456 | // | ||
457 | // Reporting the missing field in the pattern is the | ||
458 | // responsibility of a separate diagnostic. | ||
459 | let patterns = struct_field_arena.iter().map(|(_, struct_field)| { | ||
460 | arg_patterns | ||
461 | .iter() | ||
462 | .find(|pat| pat.name == struct_field.name) | ||
463 | .map(|pat| PatIdOrWild::from(pat.pat)) | ||
464 | .unwrap_or(PatIdOrWild::Wild) | ||
465 | }); | ||
466 | |||
467 | Some(self.replace_head_with(patterns)) | ||
468 | } | ||
469 | _ => return Err(MatchCheckErr::Unknown), | ||
470 | } | ||
471 | } | ||
472 | } | ||
473 | (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented), | ||
474 | (_, _) => return Err(MatchCheckErr::NotImplemented), | ||
475 | }; | ||
476 | |||
477 | Ok(result) | ||
478 | } | ||
479 | |||
480 | /// A special case of `specialize_constructor` where the head of the pattern stack | ||
481 | /// is a Wild pattern. | ||
482 | /// | ||
483 | /// Replaces the Wild pattern at the head of the pattern stack with N Wild patterns | ||
484 | /// (N >= 0), where N is the arity of the given constructor. | ||
485 | fn expand_wildcard( | ||
486 | &self, | ||
487 | cx: &MatchCheckCtx, | ||
488 | constructor: &Constructor, | ||
489 | ) -> MatchCheckResult<PatStack> { | ||
490 | assert_eq!( | ||
491 | Pat::Wild, | ||
492 | self.get_head().expect("expand_wildcard called on empty PatStack").as_pat(cx), | ||
493 | "expand_wildcard must only be called on PatStack with wild at head", | ||
494 | ); | ||
495 | |||
496 | let mut patterns: PatStackInner = smallvec![]; | ||
497 | |||
498 | for _ in 0..constructor.arity(cx)? { | ||
499 | patterns.push(PatIdOrWild::Wild); | ||
500 | } | ||
501 | |||
502 | for pat in &self.0[1..] { | ||
503 | patterns.push(*pat); | ||
504 | } | ||
505 | |||
506 | Ok(PatStack::from_vec(patterns)) | ||
507 | } | ||
508 | } | ||
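To make the row-wise behaviour of `specialize_constructor` (`S`) and `specialize_wildcard` (`D`) concrete, here is a self-contained toy sketch (editorial addition; the `Pat` enum and helper names are invented for illustration and are not part of this crate):

```rust
// Toy model of the matrix rows from the module docs, for patterns over Option<bool>.
#[derive(Clone, Debug, PartialEq)]
enum Pat {
    Wild,
    None_,
    Some_(Box<Pat>),
    Bool(bool),
}

// S(Some, row): keep rows whose head can match `Some`, replacing the head with the
// sub-pattern; a wildcard head contributes one wildcard per constructor argument.
fn specialize_some(row: &[Pat]) -> Option<Vec<Pat>> {
    let (head, tail) = row.split_first()?;
    let new_head = match head {
        Pat::Some_(inner) => (**inner).clone(),
        Pat::Wild => Pat::Wild,
        _ => return None, // row starts with a different constructor: drop it
    };
    let mut out = vec![new_head];
    out.extend_from_slice(tail);
    Some(out)
}

// D(row): keep only rows that start with a wildcard, popping that wildcard.
fn specialize_wildcard(row: &[Pat]) -> Option<Vec<Pat>> {
    match row.split_first()? {
        (Pat::Wild, tail) => Some(tail.to_vec()),
        _ => None,
    }
}

fn main() {
    // P = [[Some(true), _], [None, false], [_, true]]
    let p = vec![
        vec![Pat::Some_(Box::new(Pat::Bool(true))), Pat::Wild],
        vec![Pat::None_, Pat::Bool(false)],
        vec![Pat::Wild, Pat::Bool(true)],
    ];

    // S(Some, P) keeps rows 1 and 3, digging into the `Some` arguments.
    let s: Vec<_> = p.iter().filter_map(|r| specialize_some(r)).collect();
    assert_eq!(
        s,
        vec![vec![Pat::Bool(true), Pat::Wild], vec![Pat::Wild, Pat::Bool(true)]]
    );

    // D(P) keeps only row 3, which starts with a wildcard.
    let d: Vec<_> = p.iter().filter_map(|r| specialize_wildcard(r)).collect();
    assert_eq!(d, vec![vec![Pat::Bool(true)]]);

    println!("S(Some, P) = {:?}\nD(P) = {:?}", s, d);
}
```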
509 | |||
510 | /// A collection of PatStack. | ||
511 | /// | ||
512 | /// This type is modeled from the struct of the same name in `rustc`. | ||
513 | pub(super) struct Matrix(Vec<PatStack>); | ||
514 | |||
515 | impl Matrix { | ||
516 | pub(super) fn empty() -> Self { | ||
517 | Self(vec![]) | ||
518 | } | ||
519 | |||
520 | pub(super) fn push(&mut self, cx: &MatchCheckCtx, row: PatStack) { | ||
521 | if let Some(Pat::Or(pat_ids)) = row.get_head().map(|pat_id| pat_id.as_pat(cx)) { | ||
522 | // Or patterns are expanded here | ||
523 | for pat_id in pat_ids { | ||
524 | self.0.push(PatStack::from_pattern(pat_id)); | ||
525 | } | ||
526 | } else { | ||
527 | self.0.push(row); | ||
528 | } | ||
529 | } | ||
530 | |||
531 | fn is_empty(&self) -> bool { | ||
532 | self.0.is_empty() | ||
533 | } | ||
534 | |||
535 | fn heads(&self) -> Vec<PatIdOrWild> { | ||
536 | self.0.iter().flat_map(|p| p.get_head()).collect() | ||
537 | } | ||
538 | |||
539 | /// Computes `D(self)` for each contained PatStack. | ||
540 | /// | ||
541 | /// See the module docs and the associated documentation in rustc for details. | ||
542 | fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Self { | ||
543 | Self::collect(cx, self.0.iter().filter_map(|r| r.specialize_wildcard(cx))) | ||
544 | } | ||
545 | |||
546 | /// Computes `S(constructor, self)` for each contained PatStack. | ||
547 | /// | ||
548 | /// See the module docs and the associated documentation in rustc for details. | ||
549 | fn specialize_constructor( | ||
550 | &self, | ||
551 | cx: &MatchCheckCtx, | ||
552 | constructor: &Constructor, | ||
553 | ) -> MatchCheckResult<Self> { | ||
554 | let mut new_matrix = Matrix::empty(); | ||
555 | for pat in &self.0 { | ||
556 | if let Some(pat) = pat.specialize_constructor(cx, constructor)? { | ||
557 | new_matrix.push(cx, pat); | ||
558 | } | ||
559 | } | ||
560 | |||
561 | Ok(new_matrix) | ||
562 | } | ||
563 | |||
564 | fn collect<T: IntoIterator<Item = PatStack>>(cx: &MatchCheckCtx, iter: T) -> Self { | ||
565 | let mut matrix = Matrix::empty(); | ||
566 | |||
567 | for pat in iter { | ||
568 | // using push ensures we expand or-patterns | ||
569 | matrix.push(cx, pat); | ||
570 | } | ||
571 | |||
572 | matrix | ||
573 | } | ||
574 | } | ||
575 | |||
576 | #[derive(Clone, Debug, PartialEq)] | ||
577 | /// An indication of the usefulness of a given match arm, where | ||
578 | /// usefulness is defined as matching some values which are | ||
579 | /// not matched by any prior match arm. | ||
580 | /// | ||
581 | /// We may eventually need an `Unknown` variant here. | ||
582 | pub(super) enum Usefulness { | ||
583 | Useful, | ||
584 | NotUseful, | ||
585 | } | ||
586 | |||
587 | pub(super) struct MatchCheckCtx<'a> { | ||
588 | pub(super) match_expr: Idx<Expr>, | ||
589 | pub(super) body: Arc<Body>, | ||
590 | pub(super) infer: Arc<InferenceResult>, | ||
591 | pub(super) db: &'a dyn HirDatabase, | ||
592 | } | ||
593 | |||
594 | /// Given a set of patterns `matrix` and a pattern to consider `v`, determines | ||
595 | /// whether `v` is useful. A pattern is useful if it covers cases which were | ||
596 | /// not previously covered. | ||
597 | /// | ||
598 | /// When calling this function externally (that is, not from the recursive calls) it is | ||
599 | /// expected that you have already type checked the match arms. All patterns in | ||
600 | /// `matrix` should have the same type as `v`, and they should all have the same | ||
601 | /// type as the match expression. | ||
602 | pub(super) fn is_useful( | ||
603 | cx: &MatchCheckCtx, | ||
604 | matrix: &Matrix, | ||
605 | v: &PatStack, | ||
606 | ) -> MatchCheckResult<Usefulness> { | ||
607 | // Handle two special cases: | ||
608 | // - enum with no variants | ||
609 | // - `!` type | ||
610 | // In those cases, no match arm is useful. | ||
611 | match cx.infer[cx.match_expr].strip_references() { | ||
612 | Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(AdtId::EnumId(enum_id)), .. }) => { | ||
613 | if cx.db.enum_data(*enum_id).variants.is_empty() { | ||
614 | return Ok(Usefulness::NotUseful); | ||
615 | } | ||
616 | } | ||
617 | Ty::Apply(ApplicationTy { ctor: TypeCtor::Never, .. }) => { | ||
618 | return Ok(Usefulness::NotUseful); | ||
619 | } | ||
620 | _ => (), | ||
621 | } | ||
622 | |||
623 | let head = match v.get_head() { | ||
624 | Some(head) => head, | ||
625 | None => { | ||
626 | let result = if matrix.is_empty() { Usefulness::Useful } else { Usefulness::NotUseful }; | ||
627 | |||
628 | return Ok(result); | ||
629 | } | ||
630 | }; | ||
631 | |||
632 | if let Pat::Or(pat_ids) = head.as_pat(cx) { | ||
633 | let mut found_unimplemented = false; | ||
634 | let any_useful = pat_ids.iter().any(|&pat_id| { | ||
635 | let v = PatStack::from_pattern(pat_id); | ||
636 | |||
637 | match is_useful(cx, matrix, &v) { | ||
638 | Ok(Usefulness::Useful) => true, | ||
639 | Ok(Usefulness::NotUseful) => false, | ||
640 | _ => { | ||
641 | found_unimplemented = true; | ||
642 | false | ||
643 | } | ||
644 | } | ||
645 | }); | ||
646 | |||
647 | return if any_useful { | ||
648 | Ok(Usefulness::Useful) | ||
649 | } else if found_unimplemented { | ||
650 | Err(MatchCheckErr::NotImplemented) | ||
651 | } else { | ||
652 | Ok(Usefulness::NotUseful) | ||
653 | }; | ||
654 | } | ||
655 | |||
656 | if let Some(constructor) = pat_constructor(cx, head)? { | ||
657 | let matrix = matrix.specialize_constructor(&cx, &constructor)?; | ||
658 | let v = v | ||
659 | .specialize_constructor(&cx, &constructor)? | ||
660 | .expect("we know this can't fail because we get the constructor from `v.head()` above"); | ||
661 | |||
662 | is_useful(&cx, &matrix, &v) | ||
663 | } else { | ||
664 | // expanding wildcard | ||
665 | let mut used_constructors: Vec<Constructor> = vec![]; | ||
666 | for pat in matrix.heads() { | ||
667 | if let Some(constructor) = pat_constructor(cx, pat)? { | ||
668 | used_constructors.push(constructor); | ||
669 | } | ||
670 | } | ||
671 | |||
672 | // We assume here that the first constructor is of the "correct" type. Since we | ||
673 | // only care about the "type" of the constructor (i.e. if it is a bool we | ||
674 | // don't care about the value), this assumption should be valid as long as | ||
675 | // the match statement is well formed. We currently uphold this invariant by | ||
676 | // filtering match arms before calling `is_useful`, only passing in match arms | ||
677 | // whose type matches the type of the match expression. | ||
678 | match &used_constructors.first() { | ||
679 | Some(constructor) if all_constructors_covered(&cx, constructor, &used_constructors) => { | ||
680 | // If all constructors are covered, then we need to consider whether | ||
681 | // any values are covered by this wildcard. | ||
682 | // | ||
683 | // For example, with matrix '[[Some(true)], [None]]', all | ||
684 | // constructors are covered (`Some`/`None`), so we need | ||
685 | // to perform specialization to see that our wildcard will cover | ||
686 | // the `Some(false)` case. | ||
687 | // | ||
688 | // Here we create a constructor for each variant and then check | ||
689 | // usefulness after specializing for that constructor. | ||
690 | let mut found_unimplemented = false; | ||
691 | for constructor in constructor.all_constructors(cx) { | ||
692 | let matrix = matrix.specialize_constructor(&cx, &constructor)?; | ||
693 | let v = v.expand_wildcard(&cx, &constructor)?; | ||
694 | |||
695 | match is_useful(&cx, &matrix, &v) { | ||
696 | Ok(Usefulness::Useful) => return Ok(Usefulness::Useful), | ||
697 | Ok(Usefulness::NotUseful) => continue, | ||
698 | _ => found_unimplemented = true, | ||
699 | }; | ||
700 | } | ||
701 | |||
702 | if found_unimplemented { | ||
703 | Err(MatchCheckErr::NotImplemented) | ||
704 | } else { | ||
705 | Ok(Usefulness::NotUseful) | ||
706 | } | ||
707 | } | ||
708 | _ => { | ||
709 | // Either not all constructors are covered, or the only other arms | ||
710 | // are wildcards. Either way, this pattern is useful if it is useful | ||
711 | // when compared to those arms with wildcards. | ||
712 | let matrix = matrix.specialize_wildcard(&cx); | ||
713 | let v = v.to_tail(); | ||
714 | |||
715 | is_useful(&cx, &matrix, &v) | ||
716 | } | ||
717 | } | ||
718 | } | ||
719 | } | ||
720 | |||
721 | #[derive(Debug, Clone, Copy)] | ||
722 | /// Similar to TypeCtor, but includes additional information about the specific | ||
723 | /// value being instantiated. For example, TypeCtor::Bool doesn't contain the | ||
724 | /// boolean value. | ||
725 | enum Constructor { | ||
726 | Bool(bool), | ||
727 | Tuple { arity: usize }, | ||
728 | Enum(EnumVariantId), | ||
729 | } | ||
730 | |||
731 | impl Constructor { | ||
732 | fn arity(&self, cx: &MatchCheckCtx) -> MatchCheckResult<usize> { | ||
733 | let arity = match self { | ||
734 | Constructor::Bool(_) => 0, | ||
735 | Constructor::Tuple { arity } => *arity, | ||
736 | Constructor::Enum(e) => { | ||
737 | match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { | ||
738 | VariantData::Tuple(struct_field_data) => struct_field_data.len(), | ||
739 | VariantData::Record(struct_field_data) => struct_field_data.len(), | ||
740 | VariantData::Unit => 0, | ||
741 | } | ||
742 | } | ||
743 | }; | ||
744 | |||
745 | Ok(arity) | ||
746 | } | ||
747 | |||
748 | fn all_constructors(&self, cx: &MatchCheckCtx) -> Vec<Constructor> { | ||
749 | match self { | ||
750 | Constructor::Bool(_) => vec![Constructor::Bool(true), Constructor::Bool(false)], | ||
751 | Constructor::Tuple { .. } => vec![*self], | ||
752 | Constructor::Enum(e) => cx | ||
753 | .db | ||
754 | .enum_data(e.parent) | ||
755 | .variants | ||
756 | .iter() | ||
757 | .map(|(local_id, _)| { | ||
758 | Constructor::Enum(EnumVariantId { parent: e.parent, local_id }) | ||
759 | }) | ||
760 | .collect(), | ||
761 | } | ||
762 | } | ||
763 | } | ||
764 | |||
765 | /// Returns the constructor for the given pattern. Should only return None | ||
766 | /// in the case of a Wild pattern. | ||
767 | fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Option<Constructor>> { | ||
768 | let res = match pat.as_pat(cx) { | ||
769 | Pat::Wild => None, | ||
770 | // FIXME somehow create the Tuple constructor with the proper arity. If there are | ||
771 | // ellipsis, the arity is not equal to the number of patterns. | ||
772 | Pat::Tuple { args: pats, ellipsis } if ellipsis.is_none() => { | ||
773 | Some(Constructor::Tuple { arity: pats.len() }) | ||
774 | } | ||
775 | Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { | ||
776 | Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), | ||
777 | _ => return Err(MatchCheckErr::NotImplemented), | ||
778 | }, | ||
779 | Pat::TupleStruct { .. } | Pat::Path(_) | Pat::Record { .. } => { | ||
780 | let pat_id = pat.as_id().expect("we already know this pattern is not a wild"); | ||
781 | let variant_id = | ||
782 | cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::Unknown)?; | ||
783 | match variant_id { | ||
784 | VariantId::EnumVariantId(enum_variant_id) => { | ||
785 | Some(Constructor::Enum(enum_variant_id)) | ||
786 | } | ||
787 | _ => return Err(MatchCheckErr::NotImplemented), | ||
788 | } | ||
789 | } | ||
790 | _ => return Err(MatchCheckErr::NotImplemented), | ||
791 | }; | ||
792 | |||
793 | Ok(res) | ||
794 | } | ||
795 | |||
796 | fn all_constructors_covered( | ||
797 | cx: &MatchCheckCtx, | ||
798 | constructor: &Constructor, | ||
799 | used_constructors: &[Constructor], | ||
800 | ) -> bool { | ||
801 | match constructor { | ||
802 | Constructor::Tuple { arity } => { | ||
803 | used_constructors.iter().any(|constructor| match constructor { | ||
804 | Constructor::Tuple { arity: used_arity } => arity == used_arity, | ||
805 | _ => false, | ||
806 | }) | ||
807 | } | ||
808 | Constructor::Bool(_) => { | ||
809 | if used_constructors.is_empty() { | ||
810 | return false; | ||
811 | } | ||
812 | |||
813 | let covers_true = | ||
814 | used_constructors.iter().any(|c| matches!(c, Constructor::Bool(true))); | ||
815 | let covers_false = | ||
816 | used_constructors.iter().any(|c| matches!(c, Constructor::Bool(false))); | ||
817 | |||
818 | covers_true && covers_false | ||
819 | } | ||
820 | Constructor::Enum(e) => cx.db.enum_data(e.parent).variants.iter().all(|(id, _)| { | ||
821 | for constructor in used_constructors { | ||
822 | if let Constructor::Enum(e) = constructor { | ||
823 | if id == e.local_id { | ||
824 | return true; | ||
825 | } | ||
826 | } | ||
827 | } | ||
828 | |||
829 | false | ||
830 | }), | ||
831 | } | ||
832 | } | ||
833 | |||
834 | fn enum_variant_matches(cx: &MatchCheckCtx, pat_id: PatId, enum_variant_id: EnumVariantId) -> bool { | ||
835 | Some(enum_variant_id.into()) == cx.infer.variant_resolution_for_pat(pat_id) | ||
836 | } | ||
837 | |||
838 | #[cfg(test)] | ||
839 | mod tests { | ||
840 | use crate::diagnostics::tests::check_diagnostics; | ||
841 | |||
842 | #[test] | ||
843 | fn empty_tuple() { | ||
844 | check_diagnostics( | ||
845 | r#" | ||
846 | fn main() { | ||
847 | match () { } | ||
848 | //^^ Missing match arm | ||
849 | match (()) { } | ||
850 | //^^^^ Missing match arm | ||
851 | |||
852 | match () { _ => (), } | ||
853 | match () { () => (), } | ||
854 | match (()) { (()) => (), } | ||
855 | } | ||
856 | "#, | ||
857 | ); | ||
858 | } | ||
859 | |||
860 | #[test] | ||
861 | fn tuple_of_two_empty_tuple() { | ||
862 | check_diagnostics( | ||
863 | r#" | ||
864 | fn main() { | ||
865 | match ((), ()) { } | ||
866 | //^^^^^^^^ Missing match arm | ||
867 | |||
868 | match ((), ()) { ((), ()) => (), } | ||
869 | } | ||
870 | "#, | ||
871 | ); | ||
872 | } | ||
873 | |||
874 | #[test] | ||
875 | fn boolean() { | ||
876 | check_diagnostics( | ||
877 | r#" | ||
878 | fn test_main() { | ||
879 | match false { } | ||
880 | //^^^^^ Missing match arm | ||
881 | match false { true => (), } | ||
882 | //^^^^^ Missing match arm | ||
883 | match (false, true) {} | ||
884 | //^^^^^^^^^^^^^ Missing match arm | ||
885 | match (false, true) { (true, true) => (), } | ||
886 | //^^^^^^^^^^^^^ Missing match arm | ||
887 | match (false, true) { | ||
888 | //^^^^^^^^^^^^^ Missing match arm | ||
889 | (false, true) => (), | ||
890 | (false, false) => (), | ||
891 | (true, false) => (), | ||
892 | } | ||
893 | match (false, true) { (true, _x) => (), } | ||
894 | //^^^^^^^^^^^^^ Missing match arm | ||
895 | |||
896 | match false { true => (), false => (), } | ||
897 | match (false, true) { | ||
898 | (false, _) => (), | ||
899 | (true, false) => (), | ||
900 | (_, true) => (), | ||
901 | } | ||
902 | match (false, true) { | ||
903 | (true, true) => (), | ||
904 | (true, false) => (), | ||
905 | (false, true) => (), | ||
906 | (false, false) => (), | ||
907 | } | ||
908 | match (false, true) { | ||
909 | (true, _x) => (), | ||
910 | (false, true) => (), | ||
911 | (false, false) => (), | ||
912 | } | ||
913 | match (false, true, false) { | ||
914 | (false, ..) => (), | ||
915 | (true, ..) => (), | ||
916 | } | ||
917 | match (false, true, false) { | ||
918 | (.., false) => (), | ||
919 | (.., true) => (), | ||
920 | } | ||
921 | match (false, true, false) { (..) => (), } | ||
922 | } | ||
923 | "#, | ||
924 | ); | ||
925 | } | ||
926 | |||
927 | #[test] | ||
928 | fn tuple_of_tuple_and_bools() { | ||
929 | check_diagnostics( | ||
930 | r#" | ||
931 | fn main() { | ||
932 | match (false, ((), false)) {} | ||
933 | //^^^^^^^^^^^^^^^^^^^^ Missing match arm | ||
934 | match (false, ((), false)) { (true, ((), true)) => (), } | ||
935 | //^^^^^^^^^^^^^^^^^^^^ Missing match arm | ||
936 | match (false, ((), false)) { (true, _) => (), } | ||
937 | //^^^^^^^^^^^^^^^^^^^^ Missing match arm | ||
938 | |||
939 | match (false, ((), false)) { | ||
940 | (true, ((), true)) => (), | ||
941 | (true, ((), false)) => (), | ||
942 | (false, ((), true)) => (), | ||
943 | (false, ((), false)) => (), | ||
944 | } | ||
945 | match (false, ((), false)) { | ||
946 | (true, ((), true)) => (), | ||
947 | (true, ((), false)) => (), | ||
948 | (false, _) => (), | ||
949 | } | ||
950 | } | ||
951 | "#, | ||
952 | ); | ||
953 | } | ||
954 | |||
955 | #[test] | ||
956 | fn enums() { | ||
957 | check_diagnostics( | ||
958 | r#" | ||
959 | enum Either { A, B, } | ||
960 | |||
961 | fn main() { | ||
962 | match Either::A { } | ||
963 | //^^^^^^^^^ Missing match arm | ||
964 | match Either::B { Either::A => (), } | ||
965 | //^^^^^^^^^ Missing match arm | ||
966 | |||
967 | match &Either::B { | ||
968 | //^^^^^^^^^^ Missing match arm | ||
969 | Either::A => (), | ||
970 | } | ||
971 | |||
972 | match Either::B { | ||
973 | Either::A => (), Either::B => (), | ||
974 | } | ||
975 | match &Either::B { | ||
976 | Either::A => (), Either::B => (), | ||
977 | } | ||
978 | } | ||
979 | "#, | ||
980 | ); | ||
981 | } | ||
982 | |||
983 | #[test] | ||
984 | fn enum_containing_bool() { | ||
985 | check_diagnostics( | ||
986 | r#" | ||
987 | enum Either { A(bool), B } | ||
988 | |||
989 | fn main() { | ||
990 | match Either::B { } | ||
991 | //^^^^^^^^^ Missing match arm | ||
992 | match Either::B { | ||
993 | //^^^^^^^^^ Missing match arm | ||
994 | Either::A(true) => (), Either::B => () | ||
995 | } | ||
996 | |||
997 | match Either::B { | ||
998 | Either::A(true) => (), | ||
999 | Either::A(false) => (), | ||
1000 | Either::B => (), | ||
1001 | } | ||
1002 | match Either::B { | ||
1003 | Either::B => (), | ||
1004 | _ => (), | ||
1005 | } | ||
1006 | match Either::B { | ||
1007 | Either::A(_) => (), | ||
1008 | Either::B => (), | ||
1009 | } | ||
1010 | |||
1011 | } | ||
1012 | "#, | ||
1013 | ); | ||
1014 | } | ||
1015 | |||
1016 | #[test] | ||
1017 | fn enum_different_sizes() { | ||
1018 | check_diagnostics( | ||
1019 | r#" | ||
1020 | enum Either { A(bool), B(bool, bool) } | ||
1021 | |||
1022 | fn main() { | ||
1023 | match Either::A(false) { | ||
1024 | //^^^^^^^^^^^^^^^^ Missing match arm | ||
1025 | Either::A(_) => (), | ||
1026 | Either::B(false, _) => (), | ||
1027 | } | ||
1028 | |||
1029 | match Either::A(false) { | ||
1030 | Either::A(_) => (), | ||
1031 | Either::B(true, _) => (), | ||
1032 | Either::B(false, _) => (), | ||
1033 | } | ||
1034 | match Either::A(false) { | ||
1035 | Either::A(true) | Either::A(false) => (), | ||
1036 | Either::B(true, _) => (), | ||
1037 | Either::B(false, _) => (), | ||
1038 | } | ||
1039 | } | ||
1040 | "#, | ||
1041 | ); | ||
1042 | } | ||
1043 | |||
1044 | #[test] | ||
1045 | fn tuple_of_enum_no_diagnostic() { | ||
1046 | check_diagnostics( | ||
1047 | r#" | ||
1048 | enum Either { A(bool), B(bool, bool) } | ||
1049 | enum Either2 { C, D } | ||
1050 | |||
1051 | fn main() { | ||
1052 | match (Either::A(false), Either2::C) { | ||
1053 | (Either::A(true), _) | (Either::A(false), _) => (), | ||
1054 | (Either::B(true, _), Either2::C) => (), | ||
1055 | (Either::B(false, _), Either2::C) => (), | ||
1056 | (Either::B(_, _), Either2::D) => (), | ||
1057 | } | ||
1058 | } | ||
1059 | "#, | ||
1060 | ); | ||
1061 | } | ||
1062 | |||
1063 | #[test] | ||
1064 | fn mismatched_types() { | ||
1065 | // Match statements with arms whose patterns don't match the | ||
1066 | // type of the match expression do not fire this diagnostic. | ||
1067 | check_diagnostics( | ||
1068 | r#" | ||
1069 | enum Either { A, B } | ||
1070 | enum Either2 { C, D } | ||
1071 | |||
1072 | fn main() { | ||
1073 | match Either::A { | ||
1074 | Either2::C => (), | ||
1075 | Either2::D => (), | ||
1076 | } | ||
1077 | match (true, false) { | ||
1078 | (true, false, true) => (), | ||
1079 | (true) => (), | ||
1080 | } | ||
1081 | match (0) { () => () } | ||
1082 | match Unresolved::Bar { Unresolved::Baz => () } | ||
1083 | } | ||
1084 | "#, | ||
1085 | ); | ||
1086 | } | ||
1087 | |||
1088 | #[test] | ||
1089 | fn malformed_match_arm_tuple_enum_missing_pattern() { | ||
1090 | // We are testing to be sure we don't panic here when the match | ||
1091 | // arm `Either::B` is missing its pattern. | ||
1092 | check_diagnostics( | ||
1093 | r#" | ||
1094 | enum Either { A, B(u32) } | ||
1095 | |||
1096 | fn main() { | ||
1097 | match Either::A { | ||
1098 | Either::A => (), | ||
1099 | Either::B() => (), | ||
1100 | } | ||
1101 | } | ||
1102 | "#, | ||
1103 | ); | ||
1104 | } | ||
1105 | |||
1106 | #[test] | ||
1107 | fn expr_diverges() { | ||
1108 | check_diagnostics( | ||
1109 | r#" | ||
1110 | enum Either { A, B } | ||
1111 | |||
1112 | fn main() { | ||
1113 | match loop {} { | ||
1114 | Either::A => (), | ||
1115 | Either::B => (), | ||
1116 | } | ||
1117 | match loop {} { | ||
1118 | Either::A => (), | ||
1119 | } | ||
1120 | match loop { break Foo::A } { | ||
1121 | //^^^^^^^^^^^^^^^^^^^^^ Missing match arm | ||
1122 | Either::A => (), | ||
1123 | } | ||
1124 | match loop { break Foo::A } { | ||
1125 | Either::A => (), | ||
1126 | Either::B => (), | ||
1127 | } | ||
1128 | } | ||
1129 | "#, | ||
1130 | ); | ||
1131 | } | ||
1132 | |||
1133 | #[test] | ||
1134 | fn expr_partially_diverges() { | ||
1135 | check_diagnostics( | ||
1136 | r#" | ||
1137 | enum Either<T> { A(T), B } | ||
1138 | |||
1139 | fn foo() -> Either<!> { Either::B } | ||
1140 | fn main() -> u32 { | ||
1141 | match foo() { | ||
1142 | Either::A(val) => val, | ||
1143 | Either::B => 0, | ||
1144 | } | ||
1145 | } | ||
1146 | "#, | ||
1147 | ); | ||
1148 | } | ||
1149 | |||
1150 | #[test] | ||
1151 | fn enum_record() { | ||
1152 | check_diagnostics( | ||
1153 | r#" | ||
1154 | enum Either { A { foo: bool }, B } | ||
1155 | |||
1156 | fn main() { | ||
1157 | let a = Either::A { foo: true }; | ||
1158 | match a { } | ||
1159 | //^ Missing match arm | ||
1160 | match a { Either::A { foo: true } => () } | ||
1161 | //^ Missing match arm | ||
1162 | match a { | ||
1163 | Either::A { } => (), | ||
1164 | //^^^^^^^^^ Missing structure fields: | ||
1165 | // | - foo | ||
1166 | Either::B => (), | ||
1167 | } | ||
1168 | match a { | ||
1169 | //^ Missing match arm | ||
1170 | Either::A { } => (), | ||
1171 | } //^^^^^^^^^ Missing structure fields: | ||
1172 | // | - foo | ||
1173 | |||
1174 | match a { | ||
1175 | Either::A { foo: true } => (), | ||
1176 | Either::A { foo: false } => (), | ||
1177 | Either::B => (), | ||
1178 | } | ||
1179 | match a { | ||
1180 | Either::A { foo: _ } => (), | ||
1181 | Either::B => (), | ||
1182 | } | ||
1183 | } | ||
1184 | "#, | ||
1185 | ); | ||
1186 | } | ||
1187 | |||
1188 | #[test] | ||
1189 | fn enum_record_fields_out_of_order() { | ||
1190 | check_diagnostics( | ||
1191 | r#" | ||
1192 | enum Either { | ||
1193 | A { foo: bool, bar: () }, | ||
1194 | B, | ||
1195 | } | ||
1196 | |||
1197 | fn main() { | ||
1198 | let a = Either::A { foo: true, bar: () }; | ||
1199 | match a { | ||
1200 | //^ Missing match arm | ||
1201 | Either::A { bar: (), foo: false } => (), | ||
1202 | Either::A { foo: true, bar: () } => (), | ||
1203 | } | ||
1204 | |||
1205 | match a { | ||
1206 | Either::A { bar: (), foo: false } => (), | ||
1207 | Either::A { foo: true, bar: () } => (), | ||
1208 | Either::B => (), | ||
1209 | } | ||
1210 | } | ||
1211 | "#, | ||
1212 | ); | ||
1213 | } | ||
1214 | |||
1215 | #[test] | ||
1216 | fn enum_record_ellipsis() { | ||
1217 | check_diagnostics( | ||
1218 | r#" | ||
1219 | enum Either { | ||
1220 | A { foo: bool, bar: bool }, | ||
1221 | B, | ||
1222 | } | ||
1223 | |||
1224 | fn main() { | ||
1225 | let a = Either::B; | ||
1226 | match a { | ||
1227 | //^ Missing match arm | ||
1228 | Either::A { foo: true, .. } => (), | ||
1229 | Either::B => (), | ||
1230 | } | ||
1231 | match a { | ||
1232 | //^ Missing match arm | ||
1233 | Either::A { .. } => (), | ||
1234 | } | ||
1235 | |||
1236 | match a { | ||
1237 | Either::A { foo: true, .. } => (), | ||
1238 | Either::A { foo: false, .. } => (), | ||
1239 | Either::B => (), | ||
1240 | } | ||
1241 | |||
1242 | match a { | ||
1243 | Either::A { .. } => (), | ||
1244 | Either::B => (), | ||
1245 | } | ||
1246 | } | ||
1247 | "#, | ||
1248 | ); | ||
1249 | } | ||
1250 | |||
1251 | #[test] | ||
1252 | fn enum_tuple_partial_ellipsis() { | ||
1253 | check_diagnostics( | ||
1254 | r#" | ||
1255 | enum Either { | ||
1256 | A(bool, bool, bool, bool), | ||
1257 | B, | ||
1258 | } | ||
1259 | |||
1260 | fn main() { | ||
1261 | match Either::B { | ||
1262 | //^^^^^^^^^ Missing match arm | ||
1263 | Either::A(true, .., true) => (), | ||
1264 | Either::A(true, .., false) => (), | ||
1265 | Either::A(false, .., false) => (), | ||
1266 | Either::B => (), | ||
1267 | } | ||
1268 | match Either::B { | ||
1269 | //^^^^^^^^^ Missing match arm | ||
1270 | Either::A(true, .., true) => (), | ||
1271 | Either::A(true, .., false) => (), | ||
1272 | Either::A(.., true) => (), | ||
1273 | Either::B => (), | ||
1274 | } | ||
1275 | |||
1276 | match Either::B { | ||
1277 | Either::A(true, .., true) => (), | ||
1278 | Either::A(true, .., false) => (), | ||
1279 | Either::A(false, .., true) => (), | ||
1280 | Either::A(false, .., false) => (), | ||
1281 | Either::B => (), | ||
1282 | } | ||
1283 | match Either::B { | ||
1284 | Either::A(true, .., true) => (), | ||
1285 | Either::A(true, .., false) => (), | ||
1286 | Either::A(.., true) => (), | ||
1287 | Either::A(.., false) => (), | ||
1288 | Either::B => (), | ||
1289 | } | ||
1290 | } | ||
1291 | "#, | ||
1292 | ); | ||
1293 | } | ||
1294 | |||
1295 | #[test] | ||
1296 | fn never() { | ||
1297 | check_diagnostics( | ||
1298 | r#" | ||
1299 | enum Never {} | ||
1300 | |||
1301 | fn enum_(never: Never) { | ||
1302 | match never {} | ||
1303 | } | ||
1304 | fn enum_ref(never: &Never) { | ||
1305 | match never {} | ||
1306 | } | ||
1307 | fn bang(never: !) { | ||
1308 | match never {} | ||
1309 | } | ||
1310 | "#, | ||
1311 | ); | ||
1312 | } | ||
1313 | |||
1314 | #[test] | ||
1315 | fn or_pattern_panic() { | ||
1316 | check_diagnostics( | ||
1317 | r#" | ||
1318 | pub enum Category { Infinity, Zero } | ||
1319 | |||
1320 | fn panic(a: Category, b: Category) { | ||
1321 | match (a, b) { | ||
1322 | (Category::Zero | Category::Infinity, _) => (), | ||
1323 | (_, Category::Zero | Category::Infinity) => (), | ||
1324 | } | ||
1325 | |||
1326 | // FIXME: This is a false positive, but the code used to cause a panic in the match checker, | ||
1327 | // so this acts as a regression test for that. | ||
1328 | match (a, b) { | ||
1329 | //^^^^^^ Missing match arm | ||
1330 | (Category::Infinity, Category::Infinity) | (Category::Zero, Category::Zero) => (), | ||
1331 | (Category::Infinity | Category::Zero, _) => (), | ||
1332 | } | ||
1333 | } | ||
1334 | "#, | ||
1335 | ); | ||
1336 | } | ||
1337 | |||
1338 | mod false_negatives { | ||
1339 | //! The implementation of match checking here is a work in progress. As we roll this out, we | ||
1340 | //! prefer false negatives to false positives (ideally there would be no false positives). This | ||
1341 | //! test module should document known false negatives. Eventually we will have a complete | ||
1342 | //! implementation of match checking and this module will be empty. | ||
1343 | //! | ||
1344 | //! The reasons for documenting known false negatives: | ||
1345 | //! | ||
1346 | //! 1. It acts as a backlog of work that can be done to improve the behavior of the system. | ||
1347 | //! 2. It ensures the code doesn't panic when handling these cases. | ||
1348 | use super::*; | ||
1349 | |||
1350 | #[test] | ||
1351 | fn integers() { | ||
1352 | // We don't currently check integer exhaustiveness. | ||
1353 | check_diagnostics( | ||
1354 | r#" | ||
1355 | fn main() { | ||
1356 | match 5 { | ||
1357 | 10 => (), | ||
1358 | 11..20 => (), | ||
1359 | } | ||
1360 | } | ||
1361 | "#, | ||
1362 | ); | ||
1363 | } | ||
1364 | |||
1365 | #[test] | ||
1366 | fn internal_or() { | ||
1367 | // We do not currently handle patterns with internal `or`s. | ||
1368 | check_diagnostics( | ||
1369 | r#" | ||
1370 | fn main() { | ||
1371 | enum Either { A(bool), B } | ||
1372 | match Either::B { | ||
1373 | Either::A(true | false) => (), | ||
1374 | } | ||
1375 | } | ||
1376 | "#, | ||
1377 | ); | ||
1378 | } | ||
1379 | |||
1380 | #[test] | ||
1381 | fn tuple_of_bools_with_ellipsis_at_end_missing_arm() { | ||
1382 | // We don't currently handle tuple patterns with ellipsis. | ||
1383 | check_diagnostics( | ||
1384 | r#" | ||
1385 | fn main() { | ||
1386 | match (false, true, false) { | ||
1387 | (false, ..) => (), | ||
1388 | } | ||
1389 | } | ||
1390 | "#, | ||
1391 | ); | ||
1392 | } | ||
1393 | |||
1394 | #[test] | ||
1395 | fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() { | ||
1396 | // We don't currently handle tuple patterns with ellipsis. | ||
1397 | check_diagnostics( | ||
1398 | r#" | ||
1399 | fn main() { | ||
1400 | match (false, true, false) { | ||
1401 | (.., false) => (), | ||
1402 | } | ||
1403 | } | ||
1404 | "#, | ||
1405 | ); | ||
1406 | } | ||
1407 | |||
1408 | #[test] | ||
1409 | fn struct_missing_arm() { | ||
1410 | // We don't currently handle structs. | ||
1411 | check_diagnostics( | ||
1412 | r#" | ||
1413 | struct Foo { a: bool } | ||
1414 | fn main(f: Foo) { | ||
1415 | match f { Foo { a: true } => () } | ||
1416 | } | ||
1417 | "#, | ||
1418 | ); | ||
1419 | } | ||
1420 | } | ||
1421 | } | ||
diff --git a/crates/hir_ty/src/diagnostics/unsafe_check.rs b/crates/hir_ty/src/diagnostics/unsafe_check.rs new file mode 100644 index 000000000..61ffbf5d1 --- /dev/null +++ b/crates/hir_ty/src/diagnostics/unsafe_check.rs | |||
@@ -0,0 +1,205 @@ | |||
1 | //! Provides validations for unsafe code. Currently checks whether unsafe operations are | ||
2 | //! missing an enclosing `unsafe` block or `unsafe` function. | ||
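As a quick illustration (editorial addition, not part of this file), these are the kinds of expressions the validator below reports when they are not wrapped in an `unsafe` block or an `unsafe fn`:

```rust
// Illustration only: each operation inside the `unsafe` block would be reported
// as "missing unsafe" if the block were removed.
static mut COUNTER: u32 = 0;

unsafe fn unchecked_add(a: u32, b: u32) -> u32 {
    a.wrapping_add(b)
}

fn main() {
    let x = 5u32;
    let p = &x as *const u32;
    unsafe {
        let _deref = *p;                 // dereference of a raw pointer
        let _call = unchecked_add(1, 2); // call to an unsafe fn
        COUNTER += 1;                    // access to a mutable static
    }
}
```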
3 | |||
4 | use std::sync::Arc; | ||
5 | |||
6 | use hir_def::{ | ||
7 | body::Body, | ||
8 | expr::{Expr, ExprId, UnaryOp}, | ||
9 | resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, | ||
10 | DefWithBodyId, | ||
11 | }; | ||
12 | use hir_expand::diagnostics::DiagnosticSink; | ||
13 | |||
14 | use crate::{ | ||
15 | db::HirDatabase, diagnostics::MissingUnsafe, lower::CallableDefId, ApplicationTy, | ||
16 | InferenceResult, Ty, TypeCtor, | ||
17 | }; | ||
18 | |||
19 | pub(super) struct UnsafeValidator<'a, 'b: 'a> { | ||
20 | owner: DefWithBodyId, | ||
21 | infer: Arc<InferenceResult>, | ||
22 | sink: &'a mut DiagnosticSink<'b>, | ||
23 | } | ||
24 | |||
25 | impl<'a, 'b> UnsafeValidator<'a, 'b> { | ||
26 | pub(super) fn new( | ||
27 | owner: DefWithBodyId, | ||
28 | infer: Arc<InferenceResult>, | ||
29 | sink: &'a mut DiagnosticSink<'b>, | ||
30 | ) -> UnsafeValidator<'a, 'b> { | ||
31 | UnsafeValidator { owner, infer, sink } | ||
32 | } | ||
33 | |||
34 | pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) { | ||
35 | let def = self.owner.into(); | ||
36 | let unsafe_expressions = unsafe_expressions(db, self.infer.as_ref(), def); | ||
37 | let is_unsafe = match self.owner { | ||
38 | DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe, | ||
39 | DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) => false, | ||
40 | }; | ||
41 | if is_unsafe | ||
42 | || unsafe_expressions | ||
43 | .iter() | ||
44 | .filter(|unsafe_expr| !unsafe_expr.inside_unsafe_block) | ||
45 | .count() | ||
46 | == 0 | ||
47 | { | ||
48 | return; | ||
49 | } | ||
50 | |||
51 | let (_, body_source) = db.body_with_source_map(def); | ||
52 | for unsafe_expr in unsafe_expressions { | ||
53 | if !unsafe_expr.inside_unsafe_block { | ||
54 | if let Ok(in_file) = body_source.as_ref().expr_syntax(unsafe_expr.expr) { | ||
55 | self.sink.push(MissingUnsafe { file: in_file.file_id, expr: in_file.value }) | ||
56 | } | ||
57 | } | ||
58 | } | ||
59 | } | ||
60 | } | ||
61 | |||
62 | pub struct UnsafeExpr { | ||
63 | pub expr: ExprId, | ||
64 | pub inside_unsafe_block: bool, | ||
65 | } | ||
66 | |||
67 | pub fn unsafe_expressions( | ||
68 | db: &dyn HirDatabase, | ||
69 | infer: &InferenceResult, | ||
70 | def: DefWithBodyId, | ||
71 | ) -> Vec<UnsafeExpr> { | ||
72 | let mut unsafe_exprs = vec![]; | ||
73 | let body = db.body(def); | ||
74 | walk_unsafe(&mut unsafe_exprs, db, infer, def, &body, body.body_expr, false); | ||
75 | |||
76 | unsafe_exprs | ||
77 | } | ||
78 | |||
79 | fn walk_unsafe( | ||
80 | unsafe_exprs: &mut Vec<UnsafeExpr>, | ||
81 | db: &dyn HirDatabase, | ||
82 | infer: &InferenceResult, | ||
83 | def: DefWithBodyId, | ||
84 | body: &Body, | ||
85 | current: ExprId, | ||
86 | inside_unsafe_block: bool, | ||
87 | ) { | ||
88 | let expr = &body.exprs[current]; | ||
89 | match expr { | ||
90 | Expr::Call { callee, .. } => { | ||
91 | let ty = &infer[*callee]; | ||
92 | if let &Ty::Apply(ApplicationTy { | ||
93 | ctor: TypeCtor::FnDef(CallableDefId::FunctionId(func)), | ||
94 | .. | ||
95 | }) = ty | ||
96 | { | ||
97 | if db.function_data(func).is_unsafe { | ||
98 | unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block }); | ||
99 | } | ||
100 | } | ||
101 | } | ||
102 | Expr::Path(path) => { | ||
103 | let resolver = resolver_for_expr(db.upcast(), def, current); | ||
104 | let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path.mod_path()); | ||
105 | if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial { | ||
106 | if db.static_data(id).mutable { | ||
107 | unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block }); | ||
108 | } | ||
109 | } | ||
110 | } | ||
111 | Expr::MethodCall { .. } => { | ||
112 | if infer | ||
113 | .method_resolution(current) | ||
114 | .map(|func| db.function_data(func).is_unsafe) | ||
115 | .unwrap_or(false) | ||
116 | { | ||
117 | unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block }); | ||
118 | } | ||
119 | } | ||
120 | Expr::UnaryOp { expr, op: UnaryOp::Deref } => { | ||
121 | if let Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(..), .. }) = &infer[*expr] { | ||
122 | unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block }); | ||
123 | } | ||
124 | } | ||
125 | Expr::Unsafe { body: child } => { | ||
126 | return walk_unsafe(unsafe_exprs, db, infer, def, body, *child, true); | ||
127 | } | ||
128 | _ => {} | ||
129 | } | ||
130 | |||
131 | expr.walk_child_exprs(|child| { | ||
132 | walk_unsafe(unsafe_exprs, db, infer, def, body, child, inside_unsafe_block); | ||
133 | }); | ||
134 | } | ||
135 | |||
136 | #[cfg(test)] | ||
137 | mod tests { | ||
138 | use crate::diagnostics::tests::check_diagnostics; | ||
139 | |||
140 | #[test] | ||
141 | fn missing_unsafe_diagnostic_with_raw_ptr() { | ||
142 | check_diagnostics( | ||
143 | r#" | ||
144 | fn main() { | ||
145 | let x = &5 as *const usize; | ||
146 | unsafe { let y = *x; } | ||
147 | let z = *x; | ||
148 | } //^^ This operation is unsafe and requires an unsafe function or block | ||
149 | "#, | ||
150 | ) | ||
151 | } | ||
152 | |||
153 | #[test] | ||
154 | fn missing_unsafe_diagnostic_with_unsafe_call() { | ||
155 | check_diagnostics( | ||
156 | r#" | ||
157 | struct HasUnsafe; | ||
158 | |||
159 | impl HasUnsafe { | ||
160 | unsafe fn unsafe_fn(&self) { | ||
161 | let x = &5 as *const usize; | ||
162 | let y = *x; | ||
163 | } | ||
164 | } | ||
165 | |||
166 | unsafe fn unsafe_fn() { | ||
167 | let x = &5 as *const usize; | ||
168 | let y = *x; | ||
169 | } | ||
170 | |||
171 | fn main() { | ||
172 | unsafe_fn(); | ||
173 | //^^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block | ||
174 | HasUnsafe.unsafe_fn(); | ||
175 | //^^^^^^^^^^^^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block | ||
176 | unsafe { | ||
177 | unsafe_fn(); | ||
178 | HasUnsafe.unsafe_fn(); | ||
179 | } | ||
180 | } | ||
181 | "#, | ||
182 | ); | ||
183 | } | ||
184 | |||
185 | #[test] | ||
186 | fn missing_unsafe_diagnostic_with_static_mut() { | ||
187 | check_diagnostics( | ||
188 | r#" | ||
189 | struct Ty { | ||
190 | a: u8, | ||
191 | } | ||
192 | |||
193 | static mut static_mut: Ty = Ty { a: 0 }; | ||
194 | |||
195 | fn main() { | ||
196 | let x = static_mut.a; | ||
197 | //^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block | ||
198 | unsafe { | ||
199 | let x = static_mut.a; | ||
200 | } | ||
201 | } | ||
202 | "#, | ||
203 | ); | ||
204 | } | ||
205 | } | ||
diff --git a/crates/hir_ty/src/display.rs b/crates/hir_ty/src/display.rs new file mode 100644 index 000000000..64b68014d --- /dev/null +++ b/crates/hir_ty/src/display.rs | |||
@@ -0,0 +1,632 @@ | |||
1 | //! The `HirDisplay` trait and helpers for rendering types as human-readable text, either for diagnostics and hovers or for insertion into source code. | ||
2 | |||
3 | use std::fmt; | ||
4 | |||
5 | use crate::{ | ||
6 | db::HirDatabase, utils::generics, ApplicationTy, CallableDefId, FnSig, GenericPredicate, | ||
7 | Obligation, OpaqueTyId, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, | ||
8 | }; | ||
9 | use hir_def::{ | ||
10 | find_path, generics::TypeParamProvenance, item_scope::ItemInNs, AdtId, AssocContainerId, | ||
11 | Lookup, ModuleId, | ||
12 | }; | ||
13 | use hir_expand::name::Name; | ||
14 | |||
15 | pub struct HirFormatter<'a> { | ||
16 | pub db: &'a dyn HirDatabase, | ||
17 | fmt: &'a mut dyn fmt::Write, | ||
18 | buf: String, | ||
19 | curr_size: usize, | ||
20 | pub(crate) max_size: Option<usize>, | ||
21 | omit_verbose_types: bool, | ||
22 | display_target: DisplayTarget, | ||
23 | } | ||
24 | |||
25 | pub trait HirDisplay { | ||
26 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError>; | ||
27 | |||
28 | /// Returns a `Display`able type that is human-readable. | ||
29 | /// Use this for showing types to the user (e.g. diagnostics) | ||
30 | fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self> | ||
31 | where | ||
32 | Self: Sized, | ||
33 | { | ||
34 | HirDisplayWrapper { | ||
35 | db, | ||
36 | t: self, | ||
37 | max_size: None, | ||
38 | omit_verbose_types: false, | ||
39 | display_target: DisplayTarget::Diagnostics, | ||
40 | } | ||
41 | } | ||
42 | |||
43 | /// Returns a `Display`able type that is human-readable and tries to be succinct. | ||
44 | /// Use this for showing types to the user where space is constrained (e.g. doc popups) | ||
45 | fn display_truncated<'a>( | ||
46 | &'a self, | ||
47 | db: &'a dyn HirDatabase, | ||
48 | max_size: Option<usize>, | ||
49 | ) -> HirDisplayWrapper<'a, Self> | ||
50 | where | ||
51 | Self: Sized, | ||
52 | { | ||
53 | HirDisplayWrapper { | ||
54 | db, | ||
55 | t: self, | ||
56 | max_size, | ||
57 | omit_verbose_types: true, | ||
58 | display_target: DisplayTarget::Diagnostics, | ||
59 | } | ||
60 | } | ||
61 | |||
62 | /// Returns a String representation of `self` that can be inserted into the given module. | ||
63 | /// Use this when generating code (e.g. assists) | ||
64 | fn display_source_code<'a>( | ||
65 | &'a self, | ||
66 | db: &'a dyn HirDatabase, | ||
67 | module_id: ModuleId, | ||
68 | ) -> Result<String, DisplaySourceCodeError> { | ||
69 | let mut result = String::new(); | ||
70 | match self.hir_fmt(&mut HirFormatter { | ||
71 | db, | ||
72 | fmt: &mut result, | ||
73 | buf: String::with_capacity(20), | ||
74 | curr_size: 0, | ||
75 | max_size: None, | ||
76 | omit_verbose_types: false, | ||
77 | display_target: DisplayTarget::SourceCode { module_id }, | ||
78 | }) { | ||
79 | Ok(()) => {} | ||
80 | Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"), | ||
81 | Err(HirDisplayError::DisplaySourceCodeError(e)) => return Err(e), | ||
82 | }; | ||
83 | Ok(result) | ||
84 | } | ||
85 | } | ||
86 | |||
87 | impl<'a> HirFormatter<'a> { | ||
88 | pub fn write_joined<T: HirDisplay>( | ||
89 | &mut self, | ||
90 | iter: impl IntoIterator<Item = T>, | ||
91 | sep: &str, | ||
92 | ) -> Result<(), HirDisplayError> { | ||
93 | let mut first = true; | ||
94 | for e in iter { | ||
95 | if !first { | ||
96 | write!(self, "{}", sep)?; | ||
97 | } | ||
98 | first = false; | ||
99 | e.hir_fmt(self)?; | ||
100 | } | ||
101 | Ok(()) | ||
102 | } | ||
103 | |||
104 | /// This allows using the `write!` macro directly with a `HirFormatter`. | ||
105 | pub fn write_fmt(&mut self, args: fmt::Arguments) -> Result<(), HirDisplayError> { | ||
106 | // We write to a buffer first to track output size | ||
107 | self.buf.clear(); | ||
108 | fmt::write(&mut self.buf, args)?; | ||
109 | self.curr_size += self.buf.len(); | ||
110 | |||
111 | // Then we write to the internal formatter from the buffer | ||
112 | self.fmt.write_str(&self.buf).map_err(HirDisplayError::from) | ||
113 | } | ||
114 | |||
115 | pub fn should_truncate(&self) -> bool { | ||
116 | if let Some(max_size) = self.max_size { | ||
117 | self.curr_size >= max_size | ||
118 | } else { | ||
119 | false | ||
120 | } | ||
121 | } | ||
122 | |||
123 | pub fn omit_verbose_types(&self) -> bool { | ||
124 | self.omit_verbose_types | ||
125 | } | ||
126 | } | ||
127 | |||
128 | #[derive(Clone, Copy)] | ||
129 | enum DisplayTarget { | ||
130 | /// Display types for inlays, doc popups, autocompletion, etc... | ||
131 | /// Showing `{unknown}` or not qualifying paths is fine here. | ||
132 | /// There's no reason for this to fail. | ||
133 | Diagnostics, | ||
134 | /// Display types for inserting them in source files. | ||
135 | /// The generated code should compile, so paths need to be qualified. | ||
136 | SourceCode { module_id: ModuleId }, | ||
137 | } | ||
138 | |||
139 | impl DisplayTarget { | ||
140 | fn is_source_code(&self) -> bool { | ||
141 | matches!(self, Self::SourceCode {..}) | ||
142 | } | ||
143 | } | ||
144 | |||
145 | #[derive(Debug)] | ||
146 | pub enum DisplaySourceCodeError { | ||
147 | PathNotFound, | ||
148 | } | ||
149 | |||
150 | pub enum HirDisplayError { | ||
151 | /// Errors that can occur when generating source code | ||
152 | DisplaySourceCodeError(DisplaySourceCodeError), | ||
153 | /// `FmtError` is required to be compatible with std::fmt::Display | ||
154 | FmtError, | ||
155 | } | ||
156 | impl From<fmt::Error> for HirDisplayError { | ||
157 | fn from(_: fmt::Error) -> Self { | ||
158 | Self::FmtError | ||
159 | } | ||
160 | } | ||
161 | |||
162 | pub struct HirDisplayWrapper<'a, T> { | ||
163 | db: &'a dyn HirDatabase, | ||
164 | t: &'a T, | ||
165 | max_size: Option<usize>, | ||
166 | omit_verbose_types: bool, | ||
167 | display_target: DisplayTarget, | ||
168 | } | ||
169 | |||
170 | impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T> | ||
171 | where | ||
172 | T: HirDisplay, | ||
173 | { | ||
174 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
175 | match self.t.hir_fmt(&mut HirFormatter { | ||
176 | db: self.db, | ||
177 | fmt: f, | ||
178 | buf: String::with_capacity(20), | ||
179 | curr_size: 0, | ||
180 | max_size: self.max_size, | ||
181 | omit_verbose_types: self.omit_verbose_types, | ||
182 | display_target: self.display_target, | ||
183 | }) { | ||
184 | Ok(()) => Ok(()), | ||
185 | Err(HirDisplayError::FmtError) => Err(fmt::Error), | ||
186 | Err(HirDisplayError::DisplaySourceCodeError(_)) => { | ||
187 | // This should never happen | ||
188 | panic!("HirDisplay failed when calling Display::fmt!") | ||
189 | } | ||
190 | } | ||
191 | } | ||
192 | } | ||
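As an aside (editorial addition; the `Truncated` type below is invented for illustration), the deferred-formatting pattern that `HirDisplayWrapper` uses can be shown in a few self-contained lines: the wrapper only captures the value and its rendering options, and all the work happens when `Display::fmt` is finally called.

```rust
use std::fmt;

// A cheap wrapper that remembers what to print and how; nothing is rendered
// until someone actually formats it, mirroring `HirDisplayWrapper`.
struct Truncated<'a> {
    text: &'a str,
    max_len: usize,
}

impl fmt::Display for Truncated<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.text.len() > self.max_len {
            // Byte-based truncation; fine for this ASCII-only example.
            write!(f, "{}…", &self.text[..self.max_len])
        } else {
            f.write_str(self.text)
        }
    }
}

fn main() {
    let long = "Iterator<Item = (usize, String)>";
    // Prints "Iterator<Ite…".
    println!("{}", Truncated { text: long, max_len: 12 });
}
```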
193 | |||
194 | const TYPE_HINT_TRUNCATION: &str = "…"; | ||
195 | |||
196 | impl HirDisplay for &Ty { | ||
197 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | ||
198 | HirDisplay::hir_fmt(*self, f) | ||
199 | } | ||
200 | } | ||
201 | |||
202 | impl HirDisplay for ApplicationTy { | ||
203 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | ||
204 | if f.should_truncate() { | ||
205 | return write!(f, "{}", TYPE_HINT_TRUNCATION); | ||
206 | } | ||
207 | |||
208 | match self.ctor { | ||
209 | TypeCtor::Bool => write!(f, "bool")?, | ||
210 | TypeCtor::Char => write!(f, "char")?, | ||
211 | TypeCtor::Int(t) => write!(f, "{}", t)?, | ||
212 | TypeCtor::Float(t) => write!(f, "{}", t)?, | ||
213 | TypeCtor::Str => write!(f, "str")?, | ||
214 | TypeCtor::Slice => { | ||
215 | let t = self.parameters.as_single(); | ||
216 | write!(f, "[{}]", t.display(f.db))?; | ||
217 | } | ||
218 | TypeCtor::Array => { | ||
219 | let t = self.parameters.as_single(); | ||
220 | write!(f, "[{}; _]", t.display(f.db))?; | ||
221 | } | ||
222 | TypeCtor::RawPtr(m) => { | ||
223 | let t = self.parameters.as_single(); | ||
224 | write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?; | ||
225 | } | ||
226 | TypeCtor::Ref(m) => { | ||
227 | let t = self.parameters.as_single(); | ||
228 | let ty_display = if f.omit_verbose_types() { | ||
229 | t.display_truncated(f.db, f.max_size) | ||
230 | } else { | ||
231 | t.display(f.db) | ||
232 | }; | ||
233 | write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?; | ||
234 | } | ||
235 | TypeCtor::Never => write!(f, "!")?, | ||
236 | TypeCtor::Tuple { .. } => { | ||
237 | let ts = &self.parameters; | ||
238 | if ts.len() == 1 { | ||
239 | write!(f, "({},)", ts[0].display(f.db))?; | ||
240 | } else { | ||
241 | write!(f, "(")?; | ||
242 | f.write_joined(&*ts.0, ", ")?; | ||
243 | write!(f, ")")?; | ||
244 | } | ||
245 | } | ||
246 | TypeCtor::FnPtr { is_varargs, .. } => { | ||
247 | let sig = FnSig::from_fn_ptr_substs(&self.parameters, is_varargs); | ||
248 | write!(f, "fn(")?; | ||
249 | f.write_joined(sig.params(), ", ")?; | ||
250 | if is_varargs { | ||
251 | if sig.params().is_empty() { | ||
252 | write!(f, "...")?; | ||
253 | } else { | ||
254 | write!(f, ", ...")?; | ||
255 | } | ||
256 | } | ||
257 | write!(f, ")")?; | ||
258 | let ret = sig.ret(); | ||
259 | if *ret != Ty::unit() { | ||
260 | let ret_display = if f.omit_verbose_types() { | ||
261 | ret.display_truncated(f.db, f.max_size) | ||
262 | } else { | ||
263 | ret.display(f.db) | ||
264 | }; | ||
265 | write!(f, " -> {}", ret_display)?; | ||
266 | } | ||
267 | } | ||
268 | TypeCtor::FnDef(def) => { | ||
269 | let sig = f.db.callable_item_signature(def).subst(&self.parameters); | ||
270 | match def { | ||
271 | CallableDefId::FunctionId(ff) => { | ||
272 | write!(f, "fn {}", f.db.function_data(ff).name)? | ||
273 | } | ||
274 | CallableDefId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?, | ||
275 | CallableDefId::EnumVariantId(e) => { | ||
276 | write!(f, "{}", f.db.enum_data(e.parent).variants[e.local_id].name)? | ||
277 | } | ||
278 | }; | ||
279 | if self.parameters.len() > 0 { | ||
280 | let generics = generics(f.db.upcast(), def.into()); | ||
281 | let (parent_params, self_param, type_params, _impl_trait_params) = | ||
282 | generics.provenance_split(); | ||
283 | let total_len = parent_params + self_param + type_params; | ||
284 | // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self? | ||
285 | if total_len > 0 { | ||
286 | write!(f, "<")?; | ||
287 | f.write_joined(&self.parameters.0[..total_len], ", ")?; | ||
288 | write!(f, ">")?; | ||
289 | } | ||
290 | } | ||
291 | write!(f, "(")?; | ||
292 | f.write_joined(sig.params(), ", ")?; | ||
293 | write!(f, ")")?; | ||
294 | let ret = sig.ret(); | ||
295 | if *ret != Ty::unit() { | ||
296 | let ret_display = if f.omit_verbose_types() { | ||
297 | ret.display_truncated(f.db, f.max_size) | ||
298 | } else { | ||
299 | ret.display(f.db) | ||
300 | }; | ||
301 | write!(f, " -> {}", ret_display)?; | ||
302 | } | ||
303 | } | ||
304 | TypeCtor::Adt(def_id) => { | ||
305 | match f.display_target { | ||
306 | DisplayTarget::Diagnostics => { | ||
307 | let name = match def_id { | ||
308 | AdtId::StructId(it) => f.db.struct_data(it).name.clone(), | ||
309 | AdtId::UnionId(it) => f.db.union_data(it).name.clone(), | ||
310 | AdtId::EnumId(it) => f.db.enum_data(it).name.clone(), | ||
311 | }; | ||
312 | write!(f, "{}", name)?; | ||
313 | } | ||
314 | DisplayTarget::SourceCode { module_id } => { | ||
315 | if let Some(path) = find_path::find_path( | ||
316 | f.db.upcast(), | ||
317 | ItemInNs::Types(def_id.into()), | ||
318 | module_id, | ||
319 | ) { | ||
320 | write!(f, "{}", path)?; | ||
321 | } else { | ||
322 | return Err(HirDisplayError::DisplaySourceCodeError( | ||
323 | DisplaySourceCodeError::PathNotFound, | ||
324 | )); | ||
325 | } | ||
326 | } | ||
327 | } | ||
328 | |||
329 | if self.parameters.len() > 0 { | ||
330 | let parameters_to_write = | ||
331 | if f.display_target.is_source_code() || f.omit_verbose_types() { | ||
332 | match self | ||
333 | .ctor | ||
334 | .as_generic_def() | ||
335 | .map(|generic_def_id| f.db.generic_defaults(generic_def_id)) | ||
336 | .filter(|defaults| !defaults.is_empty()) | ||
337 | { | ||
338 | None => self.parameters.0.as_ref(), | ||
339 | Some(default_parameters) => { | ||
340 | let mut default_from = 0; | ||
341 | for (i, parameter) in self.parameters.iter().enumerate() { | ||
342 | match (parameter, default_parameters.get(i)) { | ||
343 | (&Ty::Unknown, _) | (_, None) => { | ||
344 | default_from = i + 1; | ||
345 | } | ||
346 | (_, Some(default_parameter)) => { | ||
347 | let actual_default = default_parameter | ||
348 | .clone() | ||
349 | .subst(&self.parameters.prefix(i)); | ||
350 | if parameter != &actual_default { | ||
351 | default_from = i + 1; | ||
352 | } | ||
353 | } | ||
354 | } | ||
355 | } | ||
356 | &self.parameters.0[0..default_from] | ||
357 | } | ||
358 | } | ||
359 | } else { | ||
360 | self.parameters.0.as_ref() | ||
361 | }; | ||
362 | if !parameters_to_write.is_empty() { | ||
363 | write!(f, "<")?; | ||
364 | f.write_joined(parameters_to_write, ", ")?; | ||
365 | write!(f, ">")?; | ||
366 | } | ||
367 | } | ||
368 | } | ||
369 | TypeCtor::AssociatedType(type_alias) => { | ||
370 | let trait_ = match type_alias.lookup(f.db.upcast()).container { | ||
371 | AssocContainerId::TraitId(it) => it, | ||
372 | _ => panic!("not an associated type"), | ||
373 | }; | ||
374 | let trait_ = f.db.trait_data(trait_); | ||
375 | let type_alias = f.db.type_alias_data(type_alias); | ||
376 | write!(f, "{}::{}", trait_.name, type_alias.name)?; | ||
377 | if self.parameters.len() > 0 { | ||
378 | write!(f, "<")?; | ||
379 | f.write_joined(&*self.parameters.0, ", ")?; | ||
380 | write!(f, ">")?; | ||
381 | } | ||
382 | } | ||
383 | TypeCtor::OpaqueType(opaque_ty_id) => { | ||
384 | let bounds = match opaque_ty_id { | ||
385 | OpaqueTyId::ReturnTypeImplTrait(func, idx) => { | ||
386 | let datas = | ||
387 | f.db.return_type_impl_traits(func).expect("impl trait id without data"); | ||
388 | let data = (*datas) | ||
389 | .as_ref() | ||
390 | .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); | ||
391 | data.subst(&self.parameters) | ||
392 | } | ||
393 | }; | ||
394 | write!(f, "impl ")?; | ||
395 | write_bounds_like_dyn_trait(&bounds.value, f)?; | ||
396 | // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution | ||
397 | } | ||
398 | TypeCtor::Closure { .. } => { | ||
399 | let sig = self.parameters[0].callable_sig(f.db); | ||
400 | if let Some(sig) = sig { | ||
401 | if sig.params().is_empty() { | ||
402 | write!(f, "||")?; | ||
403 | } else if f.omit_verbose_types() { | ||
404 | write!(f, "|{}|", TYPE_HINT_TRUNCATION)?; | ||
405 | } else { | ||
406 | write!(f, "|")?; | ||
407 | f.write_joined(sig.params(), ", ")?; | ||
408 | write!(f, "|")?; | ||
409 | }; | ||
410 | |||
411 | let ret_display = if f.omit_verbose_types() { | ||
412 | sig.ret().display_truncated(f.db, f.max_size) | ||
413 | } else { | ||
414 | sig.ret().display(f.db) | ||
415 | }; | ||
416 | write!(f, " -> {}", ret_display)?; | ||
417 | } else { | ||
418 | write!(f, "{{closure}}")?; | ||
419 | } | ||
420 | } | ||
421 | } | ||
422 | Ok(()) | ||
423 | } | ||
424 | } | ||
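For orientation, a few illustrative renderings produced by the arms above (hypothetical Rust types; truncation and default-parameter trimming depend on the formatter settings and on the resolved generic defaults):

// fn id<T>(x: T) -> T, instantiated at T = i32   =>  "fn id<i32>(i32) -> i32"
// (i32,)                                          =>  "(i32,)"
// HashMap<String, i32, RandomState>               =>  "HashMap<String, i32>" when defaults are
//                                                     trimmed (source code / non-verbose output)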
425 | |||
426 | impl HirDisplay for ProjectionTy { | ||
427 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | ||
428 | if f.should_truncate() { | ||
429 | return write!(f, "{}", TYPE_HINT_TRUNCATION); | ||
430 | } | ||
431 | |||
432 | let trait_ = f.db.trait_data(self.trait_(f.db)); | ||
433 | write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_.name)?; | ||
434 | if self.parameters.len() > 1 { | ||
435 | write!(f, "<")?; | ||
436 | f.write_joined(&self.parameters[1..], ", ")?; | ||
437 | write!(f, ">")?; | ||
438 | } | ||
439 | write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?; | ||
440 | Ok(()) | ||
441 | } | ||
442 | } | ||
443 | |||
444 | impl HirDisplay for Ty { | ||
445 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | ||
446 | if f.should_truncate() { | ||
447 | return write!(f, "{}", TYPE_HINT_TRUNCATION); | ||
448 | } | ||
449 | |||
450 | match self { | ||
451 | Ty::Apply(a_ty) => a_ty.hir_fmt(f)?, | ||
452 | Ty::Projection(p_ty) => p_ty.hir_fmt(f)?, | ||
453 | Ty::Placeholder(id) => { | ||
454 | let generics = generics(f.db.upcast(), id.parent); | ||
455 | let param_data = &generics.params.types[id.local_id]; | ||
456 | match param_data.provenance { | ||
457 | TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => { | ||
458 | write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))? | ||
459 | } | ||
460 | TypeParamProvenance::ArgumentImplTrait => { | ||
461 | write!(f, "impl ")?; | ||
462 | let bounds = f.db.generic_predicates_for_param(*id); | ||
463 | let substs = Substs::type_params_for_generics(&generics); | ||
464 | write_bounds_like_dyn_trait( | ||
465 | &bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(), | ||
466 | f, | ||
467 | )?; | ||
468 | } | ||
469 | } | ||
470 | } | ||
471 | Ty::Bound(idx) => write!(f, "?{}.{}", idx.debruijn.depth(), idx.index)?, | ||
472 | Ty::Dyn(predicates) => { | ||
473 | write!(f, "dyn ")?; | ||
474 | write_bounds_like_dyn_trait(predicates, f)?; | ||
475 | } | ||
476 | Ty::Opaque(opaque_ty) => { | ||
477 | let bounds = match opaque_ty.opaque_ty_id { | ||
478 | OpaqueTyId::ReturnTypeImplTrait(func, idx) => { | ||
479 | let datas = | ||
480 | f.db.return_type_impl_traits(func).expect("impl trait id without data"); | ||
481 | let data = (*datas) | ||
482 | .as_ref() | ||
483 | .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); | ||
484 | data.subst(&opaque_ty.parameters) | ||
485 | } | ||
486 | }; | ||
487 | write!(f, "impl ")?; | ||
488 | write_bounds_like_dyn_trait(&bounds.value, f)?; | ||
489 | } | ||
490 | Ty::Unknown => write!(f, "{{unknown}}")?, | ||
491 | Ty::Infer(..) => write!(f, "_")?, | ||
492 | } | ||
493 | Ok(()) | ||
494 | } | ||
495 | } | ||
496 | |||
497 | fn write_bounds_like_dyn_trait( | ||
498 | predicates: &[GenericPredicate], | ||
499 | f: &mut HirFormatter, | ||
500 | ) -> Result<(), HirDisplayError> { | ||
501 | // Note: This code is written to produce nice results (i.e. | ||
502 | // corresponding to surface Rust) for types that can occur in | ||
503 | // actual Rust. It will produce weird results if the predicates are | ||
504 | // not in the expected shape (self types equal to $0, and projection | ||
505 | // predicates for a trait coming directly after the Implemented | ||
506 | // predicate for that trait). See the illustrative trace after this function. | ||
507 | let mut first = true; | ||
508 | let mut angle_open = false; | ||
509 | for p in predicates.iter() { | ||
510 | match p { | ||
511 | GenericPredicate::Implemented(trait_ref) => { | ||
512 | if angle_open { | ||
513 | write!(f, ">")?; | ||
514 | angle_open = false; | ||
515 | } | ||
516 | if !first { | ||
517 | write!(f, " + ")?; | ||
518 | } | ||
519 | // We assume that the self type is $0 (i.e. the | ||
520 | // existential) here, since that is the only shape | ||
521 | // possible in actual Rust, and hence we don't print it | ||
522 | write!(f, "{}", f.db.trait_data(trait_ref.trait_).name)?; | ||
523 | if trait_ref.substs.len() > 1 { | ||
524 | write!(f, "<")?; | ||
525 | f.write_joined(&trait_ref.substs[1..], ", ")?; | ||
526 | // there might be assoc type bindings, so we leave the angle brackets open | ||
527 | angle_open = true; | ||
528 | } | ||
529 | } | ||
530 | GenericPredicate::Projection(projection_pred) => { | ||
531 | // in types in actual Rust, these will always come | ||
532 | // after the corresponding Implemented predicate | ||
533 | if angle_open { | ||
534 | write!(f, ", ")?; | ||
535 | } else { | ||
536 | write!(f, "<")?; | ||
537 | angle_open = true; | ||
538 | } | ||
539 | let type_alias = f.db.type_alias_data(projection_pred.projection_ty.associated_ty); | ||
540 | write!(f, "{} = ", type_alias.name)?; | ||
541 | projection_pred.ty.hir_fmt(f)?; | ||
542 | } | ||
543 | GenericPredicate::Error => { | ||
544 | if angle_open { | ||
545 | // impl Trait<X, {error}> | ||
546 | write!(f, ", ")?; | ||
547 | } else if !first { | ||
548 | // impl Trait + {error} | ||
549 | write!(f, " + ")?; | ||
550 | } | ||
551 | p.hir_fmt(f)?; | ||
552 | } | ||
553 | } | ||
554 | first = false; | ||
555 | } | ||
556 | if angle_open { | ||
557 | write!(f, ">")?; | ||
558 | } | ||
559 | Ok(()) | ||
560 | } | ||
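An illustrative trace of the bracket bookkeeping above for the predicate list `Implemented(Iterator)`, `Projection(Item = u32)`, `Implemented(Send)` (the shape produced for `dyn Iterator<Item = u32> + Send`):

// "Iterator"        Implemented: no extra substs, so no '<' yet (angle_open stays false)
// "<Item = u32"     Projection opens the angle bracket (angle_open = true)
// "> + Send"        the next Implemented closes it, then appends " + Send"
// result: "Iterator<Item = u32> + Send"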
561 | |||
562 | impl TraitRef { | ||
563 | fn hir_fmt_ext(&self, f: &mut HirFormatter, use_as: bool) -> Result<(), HirDisplayError> { | ||
564 | if f.should_truncate() { | ||
565 | return write!(f, "{}", TYPE_HINT_TRUNCATION); | ||
566 | } | ||
567 | |||
568 | self.substs[0].hir_fmt(f)?; | ||
569 | if use_as { | ||
570 | write!(f, " as ")?; | ||
571 | } else { | ||
572 | write!(f, ": ")?; | ||
573 | } | ||
574 | write!(f, "{}", f.db.trait_data(self.trait_).name)?; | ||
575 | if self.substs.len() > 1 { | ||
576 | write!(f, "<")?; | ||
577 | f.write_joined(&self.substs[1..], ", ")?; | ||
578 | write!(f, ">")?; | ||
579 | } | ||
580 | Ok(()) | ||
581 | } | ||
582 | } | ||
583 | |||
584 | impl HirDisplay for TraitRef { | ||
585 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | ||
586 | self.hir_fmt_ext(f, false) | ||
587 | } | ||
588 | } | ||
589 | |||
590 | impl HirDisplay for &GenericPredicate { | ||
591 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | ||
592 | HirDisplay::hir_fmt(*self, f) | ||
593 | } | ||
594 | } | ||
595 | |||
596 | impl HirDisplay for GenericPredicate { | ||
597 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | ||
598 | if f.should_truncate() { | ||
599 | return write!(f, "{}", TYPE_HINT_TRUNCATION); | ||
600 | } | ||
601 | |||
602 | match self { | ||
603 | GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?, | ||
604 | GenericPredicate::Projection(projection_pred) => { | ||
605 | write!(f, "<")?; | ||
606 | projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?; | ||
607 | write!( | ||
608 | f, | ||
609 | ">::{} = {}", | ||
610 | f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name, | ||
611 | projection_pred.ty.display(f.db) | ||
612 | )?; | ||
613 | } | ||
614 | GenericPredicate::Error => write!(f, "{{error}}")?, | ||
615 | } | ||
616 | Ok(()) | ||
617 | } | ||
618 | } | ||
619 | |||
620 | impl HirDisplay for Obligation { | ||
621 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | ||
622 | Ok(match self { | ||
623 | Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db))?, | ||
624 | Obligation::Projection(proj) => write!( | ||
625 | f, | ||
626 | "Normalize({} => {})", | ||
627 | proj.projection_ty.display(f.db), | ||
628 | proj.ty.display(f.db) | ||
629 | )?, | ||
630 | }) | ||
631 | } | ||
632 | } | ||
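Illustrative renderings of the two obligation kinds (hypothetical obligations, following the formats above):

// Obligation::Trait(u32: Clone)                               => "Implements(u32: Clone)"
// Obligation::Projection(<Vec<u8> as IntoIterator>::Item = u8) => "Normalize(<Vec<u8> as IntoIterator>::Item => u8)"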
diff --git a/crates/hir_ty/src/infer.rs b/crates/hir_ty/src/infer.rs new file mode 100644 index 000000000..03b00b101 --- /dev/null +++ b/crates/hir_ty/src/infer.rs | |||
@@ -0,0 +1,802 @@ | |||
1 | //! Type inference, i.e. the process of walking through the code and determining | ||
2 | //! the type of each expression and pattern. | ||
3 | //! | ||
4 | //! For type inference, compare the implementations in rustc (the various | ||
5 | //! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and | ||
6 | //! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for | ||
7 | //! inference here is the `infer` function, which infers the types of all | ||
8 | //! expressions in a given function. | ||
9 | //! | ||
10 | //! During inference, types (i.e. the `Ty` struct) can contain type 'variables' | ||
11 | //! which represent currently unknown types; as we walk through the expressions, | ||
12 | //! we might determine that certain variables need to be equal to each other, or | ||
13 | //! to certain types. To record this, we use the union-find implementation from | ||
14 | //! the `ena` crate, which is extracted from rustc. | ||
15 | |||
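A minimal illustration (not part of this diff) of the type variables described above; the `?0` notation is informal:

fn example() {
    let mut v = Vec::new(); // v: Vec<?0>, with ?0 a fresh type variable
    v.push(1u32);           // push unifies ?0 with u32
    // after resolution, v: Vec<u32>
}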
16 | use std::borrow::Cow; | ||
17 | use std::mem; | ||
18 | use std::ops::Index; | ||
19 | use std::sync::Arc; | ||
20 | |||
21 | use arena::map::ArenaMap; | ||
22 | use hir_def::{ | ||
23 | body::Body, | ||
24 | data::{ConstData, FunctionData, StaticData}, | ||
25 | expr::{BindingAnnotation, ExprId, PatId}, | ||
26 | lang_item::LangItemTarget, | ||
27 | path::{path, Path}, | ||
28 | resolver::{HasResolver, Resolver, TypeNs}, | ||
29 | type_ref::{Mutability, TypeRef}, | ||
30 | AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, Lookup, TraitId, | ||
31 | TypeAliasId, VariantId, | ||
32 | }; | ||
33 | use hir_expand::{diagnostics::DiagnosticSink, name::name}; | ||
34 | use rustc_hash::FxHashMap; | ||
35 | use stdx::impl_from; | ||
36 | use syntax::SmolStr; | ||
37 | |||
38 | use super::{ | ||
39 | primitive::{FloatTy, IntTy}, | ||
40 | traits::{Guidance, Obligation, ProjectionPredicate, Solution}, | ||
41 | InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk, | ||
42 | }; | ||
43 | use crate::{ | ||
44 | db::HirDatabase, infer::diagnostics::InferenceDiagnostic, lower::ImplTraitLoweringMode, | ||
45 | }; | ||
46 | |||
47 | pub(crate) use unify::unify; | ||
48 | |||
49 | macro_rules! ty_app { | ||
50 | ($ctor:pat, $param:pat) => { | ||
51 | crate::Ty::Apply(crate::ApplicationTy { ctor: $ctor, parameters: $param }) | ||
52 | }; | ||
53 | ($ctor:pat) => { | ||
54 | ty_app!($ctor, _) | ||
55 | }; | ||
56 | } | ||
57 | |||
58 | mod unify; | ||
59 | mod path; | ||
60 | mod expr; | ||
61 | mod pat; | ||
62 | mod coerce; | ||
63 | |||
64 | /// The entry point of type inference. | ||
65 | pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { | ||
66 | let _p = profile::span("infer_query"); | ||
67 | let resolver = def.resolver(db.upcast()); | ||
68 | let mut ctx = InferenceContext::new(db, def, resolver); | ||
69 | |||
70 | match def { | ||
71 | DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)), | ||
72 | DefWithBodyId::FunctionId(f) => ctx.collect_fn(&db.function_data(f)), | ||
73 | DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)), | ||
74 | } | ||
75 | |||
76 | ctx.infer_body(); | ||
77 | |||
78 | Arc::new(ctx.resolve_all()) | ||
79 | } | ||
80 | |||
81 | #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] | ||
82 | enum ExprOrPatId { | ||
83 | ExprId(ExprId), | ||
84 | PatId(PatId), | ||
85 | } | ||
86 | impl_from!(ExprId, PatId for ExprOrPatId); | ||
87 | |||
88 | /// Binding modes inferred for patterns. | ||
89 | /// https://doc.rust-lang.org/reference/patterns.html#binding-modes | ||
90 | #[derive(Copy, Clone, Debug, Eq, PartialEq)] | ||
91 | enum BindingMode { | ||
92 | Move, | ||
93 | Ref(Mutability), | ||
94 | } | ||
95 | |||
96 | impl BindingMode { | ||
97 | pub fn convert(annotation: BindingAnnotation) -> BindingMode { | ||
98 | match annotation { | ||
99 | BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move, | ||
100 | BindingAnnotation::Ref => BindingMode::Ref(Mutability::Shared), | ||
101 | BindingAnnotation::RefMut => BindingMode::Ref(Mutability::Mut), | ||
102 | } | ||
103 | } | ||
104 | } | ||
105 | |||
106 | impl Default for BindingMode { | ||
107 | fn default() -> Self { | ||
108 | BindingMode::Move | ||
109 | } | ||
110 | } | ||
111 | |||
112 | /// A mismatch between an expected and an inferred type. | ||
113 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] | ||
114 | pub struct TypeMismatch { | ||
115 | pub expected: Ty, | ||
116 | pub actual: Ty, | ||
117 | } | ||
118 | |||
119 | /// The result of type inference: A mapping from expressions and patterns to types. | ||
120 | #[derive(Clone, PartialEq, Eq, Debug, Default)] | ||
121 | pub struct InferenceResult { | ||
122 | /// For each method call expr, records the function it resolves to. | ||
123 | method_resolutions: FxHashMap<ExprId, FunctionId>, | ||
124 | /// For each field access expr, records the field it resolves to. | ||
125 | field_resolutions: FxHashMap<ExprId, FieldId>, | ||
126 | /// For each field in record literal, records the field it resolves to. | ||
127 | record_field_resolutions: FxHashMap<ExprId, FieldId>, | ||
128 | record_field_pat_resolutions: FxHashMap<PatId, FieldId>, // for each field in a record pattern, the field it resolves to | ||
129 | /// For each struct literal, records the variant it resolves to. | ||
130 | variant_resolutions: FxHashMap<ExprOrPatId, VariantId>, | ||
131 | /// For each associated item, records what it resolves to. | ||
132 | assoc_resolutions: FxHashMap<ExprOrPatId, AssocItemId>, | ||
133 | diagnostics: Vec<InferenceDiagnostic>, | ||
134 | pub type_of_expr: ArenaMap<ExprId, Ty>, | ||
135 | pub type_of_pat: ArenaMap<PatId, Ty>, | ||
136 | pub(super) type_mismatches: ArenaMap<ExprId, TypeMismatch>, | ||
137 | } | ||
138 | |||
139 | impl InferenceResult { | ||
140 | pub fn method_resolution(&self, expr: ExprId) -> Option<FunctionId> { | ||
141 | self.method_resolutions.get(&expr).copied() | ||
142 | } | ||
143 | pub fn field_resolution(&self, expr: ExprId) -> Option<FieldId> { | ||
144 | self.field_resolutions.get(&expr).copied() | ||
145 | } | ||
146 | pub fn record_field_resolution(&self, expr: ExprId) -> Option<FieldId> { | ||
147 | self.record_field_resolutions.get(&expr).copied() | ||
148 | } | ||
149 | pub fn record_field_pat_resolution(&self, pat: PatId) -> Option<FieldId> { | ||
150 | self.record_field_pat_resolutions.get(&pat).copied() | ||
151 | } | ||
152 | pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> { | ||
153 | self.variant_resolutions.get(&id.into()).copied() | ||
154 | } | ||
155 | pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantId> { | ||
156 | self.variant_resolutions.get(&id.into()).copied() | ||
157 | } | ||
158 | pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItemId> { | ||
159 | self.assoc_resolutions.get(&id.into()).copied() | ||
160 | } | ||
161 | pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItemId> { | ||
162 | self.assoc_resolutions.get(&id.into()).copied() | ||
163 | } | ||
164 | pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> { | ||
165 | self.type_mismatches.get(expr) | ||
166 | } | ||
167 | pub fn add_diagnostics( | ||
168 | &self, | ||
169 | db: &dyn HirDatabase, | ||
170 | owner: DefWithBodyId, | ||
171 | sink: &mut DiagnosticSink, | ||
172 | ) { | ||
173 | self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink)) | ||
174 | } | ||
175 | } | ||
176 | |||
177 | impl Index<ExprId> for InferenceResult { | ||
178 | type Output = Ty; | ||
179 | |||
180 | fn index(&self, expr: ExprId) -> &Ty { | ||
181 | self.type_of_expr.get(expr).unwrap_or(&Ty::Unknown) | ||
182 | } | ||
183 | } | ||
184 | |||
185 | impl Index<PatId> for InferenceResult { | ||
186 | type Output = Ty; | ||
187 | |||
188 | fn index(&self, pat: PatId) -> &Ty { | ||
189 | self.type_of_pat.get(pat).unwrap_or(&Ty::Unknown) | ||
190 | } | ||
191 | } | ||
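A hedged sketch of reading an `InferenceResult` through the accessors and `Index` impls above; `inspect` and its arguments are hypothetical:

fn inspect(result: &InferenceResult, expr: ExprId, pat: PatId) {
    let expr_ty: &Ty = &result[expr];            // falls back to &Ty::Unknown
    let pat_ty: &Ty = &result[pat];
    let method = result.method_resolution(expr); // Option<FunctionId>
    let _ = (expr_ty, pat_ty, method);
}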
192 | |||
193 | /// The inference context contains all information needed during type inference. | ||
194 | #[derive(Clone, Debug)] | ||
195 | struct InferenceContext<'a> { | ||
196 | db: &'a dyn HirDatabase, | ||
197 | owner: DefWithBodyId, | ||
198 | body: Arc<Body>, | ||
199 | resolver: Resolver, | ||
200 | table: unify::InferenceTable, | ||
201 | trait_env: Arc<TraitEnvironment>, | ||
202 | obligations: Vec<Obligation>, | ||
203 | result: InferenceResult, | ||
204 | /// The return type of the function being inferred, or of the closure we're | ||
205 | /// currently within, if any. | ||
206 | /// | ||
207 | /// We might consider using a nested inference context for checking | ||
208 | /// closures, but currently this is the only field that would change there, | ||
209 | /// so it doesn't seem worth it. | ||
210 | return_ty: Ty, | ||
211 | diverges: Diverges, | ||
212 | breakables: Vec<BreakableContext>, | ||
213 | } | ||
214 | |||
215 | #[derive(Clone, Debug)] | ||
216 | struct BreakableContext { | ||
217 | pub may_break: bool, | ||
218 | pub break_ty: Ty, | ||
219 | pub label: Option<name::Name>, | ||
220 | } | ||
221 | |||
222 | fn find_breakable<'c>( | ||
223 | ctxs: &'c mut [BreakableContext], | ||
224 | label: Option<&name::Name>, | ||
225 | ) -> Option<&'c mut BreakableContext> { | ||
226 | match label { | ||
227 | Some(_) => ctxs.iter_mut().rev().find(|ctx| ctx.label.as_ref() == label), | ||
228 | None => ctxs.last_mut(), | ||
229 | } | ||
230 | } | ||
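An illustrative trace of which `BreakableContext` a `break` resolves to (hypothetical loops):

// 'outer: loop {        // pushes ctx A { label: Some(outer) }
//     loop {            // pushes ctx B { label: None }
//         break 'outer; // find_breakable(ctxs, Some(&outer)) -> ctx A
//         break;        // find_breakable(ctxs, None)         -> ctx B (innermost)
//     }
// }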
231 | |||
232 | impl<'a> InferenceContext<'a> { | ||
233 | fn new(db: &'a dyn HirDatabase, owner: DefWithBodyId, resolver: Resolver) -> Self { | ||
234 | InferenceContext { | ||
235 | result: InferenceResult::default(), | ||
236 | table: unify::InferenceTable::new(), | ||
237 | obligations: Vec::default(), | ||
238 | return_ty: Ty::Unknown, // set in collect_fn_signature | ||
239 | trait_env: TraitEnvironment::lower(db, &resolver), | ||
240 | db, | ||
241 | owner, | ||
242 | body: db.body(owner), | ||
243 | resolver, | ||
244 | diverges: Diverges::Maybe, | ||
245 | breakables: Vec::new(), | ||
246 | } | ||
247 | } | ||
248 | |||
249 | fn resolve_all(mut self) -> InferenceResult { | ||
250 | // FIXME resolve obligations as well (use Guidance if necessary) | ||
251 | let mut result = std::mem::take(&mut self.result); | ||
252 | for ty in result.type_of_expr.values_mut() { | ||
253 | let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown)); | ||
254 | *ty = resolved; | ||
255 | } | ||
256 | for ty in result.type_of_pat.values_mut() { | ||
257 | let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown)); | ||
258 | *ty = resolved; | ||
259 | } | ||
260 | result | ||
261 | } | ||
262 | |||
263 | fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) { | ||
264 | self.result.type_of_expr.insert(expr, ty); | ||
265 | } | ||
266 | |||
267 | fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId) { | ||
268 | self.result.method_resolutions.insert(expr, func); | ||
269 | } | ||
270 | |||
271 | fn write_field_resolution(&mut self, expr: ExprId, field: FieldId) { | ||
272 | self.result.field_resolutions.insert(expr, field); | ||
273 | } | ||
274 | |||
275 | fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) { | ||
276 | self.result.variant_resolutions.insert(id, variant); | ||
277 | } | ||
278 | |||
279 | fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) { | ||
280 | self.result.assoc_resolutions.insert(id, item); | ||
281 | } | ||
282 | |||
283 | fn write_pat_ty(&mut self, pat: PatId, ty: Ty) { | ||
284 | self.result.type_of_pat.insert(pat, ty); | ||
285 | } | ||
286 | |||
287 | fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) { | ||
288 | self.result.diagnostics.push(diagnostic); | ||
289 | } | ||
290 | |||
291 | fn make_ty_with_mode( | ||
292 | &mut self, | ||
293 | type_ref: &TypeRef, | ||
294 | impl_trait_mode: ImplTraitLoweringMode, | ||
295 | ) -> Ty { | ||
296 | // FIXME use right resolver for block | ||
297 | let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) | ||
298 | .with_impl_trait_mode(impl_trait_mode); | ||
299 | let ty = Ty::from_hir(&ctx, type_ref); | ||
300 | let ty = self.insert_type_vars(ty); | ||
301 | self.normalize_associated_types_in(ty) | ||
302 | } | ||
303 | |||
304 | fn make_ty(&mut self, type_ref: &TypeRef) -> Ty { | ||
305 | self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed) | ||
306 | } | ||
307 | |||
308 | /// Replaces `Ty::Unknown` with a fresh type variable, so that we may still be able to infer it. | ||
309 | fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty { | ||
310 | match ty { | ||
311 | Ty::Unknown => self.table.new_type_var(), | ||
312 | _ => ty, | ||
313 | } | ||
314 | } | ||
315 | |||
316 | fn insert_type_vars(&mut self, ty: Ty) -> Ty { | ||
317 | ty.fold(&mut |ty| self.insert_type_vars_shallow(ty)) | ||
318 | } | ||
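Illustrative behaviour of the two helpers above, using the informal `?n` notation for fresh inference variables and `{unknown}` for `Ty::Unknown` (as in the display code):

// insert_type_vars_shallow({unknown})          => ?0
// insert_type_vars(Vec<{unknown}>)             => Vec<?1>
// insert_type_vars(fn({unknown}) -> {unknown}) => fn(?2) -> ?3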
319 | |||
320 | fn resolve_obligations_as_possible(&mut self) { | ||
321 | let obligations = mem::replace(&mut self.obligations, Vec::new()); | ||
322 | for obligation in obligations { | ||
323 | let in_env = InEnvironment::new(self.trait_env.clone(), obligation.clone()); | ||
324 | let canonicalized = self.canonicalizer().canonicalize_obligation(in_env); | ||
325 | let solution = | ||
326 | self.db.trait_solve(self.resolver.krate().unwrap(), canonicalized.value.clone()); | ||
327 | |||
328 | match solution { | ||
329 | Some(Solution::Unique(substs)) => { | ||
330 | canonicalized.apply_solution(self, substs.0); | ||
331 | } | ||
332 | Some(Solution::Ambig(Guidance::Definite(substs))) => { | ||
333 | canonicalized.apply_solution(self, substs.0); | ||
334 | self.obligations.push(obligation); | ||
335 | } | ||
336 | Some(_) => { | ||
337 | // FIXME use this when trying to resolve everything at the end | ||
338 | self.obligations.push(obligation); | ||
339 | } | ||
340 |